Oct 10 16:31:45 crc systemd[1]: Starting Kubernetes Kubelet...
Oct 10 16:31:45 crc restorecon[4732]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 10 16:31:45 crc restorecon[4732]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 10 16:31:45 crc 
restorecon[4732]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 10 16:31:45 crc restorecon[4732]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 10 16:31:45 crc restorecon[4732]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 10 16:31:45 crc 
restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc 
restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc 
restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 10 16:31:45 
crc restorecon[4732]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 10 
16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 10 16:31:45 crc restorecon[4732]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 10 16:31:45 crc restorecon[4732]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]:
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 10 16:31:45 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:45 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]:
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 
16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 10 16:31:46 crc restorecon[4732]: 
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 10 16:31:46 crc restorecon[4732]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 10 16:31:46 crc restorecon[4732]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0
Oct 10 16:31:47 crc kubenswrapper[4799]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Oct 10 16:31:47 crc kubenswrapper[4799]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version.
Oct 10 16:31:47 crc kubenswrapper[4799]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Oct 10 16:31:47 crc kubenswrapper[4799]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Oct 10 16:31:47 crc kubenswrapper[4799]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Oct 10 16:31:47 crc kubenswrapper[4799]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.108580    4799 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.113912    4799 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.113948    4799 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.113958    4799 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.113967    4799 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.113977    4799 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.113985    4799 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.113996    4799 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114008    4799 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114019    4799 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114028    4799 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114037    4799 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114048    4799 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114058 4799 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114066 4799 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114075 4799 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114084 4799 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114093 4799 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114101 4799 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114109 4799 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114118 4799 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114125 4799 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114133 4799 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114140 4799 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114148 4799 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114156 4799 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114175 4799 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114203 4799 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114211 4799 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114219 4799 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114261 4799 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114268 4799 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114280 4799 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114290 4799 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114298 4799 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114308 4799 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114326 4799 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114334 4799 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114342 4799 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114350 4799 feature_gate.go:330] unrecognized feature gate: Example Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114358 4799 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114367 4799 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114385 4799 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114393 4799 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114401 4799 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114408 4799 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114417 4799 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114433 4799 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114441 4799 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114449 4799 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114457 4799 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114467 4799 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114476 4799 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114486 4799 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114494 4799 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114502 4799 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114511 4799 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114519 4799 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114527 4799 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114536 4799 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114544 4799 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114552 4799 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114561 4799 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114569 4799 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114576 4799 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114584 4799 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114591 4799 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114600 4799 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114607 4799 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114615 4799 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114623 4799 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.114630 4799 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.115655 4799 flags.go:64] FLAG: --address="0.0.0.0" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.115681 4799 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.115698 4799 flags.go:64] FLAG: --anonymous-auth="true" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.115709 4799 flags.go:64] FLAG: --application-metrics-count-limit="100" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.115721 4799 flags.go:64] FLAG: --authentication-token-webhook="false" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.115730 4799 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 
16:31:47.115742 4799 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.115788 4799 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.115799 4799 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.115809 4799 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.115819 4799 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.115829 4799 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.115838 4799 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.115847 4799 flags.go:64] FLAG: --cgroup-root="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.115856 4799 flags.go:64] FLAG: --cgroups-per-qos="true" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.115865 4799 flags.go:64] FLAG: --client-ca-file="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.115874 4799 flags.go:64] FLAG: --cloud-config="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.115883 4799 flags.go:64] FLAG: --cloud-provider="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.115891 4799 flags.go:64] FLAG: --cluster-dns="[]" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.115903 4799 flags.go:64] FLAG: --cluster-domain="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.115911 4799 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.115921 4799 flags.go:64] FLAG: --config-dir="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.115930 4799 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.115940 4799 flags.go:64] FLAG: --container-log-max-files="5" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.115952 4799 flags.go:64] FLAG: --container-log-max-size="10Mi" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.115961 4799 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.115970 4799 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.115980 4799 flags.go:64] FLAG: --containerd-namespace="k8s.io" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.115989 4799 flags.go:64] FLAG: --contention-profiling="false" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.115998 4799 flags.go:64] FLAG: --cpu-cfs-quota="true" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116007 4799 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116017 4799 flags.go:64] FLAG: --cpu-manager-policy="none" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116026 4799 flags.go:64] FLAG: --cpu-manager-policy-options="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116039 4799 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116063 4799 flags.go:64] FLAG: --enable-controller-attach-detach="true" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116072 4799 flags.go:64] FLAG: 
--enable-debugging-handlers="true" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116081 4799 flags.go:64] FLAG: --enable-load-reader="false" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116091 4799 flags.go:64] FLAG: --enable-server="true" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116100 4799 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116112 4799 flags.go:64] FLAG: --event-burst="100" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116122 4799 flags.go:64] FLAG: --event-qps="50" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116135 4799 flags.go:64] FLAG: --event-storage-age-limit="default=0" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116145 4799 flags.go:64] FLAG: --event-storage-event-limit="default=0" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116153 4799 flags.go:64] FLAG: --eviction-hard="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116173 4799 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116182 4799 flags.go:64] FLAG: --eviction-minimum-reclaim="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116191 4799 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116200 4799 flags.go:64] FLAG: --eviction-soft="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116209 4799 flags.go:64] FLAG: --eviction-soft-grace-period="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116218 4799 flags.go:64] FLAG: --exit-on-lock-contention="false" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116227 4799 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116235 4799 flags.go:64] FLAG: --experimental-mounter-path="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116245 4799 flags.go:64] FLAG: --fail-cgroupv1="false" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116253 4799 flags.go:64] FLAG: --fail-swap-on="true" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116263 4799 flags.go:64] FLAG: --feature-gates="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116277 4799 flags.go:64] FLAG: --file-check-frequency="20s" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116289 4799 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116301 4799 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116313 4799 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116325 4799 flags.go:64] FLAG: --healthz-port="10248" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116336 4799 flags.go:64] FLAG: --help="false" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116347 4799 flags.go:64] FLAG: --hostname-override="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116358 4799 flags.go:64] FLAG: --housekeeping-interval="10s" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116369 4799 flags.go:64] FLAG: --http-check-frequency="20s" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116382 4799 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116393 4799 flags.go:64] FLAG: --image-credential-provider-config="" Oct 10 
16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116404 4799 flags.go:64] FLAG: --image-gc-high-threshold="85" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116415 4799 flags.go:64] FLAG: --image-gc-low-threshold="80" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116427 4799 flags.go:64] FLAG: --image-service-endpoint="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116437 4799 flags.go:64] FLAG: --kernel-memcg-notification="false" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116449 4799 flags.go:64] FLAG: --kube-api-burst="100" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116461 4799 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116473 4799 flags.go:64] FLAG: --kube-api-qps="50" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116488 4799 flags.go:64] FLAG: --kube-reserved="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116498 4799 flags.go:64] FLAG: --kube-reserved-cgroup="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116507 4799 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116516 4799 flags.go:64] FLAG: --kubelet-cgroups="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116525 4799 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116534 4799 flags.go:64] FLAG: --lock-file="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116543 4799 flags.go:64] FLAG: --log-cadvisor-usage="false" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116552 4799 flags.go:64] FLAG: --log-flush-frequency="5s" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116562 4799 flags.go:64] FLAG: --log-json-info-buffer-size="0" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116628 4799 flags.go:64] FLAG: --log-json-split-stream="false" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116637 4799 flags.go:64] FLAG: --log-text-info-buffer-size="0" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116646 4799 flags.go:64] FLAG: --log-text-split-stream="false" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116655 4799 flags.go:64] FLAG: --logging-format="text" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116664 4799 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116674 4799 flags.go:64] FLAG: --make-iptables-util-chains="true" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116682 4799 flags.go:64] FLAG: --manifest-url="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116691 4799 flags.go:64] FLAG: --manifest-url-header="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116703 4799 flags.go:64] FLAG: --max-housekeeping-interval="15s" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116712 4799 flags.go:64] FLAG: --max-open-files="1000000" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116723 4799 flags.go:64] FLAG: --max-pods="110" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116732 4799 flags.go:64] FLAG: --maximum-dead-containers="-1" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116742 4799 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116750 4799 flags.go:64] FLAG: --memory-manager-policy="None" Oct 10 16:31:47 crc 
kubenswrapper[4799]: I1010 16:31:47.116789 4799 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116799 4799 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116811 4799 flags.go:64] FLAG: --node-ip="192.168.126.11" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116822 4799 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116851 4799 flags.go:64] FLAG: --node-status-max-images="50" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116863 4799 flags.go:64] FLAG: --node-status-update-frequency="10s" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116875 4799 flags.go:64] FLAG: --oom-score-adj="-999" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116886 4799 flags.go:64] FLAG: --pod-cidr="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116898 4799 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116921 4799 flags.go:64] FLAG: --pod-manifest-path="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116932 4799 flags.go:64] FLAG: --pod-max-pids="-1" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116944 4799 flags.go:64] FLAG: --pods-per-core="0" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116955 4799 flags.go:64] FLAG: --port="10250" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116968 4799 flags.go:64] FLAG: --protect-kernel-defaults="false" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116979 4799 flags.go:64] FLAG: --provider-id="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.116989 4799 flags.go:64] FLAG: --qos-reserved="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117000 4799 flags.go:64] FLAG: --read-only-port="10255" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117010 4799 flags.go:64] FLAG: --register-node="true" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117019 4799 flags.go:64] FLAG: --register-schedulable="true" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117028 4799 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117042 4799 flags.go:64] FLAG: --registry-burst="10" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117051 4799 flags.go:64] FLAG: --registry-qps="5" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117060 4799 flags.go:64] FLAG: --reserved-cpus="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117069 4799 flags.go:64] FLAG: --reserved-memory="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117082 4799 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117093 4799 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117121 4799 flags.go:64] FLAG: --rotate-certificates="false" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117133 4799 flags.go:64] FLAG: --rotate-server-certificates="false" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117143 4799 flags.go:64] FLAG: --runonce="false" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 
16:31:47.117156 4799 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117167 4799 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117178 4799 flags.go:64] FLAG: --seccomp-default="false" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117188 4799 flags.go:64] FLAG: --serialize-image-pulls="true" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117197 4799 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117207 4799 flags.go:64] FLAG: --storage-driver-db="cadvisor" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117226 4799 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117235 4799 flags.go:64] FLAG: --storage-driver-password="root" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117244 4799 flags.go:64] FLAG: --storage-driver-secure="false" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117253 4799 flags.go:64] FLAG: --storage-driver-table="stats" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117263 4799 flags.go:64] FLAG: --storage-driver-user="root" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117272 4799 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117285 4799 flags.go:64] FLAG: --sync-frequency="1m0s" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117309 4799 flags.go:64] FLAG: --system-cgroups="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117344 4799 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117370 4799 flags.go:64] FLAG: --system-reserved-cgroup="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117380 4799 flags.go:64] FLAG: --tls-cert-file="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117388 4799 flags.go:64] FLAG: --tls-cipher-suites="[]" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117399 4799 flags.go:64] FLAG: --tls-min-version="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117408 4799 flags.go:64] FLAG: --tls-private-key-file="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117418 4799 flags.go:64] FLAG: --topology-manager-policy="none" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117427 4799 flags.go:64] FLAG: --topology-manager-policy-options="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117445 4799 flags.go:64] FLAG: --topology-manager-scope="container" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117456 4799 flags.go:64] FLAG: --v="2" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117470 4799 flags.go:64] FLAG: --version="false" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117501 4799 flags.go:64] FLAG: --vmodule="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117512 4799 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.117522 4799 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.117793 4799 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.117806 4799 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation 
Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.117823 4799 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.117831 4799 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.117839 4799 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.117847 4799 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.117855 4799 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.117862 4799 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.117870 4799 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.117879 4799 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.117886 4799 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.117894 4799 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.117902 4799 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.117909 4799 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.117918 4799 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.117925 4799 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.117935 4799 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.117942 4799 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.117950 4799 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.117958 4799 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.117967 4799 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.117975 4799 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.117983 4799 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.117991 4799 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.117999 4799 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118006 4799 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118014 4799 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 
16:31:47.118022 4799 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118051 4799 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118059 4799 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118067 4799 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118075 4799 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118083 4799 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118091 4799 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118098 4799 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118109 4799 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118118 4799 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118127 4799 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118135 4799 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118143 4799 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118152 4799 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118160 4799 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118168 4799 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118178 4799 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118189 4799 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118199 4799 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118207 4799 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118218 4799 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118228 4799 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118236 4799 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118245 4799 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118254 4799 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118263 4799 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118272 4799 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118279 4799 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118287 4799 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118296 4799 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118303 4799 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118311 4799 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118319 4799 feature_gate.go:330] unrecognized feature gate: Example Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118327 4799 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118335 4799 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118343 4799 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118350 4799 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118367 4799 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118375 4799 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118383 4799 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118393 4799 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118402 4799 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118411 4799 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.118419 4799 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.118451 4799 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.131121 4799 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.131171 4799 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131293 4799 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131305 4799 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131314 4799 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131324 4799 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131333 4799 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131341 4799 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131349 4799 feature_gate.go:330] unrecognized feature gate: Example Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131357 4799 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131365 4799 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131373 4799 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131380 4799 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131388 4799 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131396 4799 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131404 4799 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131412 4799 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131420 4799 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131427 4799 feature_gate.go:330] 
unrecognized feature gate: ImageStreamImportMode Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131435 4799 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131443 4799 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131451 4799 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131458 4799 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131466 4799 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131474 4799 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131481 4799 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131490 4799 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131498 4799 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131505 4799 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131514 4799 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131522 4799 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131529 4799 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131537 4799 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131545 4799 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131552 4799 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131560 4799 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131569 4799 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131577 4799 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131584 4799 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131592 4799 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131600 4799 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131611 4799 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131623 4799 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131636 4799 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131646 4799 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131654 4799 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131662 4799 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131670 4799 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131678 4799 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131686 4799 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131694 4799 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131701 4799 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131709 4799 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131717 4799 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131725 4799 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131733 4799 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131742 4799 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131750 4799 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131784 4799 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131793 4799 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131800 4799 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131812 4799 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131822 4799 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131832 4799 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131841 4799 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131849 4799 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131857 4799 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131865 4799 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131873 4799 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131880 4799 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131888 4799 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131898 4799 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.131908 4799 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.131922 4799 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132138 4799 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132152 4799 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132161 4799 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132170 4799 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132178 4799 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132187 4799 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132197 4799 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132208 4799 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132216 4799 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132227 4799 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132236 4799 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132245 4799 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132252 4799 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132260 4799 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132268 4799 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132276 4799 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132284 4799 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132291 4799 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132298 4799 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132306 4799 feature_gate.go:330] unrecognized feature gate: Example Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132314 4799 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132322 4799 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132329 4799 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132337 4799 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132344 4799 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132353 4799 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132361 4799 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132368 4799 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132376 4799 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132386 4799 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132396 4799 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132404 4799 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132412 4799 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132419 4799 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132427 4799 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132435 4799 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132444 4799 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132451 4799 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132459 4799 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132469 4799 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132479 4799 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132490 4799 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132500 4799 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132508 4799 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132517 4799 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132526 4799 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132534 4799 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132542 4799 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132550 4799 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132557 4799 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132565 4799 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132573 4799 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132580 4799 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132588 4799 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132596 4799 feature_gate.go:330] unrecognized 
feature gate: VSphereMultiVCenters Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132603 4799 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132611 4799 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132619 4799 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132626 4799 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132634 4799 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132641 4799 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132649 4799 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132658 4799 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132665 4799 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132674 4799 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132681 4799 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132688 4799 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132696 4799 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132704 4799 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132712 4799 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.132720 4799 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.132733 4799 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.134106 4799 server.go:940] "Client rotation is on, will bootstrap in background" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.141119 4799 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.141304 4799 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.144040 4799 server.go:997] "Starting client certificate rotation"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.144089 4799 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.144340 4799 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2026-01-03 22:49:59.445974459 +0000 UTC
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.144466 4799 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 2046h18m12.30151331s for next certificate rotation
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.182378 4799 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.185246 4799 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.209633 4799 log.go:25] "Validated CRI v1 runtime API"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.255309 4799 log.go:25] "Validated CRI v1 image API"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.257916 4799 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.264396 4799 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-10-10-16-26-38-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.264450 4799 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:41 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.300874 4799 manager.go:217] Machine: {Timestamp:2025-10-10 16:31:47.293908834 +0000 UTC m=+0.802233029 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654132736 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:19c7da3e-bb2d-454e-9c2c-9c9464638bfe BootID:d99534f1-66d4-4990-b867-b559b1013899 Filesystems:[{Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827068416 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:41 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730829824 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:cb:ed:eb Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:cb:ed:eb Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:70:fd:f4 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:17:93:3b Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:60:bd:8b Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:14:e2:8b Speed:-1 Mtu:1496} {Name:ens7.23 MacAddress:52:54:00:1b:3d:3e Speed:-1 Mtu:1496} {Name:eth10 MacAddress:02:a2:59:1a:cb:08 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:52:59:b9:7f:0f:c8 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654132736 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.301403 4799 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.301674 4799 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.303457 4799 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.303966 4799 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.304044 4799 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.304526 4799 topology_manager.go:138] "Creating topology manager with none policy"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.304553 4799 container_manager_linux.go:303] "Creating device plugin manager"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.305096 4799 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.305163 4799 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.305447 4799 state_mem.go:36] "Initialized new in-memory state store"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.305632 4799 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.309579 4799 kubelet.go:418] "Attempting to sync node with API server"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.309626 4799 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.309736 4799 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.309801 4799 kubelet.go:324] "Adding apiserver pod source"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.309830 4799 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.315021 4799 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.316739 4799 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.318191 4799 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.145:6443: connect: connection refused
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.318296 4799 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.318192 4799 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.145:6443: connect: connection refused
Oct 10 16:31:47 crc kubenswrapper[4799]: E1010 16:31:47.318530 4799 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.145:6443: connect: connection refused" logger="UnhandledError"
Oct 10 16:31:47 crc kubenswrapper[4799]: E1010 16:31:47.318413 4799 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.145:6443: connect: connection refused" logger="UnhandledError"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.320710 4799 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.320750 4799 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.320796 4799 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.320812 4799 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.320835 4799 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.320849 4799 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.320863 4799 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.320884 4799 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.320898 4799 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.320912 4799 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.320955 4799 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.320969 4799 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.323227 4799 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.324336 4799 server.go:1280] "Started kubelet"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.327452 4799 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.145:6443: connect: connection refused
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.328078 4799 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.328856 4799 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Oct 10 16:31:47 crc systemd[1]: Started Kubernetes Kubelet.
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.329682 4799 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.333554 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.333598 4799 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.333860 4799 volume_manager.go:287] "The desired_state_of_world populator starts"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.333903 4799 volume_manager.go:289] "Starting Kubelet Volume Manager"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.334026 4799 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.334150 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-17 01:52:16.941285628 +0000 UTC
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.334227 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 1617h20m29.607063618s for next certificate rotation
Oct 10 16:31:47 crc kubenswrapper[4799]: E1010 16:31:47.334319 4799 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.335382 4799 factory.go:55] Registering systemd factory
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.335416 4799 factory.go:221] Registration of the systemd container factory successfully
Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.335681 4799 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.145:6443: connect: connection refused
Oct 10 16:31:47 crc kubenswrapper[4799]: E1010 16:31:47.335859 4799 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.145:6443: connect: connection refused" logger="UnhandledError"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.335948 4799 factory.go:153] Registering CRI-O factory
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.336027 4799 factory.go:221] Registration of the crio container factory successfully
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.336147 4799 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.336193 4799 factory.go:103] Registering Raw factory
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.336228 4799 manager.go:1196] Started watching for new ooms in manager
Oct 10 16:31:47 crc kubenswrapper[4799]: E1010 16:31:47.336998 4799 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.145:6443: connect: connection refused" interval="200ms"
Oct 10 16:31:47 crc kubenswrapper[4799]: E1010 16:31:47.334127 4799 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.145:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186d2e45f244613d default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-10 16:31:47.323920701 +0000 UTC m=+0.832244856,LastTimestamp:2025-10-10 16:31:47.323920701 +0000 UTC m=+0.832244856,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.340638 4799 manager.go:319] Starting recovery of all containers
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.341380 4799 server.go:460] "Adding debug handlers to kubelet server"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.351498 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.351608 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.351629 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.351648 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.351667 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.351689 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.351707 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.351725 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.351745 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.351794 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.351814 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.351836 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.351854 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.351875 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.351896 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.351915 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.351935 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.351954 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.351972 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.351990 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.352009 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.352026 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.352046 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.354291 4799 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.354350 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.354371 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.354393 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.354417 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.354436 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.354480 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.354499 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.354517 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.354538 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.354555 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.354573 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.354592 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.354611 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.354629 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.354646 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.354664 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.354682 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.354700 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.354717 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.354733 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.356369 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.356490 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.356532 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.356596 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.356633 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.356896 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.356955 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.356989 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.357039 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.357111 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.357158 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.357212 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.357246 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.357295 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.357338 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.357366 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.357441 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.357487 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.357515 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.357555 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.357581 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.357611 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.357649 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.357676 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.357705 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.357746 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.357811 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.357854 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.357881 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.357912 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.357954 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.357982 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.358023 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.358051 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.358081 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.358120 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.358148 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.358184 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.358213 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.358241 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.358278 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.358309 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.358346 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.358373 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.358403 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.358444 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.358475 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.358501 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.358536 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.358564 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.358613 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.358640 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.358665 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.358702 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.358727 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.358796 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.358826 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.358851 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.358886 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.358914 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.358949 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.358996 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.359038 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.359083 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.359118 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.359240 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.359285 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.359317 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.359358 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.359397 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.359424 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.359462 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.359493 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.359524 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.359567 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.359594 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.359632 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.359662 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.359688 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.360974 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361019 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361058 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361082 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361130 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361151 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361173 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361204 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361224 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361264 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361306 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361330 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361359 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361379 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361403 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361423 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext=""
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361443 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f"
volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361485 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361509 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361541 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361560 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361582 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361607 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361629 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361667 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361695 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361715 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361743 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" 
volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361794 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361817 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361841 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361863 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361890 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361925 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361945 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361975 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.361996 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.362021 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.362040 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" 
volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.362060 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.362105 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.362128 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.362160 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.362185 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.362207 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.362235 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.362372 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.362393 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.362419 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.362438 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" 
volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.362459 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.362487 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.362522 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.362548 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.362567 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.362589 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.362619 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.362639 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.362665 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.362684 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.362705 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" 
volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.362730 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.362749 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.362818 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.362840 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.362863 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.362903 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.362922 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.362949 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.362970 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.362991 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.363016 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" 
volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.363035 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.363056 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.363081 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.363100 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.363125 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.363144 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.363164 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.363189 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.363209 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.363250 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.363270 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.363292 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.363317 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.363334 4799 reconstruct.go:97] "Volume reconstruction finished" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.363348 4799 reconciler.go:26] "Reconciler: start to sync state" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.375451 4799 manager.go:324] Recovery completed Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.386418 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.388194 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.388586 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.388605 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.389672 4799 cpu_manager.go:225] "Starting CPU manager" policy="none" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.389725 4799 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.389799 4799 state_mem.go:36] "Initialized new in-memory state store" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.399073 4799 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.401107 4799 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.401152 4799 status_manager.go:217] "Starting to sync pod status with apiserver" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.401212 4799 kubelet.go:2335] "Starting kubelet main sync loop" Oct 10 16:31:47 crc kubenswrapper[4799]: E1010 16:31:47.401262 4799 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.402228 4799 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.145:6443: connect: connection refused Oct 10 16:31:47 crc kubenswrapper[4799]: E1010 16:31:47.402308 4799 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.145:6443: connect: connection refused" logger="UnhandledError" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.414502 4799 policy_none.go:49] "None policy: Start" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.415586 4799 memory_manager.go:170] "Starting memorymanager" policy="None" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.415654 4799 state_mem.go:35] "Initializing new in-memory state store" Oct 10 16:31:47 crc kubenswrapper[4799]: E1010 16:31:47.434445 4799 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.492544 4799 manager.go:334] "Starting Device Plugin manager" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.492622 4799 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.492642 4799 server.go:79] "Starting device plugin registration server" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.493482 4799 eviction_manager.go:189] "Eviction manager: starting control loop" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.494191 4799 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.494462 4799 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.494732 4799 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.494864 4799 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.502058 4799 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.502155 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.503335 4799 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.503377 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.503407 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.503597 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.503968 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.504006 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.504870 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.504889 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.504920 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.504933 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.504975 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.504994 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.505026 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.505197 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.505227 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.505925 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.505955 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.505970 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.506129 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.506278 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.506322 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.506806 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.506844 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.506860 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.507589 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.507621 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.507636 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:31:47 crc kubenswrapper[4799]: E1010 16:31:47.507681 4799 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.507751 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.507811 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.507825 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.508014 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.508240 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.508296 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.509121 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.509260 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.509383 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.509655 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.509826 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.510451 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.510481 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.510494 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.513963 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.513994 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.514006 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:31:47 crc kubenswrapper[4799]: E1010 16:31:47.538960 4799 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.145:6443: connect: connection refused" interval="400ms" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.565768 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.565850 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.565891 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.565925 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.565959 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 
16:31:47.566047 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.566087 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.566162 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.566212 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.566239 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.566263 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.566310 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.566420 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.566521 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.566630 4799 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.594310 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.595720 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.595795 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.595811 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.595846 4799 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 10 16:31:47 crc kubenswrapper[4799]: E1010 16:31:47.596341 4799 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.145:6443: connect: connection refused" node="crc" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.668002 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.668067 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.668107 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.668139 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.668172 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.668178 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 10 
16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.668204 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.668230 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.668251 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.668238 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.668279 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.668326 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.668336 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.668370 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.668425 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.668373 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.668483 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.668512 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.668532 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.668552 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.668570 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.668551 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.668611 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.668638 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.668572 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.668676 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.668703 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.668748 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.668875 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.668778 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.796787 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.798356 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.798427 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.798457 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.798513 4799 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Oct 10 16:31:47 crc kubenswrapper[4799]: E1010 16:31:47.799286 4799 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.145:6443: connect: connection refused" node="crc"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.837664 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.850591 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.870580 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.888555 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 10 16:31:47 crc kubenswrapper[4799]: I1010 16:31:47.898984 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.908847 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-839056ebe01f941cc513f973553d027427d6a51cf4d055233c61ea2aafe0d9ed WatchSource:0}: Error finding container 839056ebe01f941cc513f973553d027427d6a51cf4d055233c61ea2aafe0d9ed: Status 404 returned error can't find the container with id 839056ebe01f941cc513f973553d027427d6a51cf4d055233c61ea2aafe0d9ed
Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.910153 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-9390c5e231f372cde3bd008dc0169e1b271449d2746696cbe63e28f6acbbf616 WatchSource:0}: Error finding container 9390c5e231f372cde3bd008dc0169e1b271449d2746696cbe63e28f6acbbf616: Status 404 returned error can't find the container with id 9390c5e231f372cde3bd008dc0169e1b271449d2746696cbe63e28f6acbbf616
Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.912901 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-8ba25c9cf047661e52275f389b3fadcf3981610d2721db856006dd87ab75a202 WatchSource:0}: Error finding container 8ba25c9cf047661e52275f389b3fadcf3981610d2721db856006dd87ab75a202: Status 404 returned error can't find the container with id 8ba25c9cf047661e52275f389b3fadcf3981610d2721db856006dd87ab75a202
Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.913313 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-def5695080d593d2054a501a2b5ccc9eda74a0f7a08f89abb9a39673bc74a6a5 WatchSource:0}: Error finding container def5695080d593d2054a501a2b5ccc9eda74a0f7a08f89abb9a39673bc74a6a5: Status 404 returned error can't find the container with id def5695080d593d2054a501a2b5ccc9eda74a0f7a08f89abb9a39673bc74a6a5
Oct 10 16:31:47 crc kubenswrapper[4799]: W1010 16:31:47.915967 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-0f9caf4817286d34ef901065176a91be9f7aa2301f595285d7bca0be6e45ed9e WatchSource:0}: Error finding container 0f9caf4817286d34ef901065176a91be9f7aa2301f595285d7bca0be6e45ed9e: Status 404 returned error can't find the container with id 0f9caf4817286d34ef901065176a91be9f7aa2301f595285d7bca0be6e45ed9e
Oct 10 16:31:47 crc kubenswrapper[4799]: E1010 16:31:47.940177 4799 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.145:6443: connect: connection refused" interval="800ms"
Oct 10 16:31:48 crc kubenswrapper[4799]: W1010 16:31:48.156711 4799 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.145:6443: connect: connection refused
Oct 10 16:31:48 crc kubenswrapper[4799]: E1010 16:31:48.156844 4799 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.145:6443: connect: connection refused" logger="UnhandledError"
Oct 10 16:31:48 crc kubenswrapper[4799]: I1010 16:31:48.199626 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 10 16:31:48 crc kubenswrapper[4799]: I1010 16:31:48.201291 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:31:48 crc kubenswrapper[4799]: I1010 16:31:48.201340 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:31:48 crc kubenswrapper[4799]: I1010 16:31:48.201355 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:31:48 crc kubenswrapper[4799]: I1010 16:31:48.201389 4799 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Oct 10 16:31:48 crc kubenswrapper[4799]: E1010 16:31:48.201825 4799 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.145:6443: connect: connection refused" node="crc"
Oct 10 16:31:48 crc kubenswrapper[4799]: I1010 16:31:48.329356 4799 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.145:6443: connect: connection refused
Oct 10 16:31:48 crc kubenswrapper[4799]: I1010 16:31:48.404823 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"839056ebe01f941cc513f973553d027427d6a51cf4d055233c61ea2aafe0d9ed"}
Oct 10 16:31:48 crc kubenswrapper[4799]: I1010 16:31:48.408800 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"8ba25c9cf047661e52275f389b3fadcf3981610d2721db856006dd87ab75a202"}
Oct 10 16:31:48 crc kubenswrapper[4799]: I1010 16:31:48.410465 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"0f9caf4817286d34ef901065176a91be9f7aa2301f595285d7bca0be6e45ed9e"}
Oct 10 16:31:48 crc kubenswrapper[4799]: I1010 16:31:48.411738 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"9390c5e231f372cde3bd008dc0169e1b271449d2746696cbe63e28f6acbbf616"}
Oct 10 16:31:48 crc kubenswrapper[4799]: I1010 16:31:48.413319 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"def5695080d593d2054a501a2b5ccc9eda74a0f7a08f89abb9a39673bc74a6a5"}
Oct 10 16:31:48 crc kubenswrapper[4799]: W1010 16:31:48.695683 4799 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.145:6443: connect: connection refused
Oct 10 16:31:48 crc kubenswrapper[4799]: E1010 16:31:48.695802 4799 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.145:6443: connect: connection refused" logger="UnhandledError"
Oct 10 16:31:48 crc kubenswrapper[4799]: E1010 16:31:48.741322 4799 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.145:6443: connect: connection refused" interval="1.6s"
Oct 10 16:31:48 crc kubenswrapper[4799]: W1010 16:31:48.791156 4799 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.145:6443: connect: connection refused
Oct 10 16:31:48 crc kubenswrapper[4799]: E1010 16:31:48.791252 4799 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.145:6443: connect: connection refused" logger="UnhandledError"
Oct 10 16:31:48 crc kubenswrapper[4799]: W1010 16:31:48.846123 4799 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.145:6443: connect: connection refused
Oct 10 16:31:48 crc kubenswrapper[4799]: E1010 16:31:48.846229 4799 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.145:6443: connect: connection refused" logger="UnhandledError"
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.002702 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.005953 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.006012 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.006025 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.006056 4799 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Oct 10 16:31:49 crc kubenswrapper[4799]: E1010 16:31:49.006668 4799 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.145:6443: connect: connection refused" node="crc"
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.328490 4799 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.145:6443: connect: connection refused
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.419918 4799 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="91106a41672b01d9f5c61cfc3001b84f024f3b96649bbc9174f3a635fc8034a9" exitCode=0
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.420011 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"91106a41672b01d9f5c61cfc3001b84f024f3b96649bbc9174f3a635fc8034a9"}
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.420109 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.422222 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.422285 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.422305 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.423947 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"298a1a9571fbe118fe81ff3e7403e298bcde9b683cffab574fbb03d5adc1fb67"}
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.423990 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"f767e89684b9b515da850360aaf9d7a02173395faf0654e9f0b3a4752a3d608b"}
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.426618 4799 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b" exitCode=0
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.426730 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b"}
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.426849 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.428510 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.428555 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.428574 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.429703 4799 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6" exitCode=0
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.429838 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6"}
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.429895 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.431717 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.432114 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.432163 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.432185 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.433050 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.433108 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.433136 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.433593 4799 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="3c7561ae8f1ea6cb96c659f004106dfdc36f0a3ad76e66f9dd5b55ad905742df" exitCode=0
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.433671 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"3c7561ae8f1ea6cb96c659f004106dfdc36f0a3ad76e66f9dd5b55ad905742df"}
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.433869 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.436826 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.436883 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:31:49 crc kubenswrapper[4799]: I1010 16:31:49.436907 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:31:50 crc kubenswrapper[4799]: I1010 16:31:50.329384 4799 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.145:6443: connect: connection refused
Oct 10 16:31:50 crc kubenswrapper[4799]: E1010 16:31:50.342342 4799 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.145:6443: connect: connection refused" interval="3.2s"
Oct 10 16:31:50 crc kubenswrapper[4799]: I1010 16:31:50.440844 4799 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d" exitCode=0
Oct 10 16:31:50 crc kubenswrapper[4799]: I1010 16:31:50.440895 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d"}
Oct 10 16:31:50 crc kubenswrapper[4799]: I1010 16:31:50.441020 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 10 16:31:50 crc kubenswrapper[4799]: I1010 16:31:50.442601 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:31:50 crc kubenswrapper[4799]: I1010 16:31:50.442656 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:31:50 crc kubenswrapper[4799]: I1010 16:31:50.442675 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:31:50 crc kubenswrapper[4799]: I1010 16:31:50.443691 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"bd57f69503813185900ddde784de4d3582b141416c6310598d416eec17c0beac"}
Oct 10 16:31:50 crc kubenswrapper[4799]: I1010 16:31:50.443742 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 10 16:31:50 crc kubenswrapper[4799]: I1010 16:31:50.445358 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:31:50 crc kubenswrapper[4799]: I1010 16:31:50.445384 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:31:50 crc kubenswrapper[4799]: I1010 16:31:50.445394 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:31:50 crc kubenswrapper[4799]: I1010 16:31:50.448910 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"88216eac74e0df9deb1ca1bef893deb2e23a79ffffdbd8a851a67df407eaa470"}
Oct 10 16:31:50 crc kubenswrapper[4799]: I1010 16:31:50.448959 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"c48434cdadac2409d0e3baf595e00260b1e3f94b8b9dab62e3f87503a6e888be"}
Oct 10 16:31:50 crc kubenswrapper[4799]: I1010 16:31:50.448974 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"38c4fe49eff3373937abdebfb7d58fe9d5c73809375a3dca4f165aab84d6cbd1"}
Oct 10 16:31:50 crc kubenswrapper[4799]: I1010 16:31:50.449034 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 10 16:31:50 crc kubenswrapper[4799]: I1010 16:31:50.450120 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:31:50 crc kubenswrapper[4799]: I1010 16:31:50.450148 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:31:50 crc kubenswrapper[4799]: I1010 16:31:50.450157 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:31:50 crc kubenswrapper[4799]: I1010 16:31:50.465441 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 10 16:31:50 crc kubenswrapper[4799]: I1010 16:31:50.465891 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c3a649a65ab118025ea70d1d7cf71236cb96992671c3bc7659d591640b53f941"}
Oct 10 16:31:50 crc kubenswrapper[4799]: I1010 16:31:50.465946 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"92de340d859cff018a661f0a7f7fe209ffae161bf6f39deb005c7148591fc60b"}
Oct 10 16:31:50 crc kubenswrapper[4799]: I1010 16:31:50.466726 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:31:50 crc kubenswrapper[4799]: I1010 16:31:50.466775 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:31:50 crc kubenswrapper[4799]: I1010 16:31:50.466788 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:31:50 crc kubenswrapper[4799]: I1010 16:31:50.472436 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab"}
Oct 10 16:31:50 crc kubenswrapper[4799]: I1010 16:31:50.472495 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73"}
Oct 10 16:31:50 crc kubenswrapper[4799]: I1010 16:31:50.472528 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2"}
Oct 10 16:31:50 crc kubenswrapper[4799]: I1010 16:31:50.607828 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 10 16:31:50 crc kubenswrapper[4799]: I1010 16:31:50.609718 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:31:50 crc kubenswrapper[4799]: I1010 16:31:50.609782 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:31:50 crc kubenswrapper[4799]: I1010 16:31:50.609792 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:31:50 crc kubenswrapper[4799]: I1010 16:31:50.609817 4799 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Oct 10 16:31:50 crc kubenswrapper[4799]: E1010 16:31:50.610238 4799 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.145:6443: connect: connection refused" node="crc"
Oct 10 16:31:50 crc kubenswrapper[4799]: W1010 16:31:50.644906 4799 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.145:6443: connect: connection refused
Oct 10 16:31:50 crc kubenswrapper[4799]: E1010 16:31:50.645010 4799 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.145:6443: connect: connection refused" logger="UnhandledError"
Oct 10 16:31:50 crc kubenswrapper[4799]: W1010 16:31:50.659330 4799 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.145:6443: connect: connection refused
Oct 10 16:31:50 crc kubenswrapper[4799]: E1010 16:31:50.659379 4799 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.145:6443: connect: connection refused" logger="UnhandledError"
Oct 10 16:31:51 crc kubenswrapper[4799]: W1010 16:31:51.239114 4799 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.145:6443: connect: connection refused
Oct 10 16:31:51 crc kubenswrapper[4799]: E1010 16:31:51.239268 4799 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.145:6443: connect: connection refused" logger="UnhandledError"
Oct 10 16:31:51 crc kubenswrapper[4799]: I1010 16:31:51.329348 4799 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.145:6443: connect: connection refused
Oct 10 16:31:51 crc kubenswrapper[4799]: I1010 16:31:51.481710 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"55af9201526519d123335f21cc2dada8e280f5d90efb03821a45fe469c7b2ede"}
Oct 10 16:31:51 crc kubenswrapper[4799]: I1010 16:31:51.481834 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc"}
Oct 10 16:31:51 crc kubenswrapper[4799]: I1010 16:31:51.481789 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 10 16:31:51 crc kubenswrapper[4799]: I1010 16:31:51.483458 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:31:51 crc kubenswrapper[4799]: I1010 16:31:51.483499 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:31:51 crc kubenswrapper[4799]: I1010 16:31:51.483517 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:31:51 crc kubenswrapper[4799]: I1010 16:31:51.485333 4799 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f" exitCode=0
Oct 10 16:31:51 crc kubenswrapper[4799]: I1010 16:31:51.485451 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 10 16:31:51 crc kubenswrapper[4799]: I1010 16:31:51.485506 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 10 16:31:51 crc kubenswrapper[4799]: I1010 16:31:51.485554 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 10 16:31:51 crc kubenswrapper[4799]: I1010 16:31:51.485942 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f"}
Oct 10 16:31:51 crc kubenswrapper[4799]: I1010 16:31:51.486019 4799 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 10 16:31:51 crc kubenswrapper[4799]: I1010 16:31:51.486089 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 10 16:31:51 crc kubenswrapper[4799]: I1010 16:31:51.486676 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:31:51 crc kubenswrapper[4799]: I1010 16:31:51.486785 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:31:51 crc kubenswrapper[4799]: I1010 16:31:51.486818 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:31:51 crc kubenswrapper[4799]: I1010 16:31:51.487043 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:31:51 crc kubenswrapper[4799]: I1010 16:31:51.487073 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:31:51 crc kubenswrapper[4799]: I1010 16:31:51.487089 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:31:51 crc kubenswrapper[4799]: I1010 16:31:51.487128 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:31:51 crc kubenswrapper[4799]: I1010 16:31:51.487150 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:31:51 crc kubenswrapper[4799]: I1010 16:31:51.487162 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:31:51 crc kubenswrapper[4799]: I1010 16:31:51.487920 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:31:51 crc kubenswrapper[4799]: I1010 16:31:51.487986 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:31:51 crc kubenswrapper[4799]: I1010 16:31:51.488015 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:31:51 crc kubenswrapper[4799]: I1010 16:31:51.653607 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 10 16:31:51 crc kubenswrapper[4799]: W1010 16:31:51.745800 4799 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.145:6443: connect: connection refused
Oct 10 16:31:51 crc kubenswrapper[4799]: E1010 16:31:51.745878 4799 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.145:6443: connect: connection refused" logger="UnhandledError"
Oct 10 16:31:51 crc kubenswrapper[4799]: E1010 16:31:51.970706 4799 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.145:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186d2e45f244613d default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-10 16:31:47.323920701 +0000 UTC m=+0.832244856,LastTimestamp:2025-10-10 16:31:47.323920701 +0000 UTC m=+0.832244856,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Oct 10 16:31:52 crc kubenswrapper[4799]: I1010 16:31:52.329830 4799 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.145:6443: connect: connection refused
Oct 10 16:31:52 crc kubenswrapper[4799]: I1010 16:31:52.498831 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5"}
Oct 10 16:31:52 crc kubenswrapper[4799]: I1010 16:31:52.498915 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195"}
Oct 10 16:31:52 crc kubenswrapper[4799]: I1010 16:31:52.498861 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 10 16:31:52 crc kubenswrapper[4799]: I1010 16:31:52.498995 4799 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 10 16:31:52 crc kubenswrapper[4799]: I1010 16:31:52.499062 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 10 16:31:52 crc kubenswrapper[4799]: I1010 16:31:52.501318 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:31:52 crc kubenswrapper[4799]: I1010 16:31:52.501343 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:31:52 crc kubenswrapper[4799]: I1010 16:31:52.501351 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:31:52 crc kubenswrapper[4799]: I1010 16:31:52.502063 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:31:52 crc kubenswrapper[4799]: I1010 16:31:52.502075 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:31:52 crc kubenswrapper[4799]: I1010 16:31:52.502082 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:31:53 crc kubenswrapper[4799]: I1010 16:31:53.505391 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Oct 10 16:31:53 crc kubenswrapper[4799]: I1010 16:31:53.509971 4799 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="55af9201526519d123335f21cc2dada8e280f5d90efb03821a45fe469c7b2ede" exitCode=255
Oct 10 16:31:53 crc kubenswrapper[4799]: I1010 16:31:53.510044 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"55af9201526519d123335f21cc2dada8e280f5d90efb03821a45fe469c7b2ede"}
Oct 10 16:31:53 crc kubenswrapper[4799]: I1010 16:31:53.510292 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 10 16:31:53 crc kubenswrapper[4799]: I1010 16:31:53.511865 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:31:53 crc kubenswrapper[4799]: I1010 16:31:53.511908 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:31:53 crc kubenswrapper[4799]: I1010 16:31:53.511958 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:31:53 crc kubenswrapper[4799]: I1010 16:31:53.512951 4799 scope.go:117] "RemoveContainer" containerID="55af9201526519d123335f21cc2dada8e280f5d90efb03821a45fe469c7b2ede"
Oct 10 16:31:53 crc kubenswrapper[4799]: I1010 16:31:53.516546 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b1dad40a84c7f22ffb5d52c708c7e2e03a181c5778793050495c8333ae005731"}
Oct 10 16:31:53 crc kubenswrapper[4799]: I1010 16:31:53.516649 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da"}
Oct 10 16:31:53 crc kubenswrapper[4799]: I1010 16:31:53.516665 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8"}
Oct 10 16:31:53 crc kubenswrapper[4799]: I1010 16:31:53.516916 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 10 16:31:53 crc kubenswrapper[4799]: I1010 16:31:53.518322 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:31:53 crc kubenswrapper[4799]: I1010 16:31:53.518381 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:31:53 crc kubenswrapper[4799]: I1010 16:31:53.518400 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:31:53 crc kubenswrapper[4799]: I1010 16:31:53.608120 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 10 16:31:53 crc kubenswrapper[4799]: I1010 16:31:53.810485 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 10 16:31:53 crc kubenswrapper[4799]: I1010 16:31:53.811863 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:31:53 crc kubenswrapper[4799]: I1010 16:31:53.811900 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:31:53 crc kubenswrapper[4799]: I1010 16:31:53.811909 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:31:53 crc kubenswrapper[4799]: I1010 16:31:53.811945 4799 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Oct 10 16:31:54 crc kubenswrapper[4799]: I1010 16:31:54.521111 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Oct 10 16:31:54 crc kubenswrapper[4799]: I1010 16:31:54.523264 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be"}
Oct 10 16:31:54 crc kubenswrapper[4799]: I1010 16:31:54.523333 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 10 16:31:54 crc kubenswrapper[4799]: I1010 16:31:54.523342 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 10 16:31:54 crc kubenswrapper[4799]: I1010 16:31:54.524687 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:31:54 crc kubenswrapper[4799]: I1010 16:31:54.524718 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:31:54 crc kubenswrapper[4799]: I1010 16:31:54.524729 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:31:54 crc kubenswrapper[4799]: I1010 16:31:54.524860 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:31:54 crc kubenswrapper[4799]: I1010 16:31:54.524913 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:31:54 crc kubenswrapper[4799]: I1010 16:31:54.524933 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:31:54 crc kubenswrapper[4799]: I1010 16:31:54.852972 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 10 16:31:55 crc kubenswrapper[4799]: I1010 16:31:55.283219 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 10 16:31:55 crc kubenswrapper[4799]: I1010 16:31:55.526355 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 10 16:31:55 crc kubenswrapper[4799]: I1010 16:31:55.526432 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 10 16:31:55 crc kubenswrapper[4799]: I1010 16:31:55.527829 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:31:55 crc kubenswrapper[4799]: I1010 16:31:55.527893 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:31:55 crc kubenswrapper[4799]: I1010 16:31:55.527929 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:31:55 crc kubenswrapper[4799]: I1010 16:31:55.945962 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Oct 10 16:31:55 crc kubenswrapper[4799]: I1010 16:31:55.946203 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 10 16:31:55 crc kubenswrapper[4799]: I1010 16:31:55.947982 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:31:55 crc kubenswrapper[4799]: I1010 16:31:55.948047 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:31:55 crc kubenswrapper[4799]: I1010 16:31:55.948065 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:31:56 crc kubenswrapper[4799]: I1010 16:31:56.184667 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 10 16:31:56 crc kubenswrapper[4799]: I1010 16:31:56.184928 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 10 16:31:56 crc kubenswrapper[4799]: I1010 16:31:56.186392 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:31:56 crc kubenswrapper[4799]: I1010 16:31:56.186460 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:31:56 crc kubenswrapper[4799]: I1010 16:31:56.186490 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:31:56 crc kubenswrapper[4799]: I1010 16:31:56.220807 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc"
Oct 10 16:31:56 crc kubenswrapper[4799]: I1010 16:31:56.221088 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 10 16:31:56 crc kubenswrapper[4799]: I1010 16:31:56.222886 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:31:56 crc kubenswrapper[4799]: I1010 16:31:56.222968 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:31:56 crc kubenswrapper[4799]: I1010 16:31:56.222992 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:31:56 crc kubenswrapper[4799]: I1010 16:31:56.517653 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 10 16:31:56 crc kubenswrapper[4799]: I1010 16:31:56.529865 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 10 16:31:56 crc kubenswrapper[4799]: I1010 16:31:56.530150 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 10 16:31:56 crc kubenswrapper[4799]: I1010 16:31:56.531195 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:31:56 crc kubenswrapper[4799]: I1010 16:31:56.531243 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:31:56 crc kubenswrapper[4799]: I1010 16:31:56.531261 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:31:56 crc kubenswrapper[4799]: I1010 16:31:56.531856 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:31:56 crc kubenswrapper[4799]: I1010 16:31:56.531911 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:31:56 crc kubenswrapper[4799]: I1010 16:31:56.531930 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:31:57 crc kubenswrapper[4799]: E1010 16:31:57.507852 4799 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Oct 10 16:31:57 crc kubenswrapper[4799]: I1010 16:31:57.858495 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 10 16:31:57 crc kubenswrapper[4799]: I1010 16:31:57.858829 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 10 16:31:57 crc kubenswrapper[4799]: I1010 16:31:57.860458 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:31:57 crc kubenswrapper[4799]: I1010 16:31:57.860521 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:31:57 crc kubenswrapper[4799]: I1010 16:31:57.860546 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:31:57 crc kubenswrapper[4799]: I1010 16:31:57.881567 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 10 16:31:58 crc kubenswrapper[4799]: I1010 16:31:58.534530 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 10 16:31:58 crc kubenswrapper[4799]: I1010 16:31:58.535577 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:31:58 crc kubenswrapper[4799]: I1010 16:31:58.535641 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:31:58 crc kubenswrapper[4799]: I1010 16:31:58.535660 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:31:58 crc kubenswrapper[4799]: I1010 16:31:58.541185 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 10 16:31:59 crc kubenswrapper[4799]: I1010 16:31:59.518745 4799 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body=
Oct 10 16:31:59 crc kubenswrapper[4799]: I1010 16:31:59.518854 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Oct 10 16:31:59 crc kubenswrapper[4799]: I1010 16:31:59.536628 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 10 16:31:59 crc kubenswrapper[4799]: I1010 16:31:59.537441 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:31:59 crc kubenswrapper[4799]: I1010 16:31:59.537473 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:31:59 crc kubenswrapper[4799]: I1010 16:31:59.537484 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:01 crc kubenswrapper[4799]: I1010 16:32:01.411108 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc"
Oct 10 16:32:01 crc kubenswrapper[4799]: I1010 16:32:01.411406 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 10 16:32:01 crc kubenswrapper[4799]: I1010 16:32:01.412685 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:01 crc kubenswrapper[4799]: I1010 16:32:01.412719 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:01 crc kubenswrapper[4799]: I1010 16:32:01.412731 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:03 crc kubenswrapper[4799]: I1010 16:32:03.328849 4799 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout
Oct 10 16:32:03 crc kubenswrapper[4799]: E1010 16:32:03.543726 4799 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" interval="6.4s"
Oct 10 16:32:03 crc kubenswrapper[4799]: I1010 16:32:03.609436 4799 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Oct 10 16:32:03 crc kubenswrapper[4799]: I1010 16:32:03.609561 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Oct 10 16:32:03 crc kubenswrapper[4799]: E1010 16:32:03.813870 4799 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": net/http: TLS handshake timeout" node="crc"
Oct 10 16:32:03 crc kubenswrapper[4799]: I1010 16:32:03.875855 4799 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Oct 10 16:32:03 crc kubenswrapper[4799]: I1010 16:32:03.875936 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Oct 10 16:32:03 crc kubenswrapper[4799]: I1010 16:32:03.880705 4799 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Oct 10 16:32:03 crc kubenswrapper[4799]: I1010 16:32:03.881012 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Oct 10 16:32:04 crc kubenswrapper[4799]: I1010 16:32:04.869485 4799 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok
Oct 10 16:32:04 crc kubenswrapper[4799]: [+]log ok
Oct 10 16:32:04 crc kubenswrapper[4799]: [+]etcd ok
Oct 10 16:32:04 crc kubenswrapper[4799]: [+]poststarthook/openshift.io-openshift-apiserver-reachable ok
Oct 10 16:32:04 crc kubenswrapper[4799]: [+]poststarthook/openshift.io-oauth-apiserver-reachable ok
Oct 10 16:32:04 crc kubenswrapper[4799]: [+]poststarthook/start-apiserver-admission-initializer ok
Oct 10 16:32:04 crc kubenswrapper[4799]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok
Oct 10 16:32:04 crc kubenswrapper[4799]: [+]poststarthook/openshift.io-api-request-count-filter ok
Oct 10 16:32:04 crc kubenswrapper[4799]: [+]poststarthook/openshift.io-startkubeinformers ok
Oct 10 16:32:04 crc kubenswrapper[4799]: [+]poststarthook/generic-apiserver-start-informers ok
Oct 10 16:32:04 crc kubenswrapper[4799]: [+]poststarthook/priority-and-fairness-config-consumer ok
Oct 10 16:32:04 crc kubenswrapper[4799]: [+]poststarthook/priority-and-fairness-filter ok
Oct 10 16:32:04 crc kubenswrapper[4799]: [+]poststarthook/storage-object-count-tracker-hook ok
Oct 10 16:32:04 crc kubenswrapper[4799]: [+]poststarthook/start-apiextensions-informers ok
Oct 10 16:32:04 crc kubenswrapper[4799]: [+]poststarthook/start-apiextensions-controllers ok
Oct 10 16:32:04 crc kubenswrapper[4799]: [+]poststarthook/crd-informer-synced ok
Oct 10 16:32:04 crc kubenswrapper[4799]: [+]poststarthook/start-system-namespaces-controller ok
Oct 10 16:32:04 crc kubenswrapper[4799]: [+]poststarthook/start-cluster-authentication-info-controller ok
Oct 10 16:32:04 crc kubenswrapper[4799]: [+]poststarthook/start-kube-apiserver-identity-lease-controller ok
Oct 10 16:32:04 crc kubenswrapper[4799]: [+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
Oct 10 16:32:04 crc kubenswrapper[4799]: [+]poststarthook/start-legacy-token-tracking-controller ok
Oct 10 16:32:04 crc kubenswrapper[4799]: [+]poststarthook/start-service-ip-repair-controllers ok
Oct 10 16:32:04 crc kubenswrapper[4799]: [-]poststarthook/rbac/bootstrap-roles failed: reason withheld
Oct 10 16:32:04 crc kubenswrapper[4799]: [-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
Oct 10 16:32:04 crc kubenswrapper[4799]: [+]poststarthook/priority-and-fairness-config-producer ok
Oct 10 16:32:04 crc kubenswrapper[4799]: [+]poststarthook/bootstrap-controller ok
Oct 10 16:32:04 crc kubenswrapper[4799]: [+]poststarthook/aggregator-reload-proxy-client-cert ok
Oct 10 16:32:04 crc kubenswrapper[4799]: [+]poststarthook/start-kube-aggregator-informers ok
Oct 10 16:32:04 crc kubenswrapper[4799]: [+]poststarthook/apiservice-status-local-available-controller ok
Oct 10 16:32:04 crc kubenswrapper[4799]: [+]poststarthook/apiservice-status-remote-available-controller ok
Oct 10 16:32:04 crc kubenswrapper[4799]: [+]poststarthook/apiservice-registration-controller ok
Oct 10 16:32:04 crc kubenswrapper[4799]: [+]poststarthook/apiservice-wait-for-first-sync ok
Oct 10 16:32:04 crc kubenswrapper[4799]: [+]poststarthook/apiservice-discovery-controller ok
Oct 10 16:32:04 crc kubenswrapper[4799]: [+]poststarthook/kube-apiserver-autoregistration ok
Oct 10 16:32:04 crc kubenswrapper[4799]: [+]autoregister-completion ok
Oct 10 16:32:04 crc kubenswrapper[4799]: [+]poststarthook/apiservice-openapi-controller ok
Oct 10 16:32:04 crc kubenswrapper[4799]: [+]poststarthook/apiservice-openapiv3-controller ok
Oct 10 16:32:04 crc kubenswrapper[4799]: livez check failed
Oct 10 16:32:04 crc kubenswrapper[4799]: I1010 16:32:04.869615 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 10 16:32:07 crc kubenswrapper[4799]: E1010 16:32:07.507986 4799 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Oct 10 16:32:08 crc kubenswrapper[4799]: I1010 16:32:08.869810 4799 trace.go:236] Trace[2002996663]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (10-Oct-2025 16:31:56.767) (total time: 12102ms):
Oct 10 16:32:08 crc kubenswrapper[4799]: Trace[2002996663]: ---"Objects listed" error: 12102ms (16:32:08.869)
Oct 10 16:32:08 crc kubenswrapper[4799]: Trace[2002996663]: [12.102045909s] [12.102045909s] END
Oct 10 16:32:08 crc kubenswrapper[4799]: I1010 16:32:08.870411 4799 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Oct 10 16:32:08 crc kubenswrapper[4799]: I1010 16:32:08.872207 4799 trace.go:236] Trace[56142932]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (10-Oct-2025 16:31:56.231) (total time: 12640ms):
Oct 10 16:32:08 crc kubenswrapper[4799]: Trace[56142932]: ---"Objects listed" error: 12640ms (16:32:08.872)
Oct 10 16:32:08 crc kubenswrapper[4799]: Trace[56142932]: [12.64099507s] [12.64099507s] END
Oct 10 16:32:08 crc kubenswrapper[4799]: I1010 16:32:08.872343 4799 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Oct 10 16:32:08 crc kubenswrapper[4799]: I1010 16:32:08.874246 4799 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Oct 10 16:32:08 crc kubenswrapper[4799]: I1010 16:32:08.874272 4799 trace.go:236] Trace[483830156]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (10-Oct-2025 16:31:57.233) (total time: 11640ms):
Oct 10 16:32:08 crc kubenswrapper[4799]: Trace[483830156]: ---"Objects listed" error: 11640ms (16:32:08.874)
Oct 10 16:32:08 crc kubenswrapper[4799]: Trace[483830156]: [11.6406525s] [11.6406525s] END
Oct 10 16:32:08 crc kubenswrapper[4799]: I1010 16:32:08.874819 4799 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Oct 10 16:32:08 crc kubenswrapper[4799]: I1010 16:32:08.875121 4799 trace.go:236] Trace[1335906768]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (10-Oct-2025 16:31:53.943) (total time: 14931ms):
Oct 10 16:32:08 crc kubenswrapper[4799]: Trace[1335906768]: ---"Objects listed" error: 14930ms (16:32:08.874)
Oct 10 16:32:08 crc kubenswrapper[4799]: Trace[1335906768]: [14.931117226s] [14.931117226s] END
Oct 10 16:32:08 crc kubenswrapper[4799]: I1010 16:32:08.875286 4799 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.320821 4799 apiserver.go:52] "Watching apiserver"
Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.325266 4799 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.325577 4799 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-dns/node-resolver-bsdk2","openshift-machine-config-operator/machine-config-daemon-rh8zc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb"] Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.326108 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.326209 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.326358 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-bsdk2" Oct 10 16:32:09 crc kubenswrapper[4799]: E1010 16:32:09.326382 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.326450 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.326466 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.326514 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 10 16:32:09 crc kubenswrapper[4799]: E1010 16:32:09.326567 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.326628 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.326670 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:32:09 crc kubenswrapper[4799]: E1010 16:32:09.326918 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.328370 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.329747 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.331056 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.331127 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.331155 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.331484 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.331528 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.331543 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.331615 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.331670 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.331801 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.331881 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.332400 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.332429 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.334119 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.334161 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.335073 4799 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.339045 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.356506 4799 status_manager.go:875] "Failed to update status 
for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.368107 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.378840 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.378989 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.379170 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.379273 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.379363 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.379462 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.379586 4799 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.379681 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.379792 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.379955 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.380042 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.380129 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.380224 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.380314 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.380393 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.380490 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.380598 4799 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.380680 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.380779 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.380878 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.380967 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.381064 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.381153 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.381245 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.381330 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.381420 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.381515 4799 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.381618 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.381710 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.381835 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.382022 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.382123 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.382214 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.382318 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.382429 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.382524 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 
16:32:09.382625 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.382723 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.383158 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.379222 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.379458 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.386768 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.386888 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.386904 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.380776 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.387327 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.381069 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.381297 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.381503 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.381581 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.381692 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.382197 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.382323 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.382802 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.382870 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.382894 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.382967 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.383236 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.383276 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.383379 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.383408 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: E1010 16:32:09.383435 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:32:09.883415009 +0000 UTC m=+23.391739124 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.385187 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). 
InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.385539 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.385579 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.385539 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.386230 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.386989 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.387095 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.379795 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.387146 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.387173 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.387204 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.387388 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.387433 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.387587 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.387867 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.387893 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.387931 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.387954 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.387980 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.388006 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.388030 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.388052 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.388076 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod 
\"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.388106 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.388133 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.388154 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.387655 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.387982 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.388170 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.388181 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.388274 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.388308 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.388334 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.388357 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.388387 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.388412 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.388437 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.388462 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.388487 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod 
\"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.388512 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.388520 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.388537 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.388558 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.388575 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.388594 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.388618 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.388695 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.388719 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.388958 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.388689 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.388982 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.389008 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.389066 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.389103 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.389128 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.389151 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.389168 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.389185 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.389213 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.389231 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.389247 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.389264 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.389280 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.389298 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.389314 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.389332 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.389349 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.389366 4799 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.389362 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.389383 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.389437 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.389467 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.389493 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.389518 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.389560 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.389585 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.389746 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: 
\"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.389819 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.389953 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.390051 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.390398 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.390853 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.390880 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.390899 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.390916 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.390934 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.390955 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.390983 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.391033 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.391149 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.391217 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.391297 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.391331 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.391378 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.391433 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.391478 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.391582 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.391628 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.391653 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.391689 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.391712 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.391728 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.391747 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.391784 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.391804 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.391820 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.391844 4799 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.392442 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.392719 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.392734 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.392892 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.393008 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.393448 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.393621 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.393720 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.394848 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.394984 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.395018 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.395360 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.395403 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.395482 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.395614 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.395954 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.395986 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.396200 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.396338 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.396624 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.396997 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.397010 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.397313 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.397320 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.397788 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.397806 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.398110 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.398335 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.398414 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). 
InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.398665 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.398672 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.398959 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.394727 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.392927 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.400243 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.400277 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.400390 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.400426 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.400457 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.400487 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.400513 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.400540 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.400565 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: 
\"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.400591 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.400618 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.400642 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.400666 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.400692 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.400714 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.400737 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.400774 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.400800 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.400822 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.400846 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.400899 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.400918 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.400963 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.400980 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.400998 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401015 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401029 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401045 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401061 4799 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401078 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401100 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401121 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401137 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401152 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401177 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401200 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401220 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401236 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401253 4799 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401268 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401284 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401300 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401320 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401338 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401358 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401445 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401535 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401573 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401606 
4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401636 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401664 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401696 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401722 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401745 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401790 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401779 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401814 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401823 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401837 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401864 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401891 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401918 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401953 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401975 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.401996 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.402021 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.402045 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.402096 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: 
"kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.402121 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.402214 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.402319 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.402514 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hfkr4\" (UniqueName: \"kubernetes.io/projected/6cebefda-e31d-4be2-9bf4-8e1f8ec002cb-kube-api-access-hfkr4\") pod \"machine-config-daemon-rh8zc\" (UID: \"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\") " pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.402620 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.402664 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/6cebefda-e31d-4be2-9bf4-8e1f8ec002cb-rootfs\") pod \"machine-config-daemon-rh8zc\" (UID: \"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\") " pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.402698 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.402734 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.402788 4799 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.402815 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.402849 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/823e91d3-003d-4cbb-bc72-004e1708c19d-hosts-file\") pod \"node-resolver-bsdk2\" (UID: \"823e91d3-003d-4cbb-bc72-004e1708c19d\") " pod="openshift-dns/node-resolver-bsdk2" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.402879 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chgmf\" (UniqueName: \"kubernetes.io/projected/823e91d3-003d-4cbb-bc72-004e1708c19d-kube-api-access-chgmf\") pod \"node-resolver-bsdk2\" (UID: \"823e91d3-003d-4cbb-bc72-004e1708c19d\") " pod="openshift-dns/node-resolver-bsdk2" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.402906 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.402929 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.402957 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403089 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403114 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: 
\"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403137 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6cebefda-e31d-4be2-9bf4-8e1f8ec002cb-proxy-tls\") pod \"machine-config-daemon-rh8zc\" (UID: \"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\") " pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403160 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6cebefda-e31d-4be2-9bf4-8e1f8ec002cb-mcd-auth-proxy-config\") pod \"machine-config-daemon-rh8zc\" (UID: \"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\") " pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403186 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403216 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403230 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403246 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403272 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403361 4799 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403383 4799 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403398 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403411 4799 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403424 4799 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403439 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403452 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403466 4799 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403478 4799 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403491 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 
16:32:09.403539 4799 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403551 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403564 4799 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403578 4799 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403593 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403607 4799 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403620 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403632 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403645 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403658 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403670 4799 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403682 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403695 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403709 
4799 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403722 4799 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403736 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403747 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403777 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403790 4799 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403811 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403822 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403835 4799 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403850 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403862 4799 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403875 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403888 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403902 4799 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403939 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403977 4799 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403991 4799 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404003 4799 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404056 4799 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404070 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404083 4799 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404095 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404108 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404120 4799 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404133 4799 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404146 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404159 4799 reconciler_common.go:293] "Volume detached for 
volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404172 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404183 4799 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404196 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404208 4799 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404220 4799 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404232 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404245 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404257 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404269 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404281 4799 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404296 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404307 4799 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404319 4799 reconciler_common.go:293] "Volume detached for volume 
\"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404332 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404344 4799 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404357 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404369 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404381 4799 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404395 4799 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404407 4799 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404420 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404432 4799 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404444 4799 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404457 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404468 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404485 4799 reconciler_common.go:293] "Volume detached for 
volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404496 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404523 4799 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403269 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403427 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.403613 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.404880 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.405123 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.405201 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.405222 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.405229 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.405435 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.405774 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.406141 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.406324 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.406486 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.406537 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.406692 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.406779 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.407286 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.408140 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.408318 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.408639 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.408889 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.409152 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.409344 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.410294 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.413394 4799 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. 
Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.414557 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.402202 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.409868 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.409887 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.409982 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.410345 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.410813 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.410873 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.410785 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.411239 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.411455 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.411469 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: E1010 16:32:09.411926 4799 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 10 16:32:09 crc kubenswrapper[4799]: E1010 16:32:09.418565 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-10 16:32:09.918542285 +0000 UTC m=+23.426866400 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.411944 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.411960 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.412203 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.412560 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.412878 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.412910 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.413070 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.413197 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.413465 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.413571 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.413627 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.413649 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.413703 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.413720 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.413928 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.413994 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.414331 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.414351 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.414478 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.414537 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.415057 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.415077 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.417500 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). 
InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.417588 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.418087 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.418111 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.421740 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.422981 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.423404 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.425873 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.425930 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.425938 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.425941 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.425988 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.426167 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.426294 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.426336 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.426383 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: E1010 16:32:09.426588 4799 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.427957 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.426848 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.426725 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.426889 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.427327 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: E1010 16:32:09.428029 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-10 16:32:09.928003558 +0000 UTC m=+23.436327873 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.427938 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.427773 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.428929 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.429163 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.429162 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.429208 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.429564 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.429713 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.430193 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.430723 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.431345 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.441136 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.441518 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: E1010 16:32:09.441599 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 10 16:32:09 crc kubenswrapper[4799]: E1010 16:32:09.441621 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 10 16:32:09 crc kubenswrapper[4799]: E1010 16:32:09.441637 4799 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 10 16:32:09 crc kubenswrapper[4799]: E1010 16:32:09.441700 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-10 16:32:09.941680559 +0000 UTC m=+23.450004674 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.441926 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.441957 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.442635 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.442712 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.444607 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.441152 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.445463 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.445640 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.445948 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.446504 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.446919 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.447140 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.447613 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.447669 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.447861 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.448309 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.448608 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.448721 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.449216 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.449612 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: E1010 16:32:09.450307 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 10 16:32:09 crc kubenswrapper[4799]: E1010 16:32:09.450389 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 10 16:32:09 crc kubenswrapper[4799]: E1010 16:32:09.450406 4799 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 10 16:32:09 crc kubenswrapper[4799]: E1010 16:32:09.450471 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-10 16:32:09.950448516 +0000 UTC m=+23.458772621 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.449293 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.449253 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.452475 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.453899 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.454145 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.454265 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.454615 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.454704 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.454779 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.455035 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.455110 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.455301 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.455539 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.455776 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.455969 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.456359 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.456704 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.457127 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.457878 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.460119 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.460485 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.461665 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.470948 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.472967 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.477050 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.478370 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.478803 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.480278 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.481739 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.481902 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.483269 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.484011 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.484512 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.485657 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.486228 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.486678 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.486866 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.487618 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.487928 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.489931 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.489948 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.491332 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.497125 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.498429 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.500307 4799 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.500501 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.504459 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.505171 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6cebefda-e31d-4be2-9bf4-8e1f8ec002cb-proxy-tls\") pod \"machine-config-daemon-rh8zc\" (UID: \"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\") " pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.505273 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6cebefda-e31d-4be2-9bf4-8e1f8ec002cb-mcd-auth-proxy-config\") pod \"machine-config-daemon-rh8zc\" (UID: \"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\") " pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.505342 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: 
\"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.505409 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.505475 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hfkr4\" (UniqueName: \"kubernetes.io/projected/6cebefda-e31d-4be2-9bf4-8e1f8ec002cb-kube-api-access-hfkr4\") pod \"machine-config-daemon-rh8zc\" (UID: \"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\") " pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.505548 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/6cebefda-e31d-4be2-9bf4-8e1f8ec002cb-rootfs\") pod \"machine-config-daemon-rh8zc\" (UID: \"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\") " pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.505630 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/823e91d3-003d-4cbb-bc72-004e1708c19d-hosts-file\") pod \"node-resolver-bsdk2\" (UID: \"823e91d3-003d-4cbb-bc72-004e1708c19d\") " pod="openshift-dns/node-resolver-bsdk2" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.505702 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chgmf\" (UniqueName: \"kubernetes.io/projected/823e91d3-003d-4cbb-bc72-004e1708c19d-kube-api-access-chgmf\") pod \"node-resolver-bsdk2\" (UID: \"823e91d3-003d-4cbb-bc72-004e1708c19d\") " pod="openshift-dns/node-resolver-bsdk2" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.505908 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.505985 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/6cebefda-e31d-4be2-9bf4-8e1f8ec002cb-rootfs\") pod \"machine-config-daemon-rh8zc\" (UID: \"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\") " pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.506056 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.506100 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: 
\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.506100 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/823e91d3-003d-4cbb-bc72-004e1708c19d-hosts-file\") pod \"node-resolver-bsdk2\" (UID: \"823e91d3-003d-4cbb-bc72-004e1708c19d\") " pod="openshift-dns/node-resolver-bsdk2" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508066 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508198 4799 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508219 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508234 4799 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508253 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508268 4799 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508281 4799 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508362 4799 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508565 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508581 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508597 4799 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508613 4799 
reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508627 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508641 4799 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508653 4799 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508669 4799 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508683 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508698 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508713 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508728 4799 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508742 4799 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508776 4799 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508791 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508806 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: 
I1010 16:32:09.508819 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508835 4799 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508847 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508861 4799 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508877 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508895 4799 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508910 4799 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508925 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508938 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508952 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508964 4799 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508977 4799 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.508991 4799 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath 
\"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509003 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509018 4799 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509031 4799 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509042 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509054 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509066 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509077 4799 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509091 4799 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509104 4799 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509115 4799 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509127 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509139 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509159 4799 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" 
DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509172 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509186 4799 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509200 4799 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509214 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509226 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509239 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509251 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509264 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509278 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509292 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509305 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509318 4799 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509331 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: 
\"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509344 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509355 4799 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509367 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509379 4799 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509393 4799 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509406 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509418 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509432 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509444 4799 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509457 4799 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509472 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509559 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509570 4799 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: 
\"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509582 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509593 4799 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509605 4799 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509617 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509629 4799 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509641 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509653 4799 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509665 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509677 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509691 4799 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509702 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509713 4799 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509724 4799 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509734 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509746 4799 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509779 4799 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509822 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509832 4799 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509843 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509857 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509870 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509966 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509977 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.509988 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.510001 4799 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.510014 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.510025 4799 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.510035 4799 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.510046 4799 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.510057 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.510068 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.510078 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.510089 4799 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.510101 4799 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.510112 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.510122 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.510133 4799 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.510145 4799 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.510157 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: 
\"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.511113 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.511315 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6cebefda-e31d-4be2-9bf4-8e1f8ec002cb-proxy-tls\") pod \"machine-config-daemon-rh8zc\" (UID: \"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\") " pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.511696 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6cebefda-e31d-4be2-9bf4-8e1f8ec002cb-mcd-auth-proxy-config\") pod \"machine-config-daemon-rh8zc\" (UID: \"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\") " pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.511964 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.513668 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.514675 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.515236 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.515863 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.517077 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.517545 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.518048 4799 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.518109 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.518486 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.519545 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.520326 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.520857 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.521834 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.522990 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.523817 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.524427 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.524507 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chgmf\" (UniqueName: \"kubernetes.io/projected/823e91d3-003d-4cbb-bc72-004e1708c19d-kube-api-access-chgmf\") pod \"node-resolver-bsdk2\" (UID: \"823e91d3-003d-4cbb-bc72-004e1708c19d\") " pod="openshift-dns/node-resolver-bsdk2" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.525458 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.525973 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.526934 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.527521 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.528850 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.529489 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hfkr4\" (UniqueName: \"kubernetes.io/projected/6cebefda-e31d-4be2-9bf4-8e1f8ec002cb-kube-api-access-hfkr4\") pod \"machine-config-daemon-rh8zc\" (UID: \"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\") " pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.530014 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-gg5hb"] Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.530311 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-nptcz"] Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.530600 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.532610 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-mcwfc"] Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.533603 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.533640 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-nptcz" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.537512 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.537965 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.538348 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.538594 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.541260 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.541498 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.541641 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.541770 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.541805 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.541895 4799 reflector.go:368] Caches populated 
for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.541972 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.541998 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.543213 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.543439 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.548731 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.563874 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.567009 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.567537 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.571212 4799 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be" exitCode=255 Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.571269 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be"} Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.571331 4799 scope.go:117] "RemoveContainer" containerID="55af9201526519d123335f21cc2dada8e280f5d90efb03821a45fe469c7b2ede" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.572575 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.581137 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.588984 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.597148 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.608064 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.610431 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-slash\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.610464 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-var-lib-openvswitch\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.610518 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-run-netns\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.610540 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-multus-cni-dir\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.610559 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-cni-netd\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.610604 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-system-cni-dir\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.610624 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: 
\"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-host-run-k8s-cni-cncf-io\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.610703 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-host-var-lib-cni-bin\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.610728 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfxtj\" (UniqueName: \"kubernetes.io/projected/96840de9-4451-4499-81fa-a19c62239007-kube-api-access-qfxtj\") pod \"multus-additional-cni-plugins-nptcz\" (UID: \"96840de9-4451-4499-81fa-a19c62239007\") " pod="openshift-multus/multus-additional-cni-plugins-nptcz" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.610746 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-host-var-lib-kubelet\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.610792 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-multus-conf-dir\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.610812 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/f000ac73-b5de-47c8-a0a7-84bd06475f62-multus-daemon-config\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.610834 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-etc-kubernetes\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.610871 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/abe7f2d9-ec99-4724-a01f-cc7096377e07-ovnkube-script-lib\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.610887 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/96840de9-4451-4499-81fa-a19c62239007-system-cni-dir\") pod \"multus-additional-cni-plugins-nptcz\" (UID: \"96840de9-4451-4499-81fa-a19c62239007\") " pod="openshift-multus/multus-additional-cni-plugins-nptcz" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.611069 4799 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-run-ovn\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.611139 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-hostroot\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.611228 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-kubelet\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.611264 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-run-systemd\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.611321 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-run-openvswitch\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.611371 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/96840de9-4451-4499-81fa-a19c62239007-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-nptcz\" (UID: \"96840de9-4451-4499-81fa-a19c62239007\") " pod="openshift-multus/multus-additional-cni-plugins-nptcz" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.611393 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9g7t\" (UniqueName: \"kubernetes.io/projected/f000ac73-b5de-47c8-a0a7-84bd06475f62-kube-api-access-w9g7t\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.611408 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-host-var-lib-cni-multus\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.612874 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/96840de9-4451-4499-81fa-a19c62239007-cni-binary-copy\") pod \"multus-additional-cni-plugins-nptcz\" (UID: \"96840de9-4451-4499-81fa-a19c62239007\") " pod="openshift-multus/multus-additional-cni-plugins-nptcz" Oct 10 16:32:09 
crc kubenswrapper[4799]: I1010 16:32:09.612935 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-host-run-netns\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.612985 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-cni-bin\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.613017 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.613048 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7pr44\" (UniqueName: \"kubernetes.io/projected/abe7f2d9-ec99-4724-a01f-cc7096377e07-kube-api-access-7pr44\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.613083 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-log-socket\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.613113 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-run-ovn-kubernetes\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.613142 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/96840de9-4451-4499-81fa-a19c62239007-tuning-conf-dir\") pod \"multus-additional-cni-plugins-nptcz\" (UID: \"96840de9-4451-4499-81fa-a19c62239007\") " pod="openshift-multus/multus-additional-cni-plugins-nptcz" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.613172 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-cnibin\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.613194 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-os-release\") pod \"multus-gg5hb\" (UID: 
\"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.613241 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-systemd-units\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.613267 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-etc-openvswitch\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.613306 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-multus-socket-dir-parent\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.613339 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-node-log\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.613371 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/abe7f2d9-ec99-4724-a01f-cc7096377e07-env-overrides\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.613404 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/96840de9-4451-4499-81fa-a19c62239007-os-release\") pod \"multus-additional-cni-plugins-nptcz\" (UID: \"96840de9-4451-4499-81fa-a19c62239007\") " pod="openshift-multus/multus-additional-cni-plugins-nptcz" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.613429 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f000ac73-b5de-47c8-a0a7-84bd06475f62-cni-binary-copy\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.613464 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/abe7f2d9-ec99-4724-a01f-cc7096377e07-ovnkube-config\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.613500 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/96840de9-4451-4499-81fa-a19c62239007-cnibin\") pod 
\"multus-additional-cni-plugins-nptcz\" (UID: \"96840de9-4451-4499-81fa-a19c62239007\") " pod="openshift-multus/multus-additional-cni-plugins-nptcz" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.613536 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/abe7f2d9-ec99-4724-a01f-cc7096377e07-ovn-node-metrics-cert\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.613564 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-host-run-multus-certs\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.621099 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.631062 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.641252 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.643334 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-bsdk2" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.649819 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.654660 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 10 16:32:09 crc kubenswrapper[4799]: W1010 16:32:09.655014 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod823e91d3_003d_4cbb_bc72_004e1708c19d.slice/crio-36b30926061640a6066e293370d53dbff9bd6dc747046d6d002d987be0035729 WatchSource:0}: Error finding container 36b30926061640a6066e293370d53dbff9bd6dc747046d6d002d987be0035729: Status 404 returned error can't find the container with id 36b30926061640a6066e293370d53dbff9bd6dc747046d6d002d987be0035729 Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.665572 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.666610 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20994
82919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":
\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168
.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: W1010 16:32:09.667479 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-247d77cf98f8eb958c8329a38cda67b469eec2724808f904cadde1fb50836217 WatchSource:0}: Error finding container 247d77cf98f8eb958c8329a38cda67b469eec2724808f904cadde1fb50836217: Status 404 returned error can't find the container with id 247d77cf98f8eb958c8329a38cda67b469eec2724808f904cadde1fb50836217 Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.675950 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.677587 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.683483 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 10 16:32:09 crc kubenswrapper[4799]: W1010 16:32:09.687994 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-38507c8f31a377a2d4c9f7c8ef496f8b1a4e6eae57026a9848ffeefc3c93e031 WatchSource:0}: Error finding container 38507c8f31a377a2d4c9f7c8ef496f8b1a4e6eae57026a9848ffeefc3c93e031: Status 404 returned error can't find the container with id 38507c8f31a377a2d4c9f7c8ef496f8b1a4e6eae57026a9848ffeefc3c93e031 Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.688123 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.700042 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.710056 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.714147 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/abe7f2d9-ec99-4724-a01f-cc7096377e07-env-overrides\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.714184 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/96840de9-4451-4499-81fa-a19c62239007-os-release\") pod \"multus-additional-cni-plugins-nptcz\" (UID: \"96840de9-4451-4499-81fa-a19c62239007\") " pod="openshift-multus/multus-additional-cni-plugins-nptcz" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.714203 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f000ac73-b5de-47c8-a0a7-84bd06475f62-cni-binary-copy\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.714224 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/abe7f2d9-ec99-4724-a01f-cc7096377e07-ovnkube-config\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.714246 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/96840de9-4451-4499-81fa-a19c62239007-cnibin\") pod \"multus-additional-cni-plugins-nptcz\" (UID: 
\"96840de9-4451-4499-81fa-a19c62239007\") " pod="openshift-multus/multus-additional-cni-plugins-nptcz" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.714266 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/abe7f2d9-ec99-4724-a01f-cc7096377e07-ovn-node-metrics-cert\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.714285 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-host-run-multus-certs\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.714305 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-slash\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.714326 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-var-lib-openvswitch\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.714363 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-run-netns\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.714380 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-multus-cni-dir\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.714458 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-slash\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.714511 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-host-run-multus-certs\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.714538 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/96840de9-4451-4499-81fa-a19c62239007-os-release\") pod \"multus-additional-cni-plugins-nptcz\" (UID: \"96840de9-4451-4499-81fa-a19c62239007\") " pod="openshift-multus/multus-additional-cni-plugins-nptcz" Oct 10 16:32:09 crc 
kubenswrapper[4799]: I1010 16:32:09.714543 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-var-lib-openvswitch\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.714564 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-run-netns\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.714591 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/96840de9-4451-4499-81fa-a19c62239007-cnibin\") pod \"multus-additional-cni-plugins-nptcz\" (UID: \"96840de9-4451-4499-81fa-a19c62239007\") " pod="openshift-multus/multus-additional-cni-plugins-nptcz" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.714622 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-cni-netd\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.714647 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-system-cni-dir\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.714895 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-host-run-k8s-cni-cncf-io\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.714922 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-host-var-lib-cni-bin\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.714941 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfxtj\" (UniqueName: \"kubernetes.io/projected/96840de9-4451-4499-81fa-a19c62239007-kube-api-access-qfxtj\") pod \"multus-additional-cni-plugins-nptcz\" (UID: \"96840de9-4451-4499-81fa-a19c62239007\") " pod="openshift-multus/multus-additional-cni-plugins-nptcz" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.714958 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-host-var-lib-kubelet\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.714976 4799 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-multus-conf-dir\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.714994 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/f000ac73-b5de-47c8-a0a7-84bd06475f62-multus-daemon-config\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.714981 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-host-run-k8s-cni-cncf-io\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.714851 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-cni-netd\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.714867 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-multus-cni-dir\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.715048 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/abe7f2d9-ec99-4724-a01f-cc7096377e07-env-overrides\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.715062 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-etc-kubernetes\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.714828 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-system-cni-dir\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.715017 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-etc-kubernetes\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.715146 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/abe7f2d9-ec99-4724-a01f-cc7096377e07-ovnkube-script-lib\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.715103 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-host-var-lib-cni-bin\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.715246 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/96840de9-4451-4499-81fa-a19c62239007-system-cni-dir\") pod \"multus-additional-cni-plugins-nptcz\" (UID: \"96840de9-4451-4499-81fa-a19c62239007\") " pod="openshift-multus/multus-additional-cni-plugins-nptcz" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.715275 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-multus-conf-dir\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.715302 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-run-ovn\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.715278 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-run-ovn\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.715331 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-host-var-lib-kubelet\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.715245 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f000ac73-b5de-47c8-a0a7-84bd06475f62-cni-binary-copy\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.715364 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-hostroot\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.715371 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/96840de9-4451-4499-81fa-a19c62239007-system-cni-dir\") pod \"multus-additional-cni-plugins-nptcz\" (UID: \"96840de9-4451-4499-81fa-a19c62239007\") " pod="openshift-multus/multus-additional-cni-plugins-nptcz" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.715405 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" 
(UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-hostroot\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.715500 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-kubelet\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.715525 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-run-systemd\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.715541 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-run-openvswitch\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.715594 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/96840de9-4451-4499-81fa-a19c62239007-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-nptcz\" (UID: \"96840de9-4451-4499-81fa-a19c62239007\") " pod="openshift-multus/multus-additional-cni-plugins-nptcz" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.715675 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9g7t\" (UniqueName: \"kubernetes.io/projected/f000ac73-b5de-47c8-a0a7-84bd06475f62-kube-api-access-w9g7t\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.715730 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-host-var-lib-cni-multus\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.715846 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/96840de9-4451-4499-81fa-a19c62239007-cni-binary-copy\") pod \"multus-additional-cni-plugins-nptcz\" (UID: \"96840de9-4451-4499-81fa-a19c62239007\") " pod="openshift-multus/multus-additional-cni-plugins-nptcz" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.715905 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-kubelet\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.715917 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-run-openvswitch\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.715927 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/f000ac73-b5de-47c8-a0a7-84bd06475f62-multus-daemon-config\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.715956 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-host-var-lib-cni-multus\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.716031 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-run-systemd\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.716143 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-host-run-netns\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.716212 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-host-run-netns\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.716263 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-cni-bin\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.716289 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-cni-bin\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.716290 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.716330 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-var-lib-cni-networks-ovn-kubernetes\") pod 
\"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.716382 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7pr44\" (UniqueName: \"kubernetes.io/projected/abe7f2d9-ec99-4724-a01f-cc7096377e07-kube-api-access-7pr44\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.716414 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-log-socket\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.716437 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-run-ovn-kubernetes\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.716460 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/96840de9-4451-4499-81fa-a19c62239007-tuning-conf-dir\") pod \"multus-additional-cni-plugins-nptcz\" (UID: \"96840de9-4451-4499-81fa-a19c62239007\") " pod="openshift-multus/multus-additional-cni-plugins-nptcz" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.716485 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-cnibin\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.716493 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-run-ovn-kubernetes\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.716471 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-log-socket\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.716458 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/96840de9-4451-4499-81fa-a19c62239007-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-nptcz\" (UID: \"96840de9-4451-4499-81fa-a19c62239007\") " pod="openshift-multus/multus-additional-cni-plugins-nptcz" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.716571 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-os-release\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " 
pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.716517 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-os-release\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.716587 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/96840de9-4451-4499-81fa-a19c62239007-cni-binary-copy\") pod \"multus-additional-cni-plugins-nptcz\" (UID: \"96840de9-4451-4499-81fa-a19c62239007\") " pod="openshift-multus/multus-additional-cni-plugins-nptcz" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.716631 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-cnibin\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.716634 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-systemd-units\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.716659 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-systemd-units\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.716670 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-etc-openvswitch\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.716694 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-etc-openvswitch\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.716695 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-multus-socket-dir-parent\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.716733 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/f000ac73-b5de-47c8-a0a7-84bd06475f62-multus-socket-dir-parent\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.716734 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-node-log\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.717133 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/abe7f2d9-ec99-4724-a01f-cc7096377e07-ovnkube-config\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.717605 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/96840de9-4451-4499-81fa-a19c62239007-tuning-conf-dir\") pod \"multus-additional-cni-plugins-nptcz\" (UID: \"96840de9-4451-4499-81fa-a19c62239007\") " pod="openshift-multus/multus-additional-cni-plugins-nptcz" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.717723 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-node-log\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.718967 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/abe7f2d9-ec99-4724-a01f-cc7096377e07-ovnkube-script-lib\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.720339 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/abe7f2d9-ec99-4724-a01f-cc7096377e07-ovn-node-metrics-cert\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.734123 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.737485 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfxtj\" (UniqueName: \"kubernetes.io/projected/96840de9-4451-4499-81fa-a19c62239007-kube-api-access-qfxtj\") pod \"multus-additional-cni-plugins-nptcz\" (UID: \"96840de9-4451-4499-81fa-a19c62239007\") " pod="openshift-multus/multus-additional-cni-plugins-nptcz" Oct 10 16:32:09 crc kubenswrapper[4799]: W1010 16:32:09.738193 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-df2ddb52268fb46ee82f69c8a65c4a3198a0ecf237c49ecb901927ca3151cf12 WatchSource:0}: Error finding container df2ddb52268fb46ee82f69c8a65c4a3198a0ecf237c49ecb901927ca3151cf12: Status 404 returned error can't find the container with id df2ddb52268fb46ee82f69c8a65c4a3198a0ecf237c49ecb901927ca3151cf12 Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.739333 4799 scope.go:117] "RemoveContainer" containerID="10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be" Oct 10 16:32:09 crc kubenswrapper[4799]: E1010 16:32:09.739532 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.739923 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.745394 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9g7t\" (UniqueName: \"kubernetes.io/projected/f000ac73-b5de-47c8-a0a7-84bd06475f62-kube-api-access-w9g7t\") pod \"multus-gg5hb\" (UID: \"f000ac73-b5de-47c8-a0a7-84bd06475f62\") " pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.752146 4799 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.775287 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7pr44\" (UniqueName: \"kubernetes.io/projected/abe7f2d9-ec99-4724-a01f-cc7096377e07-kube-api-access-7pr44\") pod \"ovnkube-node-mcwfc\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.801531 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.810686 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.855914 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.862798 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-gg5hb" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.865638 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.873462 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-nptcz" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.875425 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.883142 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.889459 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.900694 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.908912 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: W1010 16:32:09.913404 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod96840de9_4451_4499_81fa_a19c62239007.slice/crio-c73fe502672f58b1408283afc131959bd416068e76814e1f63db0ff2997407ec WatchSource:0}: Error finding container c73fe502672f58b1408283afc131959bd416068e76814e1f63db0ff2997407ec: Status 404 returned error can't find the container with id c73fe502672f58b1408283afc131959bd416068e76814e1f63db0ff2997407ec Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.918043 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:32:09 crc kubenswrapper[4799]: E1010 16:32:09.918193 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:32:10.918167543 +0000 UTC m=+24.426491658 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.923917 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.936651 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\
\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55af9201526519d123335f21cc2dada8e280f5d90efb03821a45fe469c7b2ede\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:31:52Z\\\",\\\"message\\\":\\\"W1010 16:31:51.656241 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1010 16:31:51.656537 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760113911 cert, and key in /tmp/serving-cert-3493803189/serving-signer.crt, /tmp/serving-cert-3493803189/serving-signer.key\\\\nI1010 16:31:51.994611 1 observer_polling.go:159] Starting file observer\\\\nW1010 16:31:51.998107 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1010 16:31:51.998325 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:31:51.998970 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3493803189/tls.crt::/tmp/serving-cert-3493803189/tls.key\\\\\\\"\\\\nF1010 16:31:52.700035 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 
maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 
16:32:09.949317 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.963697 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.978786 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.988457 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:09 crc kubenswrapper[4799]: I1010 16:32:09.998139 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.019320 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.019356 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.019376 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.019402 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:32:10 crc kubenswrapper[4799]: E1010 16:32:10.019488 4799 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 10 16:32:10 crc kubenswrapper[4799]: E1010 16:32:10.019532 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert 
podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-10 16:32:11.019518976 +0000 UTC m=+24.527843091 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 10 16:32:10 crc kubenswrapper[4799]: E1010 16:32:10.019576 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 10 16:32:10 crc kubenswrapper[4799]: E1010 16:32:10.019586 4799 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 10 16:32:10 crc kubenswrapper[4799]: E1010 16:32:10.019661 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 10 16:32:10 crc kubenswrapper[4799]: E1010 16:32:10.019695 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 10 16:32:10 crc kubenswrapper[4799]: E1010 16:32:10.019705 4799 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 10 16:32:10 crc kubenswrapper[4799]: E1010 16:32:10.019672 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-10 16:32:11.019654659 +0000 UTC m=+24.527978774 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 10 16:32:10 crc kubenswrapper[4799]: E1010 16:32:10.019781 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-10 16:32:11.019766361 +0000 UTC m=+24.528090476 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 10 16:32:10 crc kubenswrapper[4799]: E1010 16:32:10.019607 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 10 16:32:10 crc kubenswrapper[4799]: E1010 16:32:10.019794 4799 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 10 16:32:10 crc kubenswrapper[4799]: E1010 16:32:10.019815 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-10 16:32:11.019809542 +0000 UTC m=+24.528133657 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.214743 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.216181 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.216371 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.216386 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.216490 4799 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.223350 4799 kubelet_node_status.go:115] "Node was previously registered" node="crc" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.223587 4799 kubelet_node_status.go:79] "Successfully registered node" node="crc" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.224778 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.224842 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.224855 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.224873 4799 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeNotReady" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.224884 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:10Z","lastTransitionTime":"2025-10-10T16:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:10 crc kubenswrapper[4799]: E1010 16:32:10.238541 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.242659 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.242698 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.242707 4799 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.242724 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.242732 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:10Z","lastTransitionTime":"2025-10-10T16:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:10 crc kubenswrapper[4799]: E1010 16:32:10.254793 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.259270 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.259323 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.259335 4799 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.259354 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.259397 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:10Z","lastTransitionTime":"2025-10-10T16:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:10 crc kubenswrapper[4799]: E1010 16:32:10.271265 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.275172 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.275215 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.275224 4799 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.275241 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.275250 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:10Z","lastTransitionTime":"2025-10-10T16:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:10 crc kubenswrapper[4799]: E1010 16:32:10.285030 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.286974 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.291518 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.291565 4799 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.291579 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.291600 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.291612 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:10Z","lastTransitionTime":"2025-10-10T16:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:10 crc kubenswrapper[4799]: E1010 16:32:10.300259 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[...],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" [images array in this retry elided; it is byte-for-byte identical to the first patch attempt above] Oct 10 16:32:10 crc kubenswrapper[4799]: E1010 16:32:10.300444 4799 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.301744 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.302344 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.302385 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.302410 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.302427 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.302436 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:10Z","lastTransitionTime":"2025-10-10T16:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.311138 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.323605 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.333426 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.362054 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller 
ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath
\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin
\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.401642 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.401665 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:32:10 crc kubenswrapper[4799]: E1010 16:32:10.401789 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:32:10 crc kubenswrapper[4799]: E1010 16:32:10.401924 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.404326 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.404365 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.404374 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.404391 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.404404 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:10Z","lastTransitionTime":"2025-10-10T16:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.415782 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55af9201526519d123335f21cc2dada8e280f5d90efb03821a45fe469c7b2ede\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:31:52Z\\\",\\\"message\\\":\\\"W1010 16:31:51.656241 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1010 16:31:51.656537 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760113911 cert, and key in /tmp/serving-cert-3493803189/serving-signer.crt, /tmp/serving-cert-3493803189/serving-signer.key\\\\nI1010 16:31:51.994611 1 observer_polling.go:159] Starting file observer\\\\nW1010 16:31:51.998107 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1010 16:31:51.998325 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:31:51.998970 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3493803189/tls.crt::/tmp/serving-cert-3493803189/tls.key\\\\\\\"\\\\nF1010 16:31:52.700035 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 
maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.429767 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.449576 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.460482 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.472453 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.480903 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.488307 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:10 crc 
kubenswrapper[4799]: I1010 16:32:10.507276 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.507323 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.507333 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.507349 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.507360 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:10Z","lastTransitionTime":"2025-10-10T16:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.574902 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" event={"ID":"96840de9-4451-4499-81fa-a19c62239007","Type":"ContainerStarted","Data":"d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53"} Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.574945 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" event={"ID":"96840de9-4451-4499-81fa-a19c62239007","Type":"ContainerStarted","Data":"c73fe502672f58b1408283afc131959bd416068e76814e1f63db0ff2997407ec"} Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.576481 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403"} Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.576519 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187"} Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.576534 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"df2ddb52268fb46ee82f69c8a65c4a3198a0ecf237c49ecb901927ca3151cf12"} Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.577384 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"247d77cf98f8eb958c8329a38cda67b469eec2724808f904cadde1fb50836217"} Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.579352 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616"} Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 
16:32:10.579389 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"38507c8f31a377a2d4c9f7c8ef496f8b1a4e6eae57026a9848ffeefc3c93e031"} Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.580986 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.583435 4799 scope.go:117] "RemoveContainer" containerID="10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be" Oct 10 16:32:10 crc kubenswrapper[4799]: E1010 16:32:10.583647 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.584969 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-bsdk2" event={"ID":"823e91d3-003d-4cbb-bc72-004e1708c19d","Type":"ContainerStarted","Data":"ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6"} Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.585005 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-bsdk2" event={"ID":"823e91d3-003d-4cbb-bc72-004e1708c19d","Type":"ContainerStarted","Data":"36b30926061640a6066e293370d53dbff9bd6dc747046d6d002d987be0035729"} Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.590139 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c"} Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.590180 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5"} Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.590195 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"9580ab982f2beb4a01a69322e3ab78775906bf4b78d4135d89910a0bbc8a43a5"} Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.591696 4799 generic.go:334] "Generic (PLEG): container finished" podID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerID="d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83" exitCode=0 Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.591746 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" event={"ID":"abe7f2d9-ec99-4724-a01f-cc7096377e07","Type":"ContainerDied","Data":"d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83"} Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.591785 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" 
event={"ID":"abe7f2d9-ec99-4724-a01f-cc7096377e07","Type":"ContainerStarted","Data":"5b6b940562569201c6b4876710f0f0e2e4a2e13e9f3db42bf255fa61f31e09e1"} Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.592618 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6
355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55af9201526519d123335f21cc2dada8e280f5d90efb03821a45fe469c7b2ede\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:31:52Z\\\",\\\"message\\\":\\\"W1010 16:31:51.656241 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1010 16:31:51.656537 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760113911 cert, and key in /tmp/serving-cert-3493803189/serving-signer.crt, /tmp/serving-cert-3493803189/serving-signer.key\\\\nI1010 16:31:51.994611 1 observer_polling.go:159] Starting file observer\\\\nW1010 16:31:51.998107 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1010 16:31:51.998325 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:31:51.998970 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3493803189/tls.crt::/tmp/serving-cert-3493803189/tls.key\\\\\\\"\\\\nF1010 16:31:52.700035 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 
maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.600936 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gg5hb" event={"ID":"f000ac73-b5de-47c8-a0a7-84bd06475f62","Type":"ContainerStarted","Data":"b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca"} Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.600997 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gg5hb" event={"ID":"f000ac73-b5de-47c8-a0a7-84bd06475f62","Type":"ContainerStarted","Data":"42d00cbbe4dcc5597327afd398c63cb0361aee2706ae39737ef1b9c2316292ad"} Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.610205 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.610244 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.610256 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.610273 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.610284 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:10Z","lastTransitionTime":"2025-10-10T16:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.615992 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.626274 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.643925 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.662545 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller 
ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath
\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin
\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.675627 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.712108 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.712136 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.712144 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.712209 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.712218 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:10Z","lastTransitionTime":"2025-10-10T16:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.717804 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.749735 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.787096 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.821399 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.821448 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.821467 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.821491 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.821509 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:10Z","lastTransitionTime":"2025-10-10T16:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.831437 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.878689 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.921279 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disable
d\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.924252 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.924280 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.924295 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.924314 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.924328 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:10Z","lastTransitionTime":"2025-10-10T16:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.928830 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:32:10 crc kubenswrapper[4799]: E1010 16:32:10.929196 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:32:12.929166563 +0000 UTC m=+26.437490718 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.949252 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:10 crc kubenswrapper[4799]: I1010 16:32:10.994167 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"moun
tPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.026991 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.027078 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.027152 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.027226 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.027250 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:11Z","lastTransitionTime":"2025-10-10T16:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.030371 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.030419 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.030439 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.030460 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:32:11 crc kubenswrapper[4799]: E1010 16:32:11.030546 4799 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 10 16:32:11 crc kubenswrapper[4799]: E1010 16:32:11.030579 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 10 16:32:11 crc kubenswrapper[4799]: E1010 16:32:11.030597 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 10 16:32:11 crc kubenswrapper[4799]: E1010 16:32:11.030607 4799 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 10 16:32:11 crc kubenswrapper[4799]: E1010 16:32:11.030609 4799 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 10 16:32:11 crc kubenswrapper[4799]: E1010 16:32:11.030649 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-10 16:32:13.030625619 +0000 UTC m=+26.538949754 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 10 16:32:11 crc kubenswrapper[4799]: E1010 16:32:11.030662 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 10 16:32:11 crc kubenswrapper[4799]: E1010 16:32:11.030680 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-10 16:32:13.03066823 +0000 UTC m=+26.538992355 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 10 16:32:11 crc kubenswrapper[4799]: E1010 16:32:11.030688 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 10 16:32:11 crc kubenswrapper[4799]: E1010 16:32:11.030700 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-10 16:32:13.03069002 +0000 UTC m=+26.539014285 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 10 16:32:11 crc kubenswrapper[4799]: E1010 16:32:11.030706 4799 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 10 16:32:11 crc kubenswrapper[4799]: E1010 16:32:11.030789 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-10 16:32:13.030745331 +0000 UTC m=+26.539069506 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.032685 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"}
,{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.074116 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.109605 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.114492 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.130427 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.130468 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.130480 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.130517 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.130529 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:11Z","lastTransitionTime":"2025-10-10T16:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.152276 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.187973 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.232376 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:11 crc 
kubenswrapper[4799]: I1010 16:32:11.232409 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.232418 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.232433 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.232441 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:11Z","lastTransitionTime":"2025-10-10T16:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.237820 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.273079 
4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:11Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.311214 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:11Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.333855 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.333894 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.333910 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.333929 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.333939 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:11Z","lastTransitionTime":"2025-10-10T16:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.353201 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:11Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.389959 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:11Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.401409 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:32:11 crc kubenswrapper[4799]: E1010 16:32:11.401518 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.408934 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.410741 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.411729 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.413232 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.413725 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.436076 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.436101 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.436110 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.436123 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.436133 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:11Z","lastTransitionTime":"2025-10-10T16:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.438388 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.450477 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.453312 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.455099 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:11Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.491030 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:11Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.532300 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:11Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.537903 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.537930 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.537938 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.537967 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.537977 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:11Z","lastTransitionTime":"2025-10-10T16:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.568630 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.16
8.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:11Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.606051 4799 generic.go:334] "Generic (PLEG): container finished" podID="96840de9-4451-4499-81fa-a19c62239007" containerID="d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53" exitCode=0 Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.606135 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" event={"ID":"96840de9-4451-4499-81fa-a19c62239007","Type":"ContainerDied","Data":"d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53"} Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.609509 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" event={"ID":"abe7f2d9-ec99-4724-a01f-cc7096377e07","Type":"ContainerStarted","Data":"cfe05183ad0b03415525e6aa2a8d52a5d63b8c273113c46326396df5e0c2bb12"} Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.609545 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" event={"ID":"abe7f2d9-ec99-4724-a01f-cc7096377e07","Type":"ContainerStarted","Data":"cd261112ca7db4d0d76f6ab29a0347d64dccfff4db42ac9f55d6d7df1443ab23"} Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.609560 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" event={"ID":"abe7f2d9-ec99-4724-a01f-cc7096377e07","Type":"ContainerStarted","Data":"8cbc87c392646ebf9c016f8c7b40bcec30e33a0a05ea4a896d1143c5f1086990"} Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.609574 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" event={"ID":"abe7f2d9-ec99-4724-a01f-cc7096377e07","Type":"ContainerStarted","Data":"6562d440ce1f1477fd09c15c34ab88e17e1fb2c2cae4b32a7bf8cbdd29f4d5a3"} Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.609590 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" event={"ID":"abe7f2d9-ec99-4724-a01f-cc7096377e07","Type":"ContainerStarted","Data":"ff4fcf53aeed6c07f775152de0faa9fa0671848df06d37cbca6ec7097d0024d5"} Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.610290 4799 scope.go:117] "RemoveContainer" containerID="10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be" Oct 10 16:32:11 crc kubenswrapper[4799]: E1010 16:32:11.610485 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.614742 4799 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:11Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.639804 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.639833 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.639854 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.639870 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.639879 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:11Z","lastTransitionTime":"2025-10-10T16:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.663951 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:11Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.695791 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":
\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:11Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.736290 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:11Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.746234 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.747228 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.747266 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.747290 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.747302 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:11Z","lastTransitionTime":"2025-10-10T16:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.771995 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:11Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.811811 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:11Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.850071 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.850098 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.850120 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.850135 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.850144 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:11Z","lastTransitionTime":"2025-10-10T16:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.852319 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:11Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.945988 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"n
ame\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:11Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.952642 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.952678 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.952689 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.952705 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.952716 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:11Z","lastTransitionTime":"2025-10-10T16:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.959581 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:11Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:11 crc kubenswrapper[4799]: I1010 16:32:11.974513 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:11Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.011598 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready 
status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:12Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.055023 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.055068 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.055081 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.055101 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.055114 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:12Z","lastTransitionTime":"2025-10-10T16:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.088176 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:12Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.105330 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:12Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.133671 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\
\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"po
dIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:12Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.156767 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.156800 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.156808 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.156823 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.156832 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:12Z","lastTransitionTime":"2025-10-10T16:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.172441 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:12Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.212309 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMount
s\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:12Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.253148 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:12Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.258811 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.258845 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.258856 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.258869 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.258877 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:12Z","lastTransitionTime":"2025-10-10T16:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.298774 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d598
3ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:12Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.344685 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:12Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.360795 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.360839 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.360852 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.360869 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.360881 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:12Z","lastTransitionTime":"2025-10-10T16:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.381398 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:12Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.401937 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.401977 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:32:12 crc kubenswrapper[4799]: E1010 16:32:12.402061 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:32:12 crc kubenswrapper[4799]: E1010 16:32:12.402181 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.414987 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:12Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.463374 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.463411 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.463419 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.463435 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.463444 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:12Z","lastTransitionTime":"2025-10-10T16:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.565594 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.566012 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.566056 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.566079 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.566093 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:12Z","lastTransitionTime":"2025-10-10T16:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.614490 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55"} Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.618580 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" event={"ID":"abe7f2d9-ec99-4724-a01f-cc7096377e07","Type":"ContainerStarted","Data":"c7d0e536ad5143941dd18418b1ac7972a1136a841542b950f6891a386d43ca9c"} Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.620534 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" event={"ID":"96840de9-4451-4499-81fa-a19c62239007","Type":"ContainerStarted","Data":"0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66"} Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.621108 4799 scope.go:117] "RemoveContainer" containerID="10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be" Oct 10 16:32:12 crc kubenswrapper[4799]: E1010 16:32:12.621257 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.645313 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5
778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:12Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.655673 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\
\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:12Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.666823 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:12Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.668580 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.668879 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.669090 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.669314 4799 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeNotReady" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.669498 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:12Z","lastTransitionTime":"2025-10-10T16:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.678792 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:12Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.696487 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPa
th\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mou
ntPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:12Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.712463 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:12Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.727301 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:12Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.749634 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:12Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.772289 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.772339 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.772353 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.772378 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.772392 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:12Z","lastTransitionTime":"2025-10-10T16:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.772521 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:12Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.818333 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"n
ame\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:12Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.856576 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:12Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.875025 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.875050 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.875058 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.875072 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.875081 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:12Z","lastTransitionTime":"2025-10-10T16:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.896172 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:12Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.930944 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:12Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.947059 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:32:12 crc kubenswrapper[4799]: E1010 16:32:12.947215 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:32:16.94720135 +0000 UTC m=+30.455525465 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.975092 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64
b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:12Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.976926 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.976983 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.977002 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.977019 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:12 crc kubenswrapper[4799]: I1010 16:32:12.977028 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:12Z","lastTransitionTime":"2025-10-10T16:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.012852 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:13Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.048108 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.048155 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.048180 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.048217 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:32:13 crc kubenswrapper[4799]: E1010 16:32:13.048330 4799 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 10 16:32:13 crc kubenswrapper[4799]: E1010 16:32:13.048347 4799 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 10 16:32:13 crc kubenswrapper[4799]: E1010 16:32:13.048475 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-10 16:32:17.048458971 +0000 UTC m=+30.556783086 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 10 16:32:13 crc kubenswrapper[4799]: E1010 16:32:13.048347 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 10 16:32:13 crc kubenswrapper[4799]: E1010 16:32:13.048495 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 10 16:32:13 crc kubenswrapper[4799]: E1010 16:32:13.048506 4799 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 10 16:32:13 crc kubenswrapper[4799]: E1010 16:32:13.048532 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-10 16:32:17.048523852 +0000 UTC m=+30.556847967 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 10 16:32:13 crc kubenswrapper[4799]: E1010 16:32:13.048365 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 10 16:32:13 crc kubenswrapper[4799]: E1010 16:32:13.048549 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 10 16:32:13 crc kubenswrapper[4799]: E1010 16:32:13.048558 4799 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 10 16:32:13 crc kubenswrapper[4799]: E1010 16:32:13.048582 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-10 16:32:17.048575883 +0000 UTC m=+30.556899998 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 10 16:32:13 crc kubenswrapper[4799]: E1010 16:32:13.048604 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-10 16:32:17.048595514 +0000 UTC m=+30.556919629 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.058037 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":
[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:13Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.079356 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.079389 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.079400 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.079416 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.079427 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:13Z","lastTransitionTime":"2025-10-10T16:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.093128 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:13Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.140160 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"n
ame\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:13Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.176439 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:13Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.180944 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.180964 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.180972 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.180987 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.180996 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:13Z","lastTransitionTime":"2025-10-10T16:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.213169 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:13Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.253382 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:13Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.282806 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.282863 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.282871 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.282886 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.282898 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:13Z","lastTransitionTime":"2025-10-10T16:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.305720 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:13Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.345702 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:13Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.381310 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready 
status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:13Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.385423 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.385461 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.385472 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.385489 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.385502 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:13Z","lastTransitionTime":"2025-10-10T16:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.401522 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:32:13 crc kubenswrapper[4799]: E1010 16:32:13.401665 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.419981 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\
":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a0
36c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:13Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.451331 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:13Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.488149 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.488181 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.488190 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.488204 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.488213 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:13Z","lastTransitionTime":"2025-10-10T16:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.590944 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.590995 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.591007 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.591025 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.591038 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:13Z","lastTransitionTime":"2025-10-10T16:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.625191 4799 generic.go:334] "Generic (PLEG): container finished" podID="96840de9-4451-4499-81fa-a19c62239007" containerID="0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66" exitCode=0 Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.625280 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" event={"ID":"96840de9-4451-4499-81fa-a19c62239007","Type":"ContainerDied","Data":"0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66"} Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.640512 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:13Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.657853 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:13Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.669016 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:13Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.682315 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:13Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.693507 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.693554 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.693567 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.693584 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.693600 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:13Z","lastTransitionTime":"2025-10-10T16:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.700247 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d598
3ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:13Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.715392 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:13Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.730094 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:13Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.773485 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:13Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.795904 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.795936 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.795943 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.795957 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.795966 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:13Z","lastTransitionTime":"2025-10-10T16:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.816842 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b29
1b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:13Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.853964 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:13Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.890234 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:13Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.898588 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.898638 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.898653 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.898671 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.898683 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:13Z","lastTransitionTime":"2025-10-10T16:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.935809 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:13Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:13 crc kubenswrapper[4799]: I1010 16:32:13.972076 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"moun
tPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:13Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.000444 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.000485 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.000496 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.000513 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.000523 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:14Z","lastTransitionTime":"2025-10-10T16:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.103218 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.103262 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.103276 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.103294 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.103307 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:14Z","lastTransitionTime":"2025-10-10T16:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.194084 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-6wjsp"] Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.194714 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-6wjsp" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.197553 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.197695 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.198144 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.198383 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.206129 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.206190 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.206209 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.206234 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.206251 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:14Z","lastTransitionTime":"2025-10-10T16:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.229669 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:14Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.241979 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:14Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.255183 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:14Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.270570 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:14Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.291667 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"moun
tPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:14Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.308834 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.308888 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.308904 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.308928 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.308942 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:14Z","lastTransitionTime":"2025-10-10T16:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.312146 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d598
3ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:14Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.337154 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:14Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.359151 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/76fdb169-eee9-4170-b948-95e26254208b-host\") pod \"node-ca-6wjsp\" (UID: \"76fdb169-eee9-4170-b948-95e26254208b\") " pod="openshift-image-registry/node-ca-6wjsp" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.359226 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/76fdb169-eee9-4170-b948-95e26254208b-serviceca\") pod \"node-ca-6wjsp\" (UID: \"76fdb169-eee9-4170-b948-95e26254208b\") " pod="openshift-image-registry/node-ca-6wjsp" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.359437 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ww66\" (UniqueName: 
\"kubernetes.io/projected/76fdb169-eee9-4170-b948-95e26254208b-kube-api-access-2ww66\") pod \"node-ca-6wjsp\" (UID: \"76fdb169-eee9-4170-b948-95e26254208b\") " pod="openshift-image-registry/node-ca-6wjsp" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.376687 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:14Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.401878 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.401922 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:32:14 crc kubenswrapper[4799]: E1010 16:32:14.402006 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:32:14 crc kubenswrapper[4799]: E1010 16:32:14.402083 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.411685 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.411724 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.411733 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.411764 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.411775 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:14Z","lastTransitionTime":"2025-10-10T16:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.418418 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:14Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.455229 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:14Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.460607 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ww66\" (UniqueName: \"kubernetes.io/projected/76fdb169-eee9-4170-b948-95e26254208b-kube-api-access-2ww66\") pod \"node-ca-6wjsp\" (UID: \"76fdb169-eee9-4170-b948-95e26254208b\") " pod="openshift-image-registry/node-ca-6wjsp" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.460650 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/76fdb169-eee9-4170-b948-95e26254208b-host\") pod \"node-ca-6wjsp\" (UID: \"76fdb169-eee9-4170-b948-95e26254208b\") " pod="openshift-image-registry/node-ca-6wjsp" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.460682 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/76fdb169-eee9-4170-b948-95e26254208b-serviceca\") pod \"node-ca-6wjsp\" (UID: \"76fdb169-eee9-4170-b948-95e26254208b\") " pod="openshift-image-registry/node-ca-6wjsp" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.460835 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/76fdb169-eee9-4170-b948-95e26254208b-host\") pod \"node-ca-6wjsp\" (UID: \"76fdb169-eee9-4170-b948-95e26254208b\") " pod="openshift-image-registry/node-ca-6wjsp" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.461504 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/76fdb169-eee9-4170-b948-95e26254208b-serviceca\") pod \"node-ca-6wjsp\" (UID: \"76fdb169-eee9-4170-b948-95e26254208b\") " pod="openshift-image-registry/node-ca-6wjsp" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.504806 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ww66\" (UniqueName: \"kubernetes.io/projected/76fdb169-eee9-4170-b948-95e26254208b-kube-api-access-2ww66\") pod \"node-ca-6wjsp\" (UID: \"76fdb169-eee9-4170-b948-95e26254208b\") " pod="openshift-image-registry/node-ca-6wjsp" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.511421 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:14Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.514242 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.514300 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.514318 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.514343 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.514361 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:14Z","lastTransitionTime":"2025-10-10T16:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.518468 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-6wjsp" Oct 10 16:32:14 crc kubenswrapper[4799]: W1010 16:32:14.534147 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod76fdb169_eee9_4170_b948_95e26254208b.slice/crio-3b429ee62bf8380ceb10751c638ed9bd6b3bc3f407ced8c9d5ceb59db8baaeb5 WatchSource:0}: Error finding container 3b429ee62bf8380ceb10751c638ed9bd6b3bc3f407ced8c9d5ceb59db8baaeb5: Status 404 returned error can't find the container with id 3b429ee62bf8380ceb10751c638ed9bd6b3bc3f407ced8c9d5ceb59db8baaeb5 Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.554814 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:14Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.592477 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:14Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.617648 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.617689 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.617705 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.617726 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.617740 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:14Z","lastTransitionTime":"2025-10-10T16:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.631162 4799 generic.go:334] "Generic (PLEG): container finished" podID="96840de9-4451-4499-81fa-a19c62239007" containerID="b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db" exitCode=0 Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.631252 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" event={"ID":"96840de9-4451-4499-81fa-a19c62239007","Type":"ContainerDied","Data":"b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db"} Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.633027 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-conf
ig\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:14Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.633547 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-6wjsp" event={"ID":"76fdb169-eee9-4170-b948-95e26254208b","Type":"ContainerStarted","Data":"3b429ee62bf8380ceb10751c638ed9bd6b3bc3f407ced8c9d5ceb59db8baaeb5"} Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.641942 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" event={"ID":"abe7f2d9-ec99-4724-a01f-cc7096377e07","Type":"ContainerStarted","Data":"08406e220de50ba85f882a05117b5df8c9445a38c026bb85c95fc9f98f2d2cfe"} Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.672589 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:14Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.716008 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:14Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.719667 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.719703 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.719711 4799 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.719730 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.719739 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:14Z","lastTransitionTime":"2025-10-10T16:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.756453 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:14Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.796214 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:14Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.822669 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.822704 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.822712 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.822726 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.822734 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:14Z","lastTransitionTime":"2025-10-10T16:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.832299 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:14Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.871452 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:14Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.914519 4799 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:14Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.925252 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.925314 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.925334 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.925361 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.925379 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:14Z","lastTransitionTime":"2025-10-10T16:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.953639 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:14Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:14 crc kubenswrapper[4799]: I1010 16:32:14.994173 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:14Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.028081 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.028138 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.028155 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.028182 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.028204 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:15Z","lastTransitionTime":"2025-10-10T16:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.033985 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:15Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.076160 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:15Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.113983 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:15Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.130580 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.130626 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.130643 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.130668 4799 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.130685 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:15Z","lastTransitionTime":"2025-10-10T16:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.152557 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:15Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.198546 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5
778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:15Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.233540 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.233574 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.233584 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.233597 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.233606 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:15Z","lastTransitionTime":"2025-10-10T16:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.335676 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.335729 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.335742 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.335772 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.335783 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:15Z","lastTransitionTime":"2025-10-10T16:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.402024 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:32:15 crc kubenswrapper[4799]: E1010 16:32:15.402226 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.438959 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.438996 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.439007 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.439026 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.439037 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:15Z","lastTransitionTime":"2025-10-10T16:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.541623 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.541676 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.541690 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.541712 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.541727 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:15Z","lastTransitionTime":"2025-10-10T16:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.644734 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.644828 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.644854 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.644891 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.644918 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:15Z","lastTransitionTime":"2025-10-10T16:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.649727 4799 generic.go:334] "Generic (PLEG): container finished" podID="96840de9-4451-4499-81fa-a19c62239007" containerID="2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274" exitCode=0 Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.649949 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" event={"ID":"96840de9-4451-4499-81fa-a19c62239007","Type":"ContainerDied","Data":"2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274"} Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.653435 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-6wjsp" event={"ID":"76fdb169-eee9-4170-b948-95e26254208b","Type":"ContainerStarted","Data":"5b030264f18288aa7687a91f7918f1ed2c2ad474637e32a054ea8c25b97aef45"} Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.686534 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:15Z 
is after 2025-08-24T17:21:41Z" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.709842 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:15Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.731302 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:15Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.749484 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.749539 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.749578 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.749606 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.749625 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:15Z","lastTransitionTime":"2025-10-10T16:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.753983 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:15Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.773031 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:15Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.788480 4799 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:15Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.809377 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:15Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.828605 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:15Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.846536 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cn
i/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:15Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.858550 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.858606 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.858624 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.858649 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.858666 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:15Z","lastTransitionTime":"2025-10-10T16:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.872502 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:15Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.889490 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:15Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.906902 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:15Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.921220 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:15Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.940946 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/
ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\"
:\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:15Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.956662 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:15Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.961057 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.961116 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.961136 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.961162 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.961181 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:15Z","lastTransitionTime":"2025-10-10T16:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.968859 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b030264f18288aa7687a91f7918f1ed2c2ad474637e32a054ea8c25b97aef45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"h
ostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:15Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:15 crc kubenswrapper[4799]: I1010 16:32:15.987368 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:15Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.008725 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:16Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.034729 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5
778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:16Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.047113 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\
\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:16Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.061796 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:16Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.066073 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.066142 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.066161 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.066190 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.066208 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:16Z","lastTransitionTime":"2025-10-10T16:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.072780 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:16Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.115734 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:16Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.152121 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:16Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.168321 4799 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.168512 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.168529 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.168547 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.168560 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:16Z","lastTransitionTime":"2025-10-10T16:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.201618 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:16Z 
is after 2025-08-24T17:21:41Z" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.232706 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:16Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.270599 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.270630 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.270641 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.270660 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.270672 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:16Z","lastTransitionTime":"2025-10-10T16:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.272435 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:16Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.315584 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:16Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.375807 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.375860 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.375883 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.375911 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.375932 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:16Z","lastTransitionTime":"2025-10-10T16:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.402523 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.402547 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:32:16 crc kubenswrapper[4799]: E1010 16:32:16.402647 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:32:16 crc kubenswrapper[4799]: E1010 16:32:16.402838 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.479227 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.479274 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.479288 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.479306 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.479320 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:16Z","lastTransitionTime":"2025-10-10T16:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.544353 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.549660 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.553697 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.559813 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:16Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.575176 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:16Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.583174 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.583214 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.583226 4799 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.583246 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.583258 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:16Z","lastTransitionTime":"2025-10-10T16:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.594805 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:16Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.607466 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:16Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.618981 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:16Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.632839 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:16Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.644938 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:16Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.660444 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" event={"ID":"96840de9-4451-4499-81fa-a19c62239007","Type":"ContainerStarted","Data":"f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014"} Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.669595 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:16Z 
is after 2025-08-24T17:21:41Z" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.685171 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.685205 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.685215 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.685232 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.685244 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:16Z","lastTransitionTime":"2025-10-10T16:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.694673 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:16Z is after 2025-08-24T17:21:41Z" Oct 10 
16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.731132 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:16Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.772244 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:16Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.787821 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.787879 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.787897 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.787922 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.787939 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:16Z","lastTransitionTime":"2025-10-10T16:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.811346 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b030264f18288aa7687a91f7918f1ed2c2ad474637e32a054ea8c25b97aef45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"h
ostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:16Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.862594 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o
://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30b
a5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:16Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.893575 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.893666 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.893692 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.893726 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.893749 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:16Z","lastTransitionTime":"2025-10-10T16:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.894545 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:16Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.934516 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b030264f18288aa7687a91f7918f1ed2c2ad474637e32a054ea8c25b97aef45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:16Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.972049 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:16Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.994781 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:32:16 crc kubenswrapper[4799]: E1010 16:32:16.994874 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:32:24.994857627 +0000 UTC m=+38.503181752 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.996215 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.996261 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.996273 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.996293 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:16 crc kubenswrapper[4799]: I1010 16:32:16.996304 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:16Z","lastTransitionTime":"2025-10-10T16:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.021121 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.056455 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.096260 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.096345 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.096394 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.096434 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:32:17 crc kubenswrapper[4799]: E1010 16:32:17.096445 4799 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 10 16:32:17 crc kubenswrapper[4799]: E1010 
16:32:17.096528 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-10 16:32:25.096508047 +0000 UTC m=+38.604832252 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 10 16:32:17 crc kubenswrapper[4799]: E1010 16:32:17.096557 4799 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 10 16:32:17 crc kubenswrapper[4799]: E1010 16:32:17.096629 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 10 16:32:17 crc kubenswrapper[4799]: E1010 16:32:17.096656 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 10 16:32:17 crc kubenswrapper[4799]: E1010 16:32:17.096663 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-10 16:32:25.09663822 +0000 UTC m=+38.604962405 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 10 16:32:17 crc kubenswrapper[4799]: E1010 16:32:17.096670 4799 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 10 16:32:17 crc kubenswrapper[4799]: E1010 16:32:17.096725 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-10 16:32:25.096707131 +0000 UTC m=+38.605031256 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 10 16:32:17 crc kubenswrapper[4799]: E1010 16:32:17.096835 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 10 16:32:17 crc kubenswrapper[4799]: E1010 16:32:17.096884 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 10 16:32:17 crc kubenswrapper[4799]: E1010 16:32:17.096901 4799 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 10 16:32:17 crc kubenswrapper[4799]: E1010 16:32:17.096944 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-10 16:32:25.096930927 +0000 UTC m=+38.605255112 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.097597 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5
778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.098327 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.098371 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.098385 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.098405 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.098419 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:17Z","lastTransitionTime":"2025-10-10T16:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.130304 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.173299 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bf1784a-776b-49c7-b64b-7ce52860df45\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://298a1a9571fbe118fe81ff3e7403e298bcde9b683cffab574fbb03d5adc1fb67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f767e89684b9b515da850360aaf9d7a02173395faf0654e9f0b3a4752a3d608b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92de340d859cff018a661f0a7f7fe209ffae161bf6f39deb005c7148591fc60b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a649a65ab118025ea70d1d7cf71236cb96992671c3bc7659d591640b53f941\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.200477 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.200528 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.200541 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.200559 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.200574 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:17Z","lastTransitionTime":"2025-10-10T16:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.217913 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.254506 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.297675 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/
ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\"
:\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.303836 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.303905 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.303927 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.303955 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.303974 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:17Z","lastTransitionTime":"2025-10-10T16:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin 
returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.341864 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\
\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"contai
nerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.376450 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.402026 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:32:17 crc kubenswrapper[4799]: E1010 16:32:17.402283 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.405794 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.405832 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.405844 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.405861 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.405886 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:17Z","lastTransitionTime":"2025-10-10T16:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.416383 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.452522 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.495665 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.507936 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.507986 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.508007 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.508034 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.508052 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:17Z","lastTransitionTime":"2025-10-10T16:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.552180 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.573808 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.615726 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.615841 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.615861 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.615885 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.615904 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:17Z","lastTransitionTime":"2025-10-10T16:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.618449 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bf1784a-776b-49c7-b64b-7ce52860df45\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://298a1a9571fbe118fe81ff3e7403e298bcde9b683cffab574fbb03d5adc1fb67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f767e89684b9b515da850360aaf9d7a02173395faf0654e9f0b3a4752a3d608b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92de340d859cff018a661f0a7f7fe209ffae161bf6f39deb005c7148591fc60b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a649a65ab118025ea70d1d7cf71236cb96992671c3bc7659d591640b53f941\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.657324 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.673095 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" event={"ID":"abe7f2d9-ec99-4724-a01f-cc7096377e07","Type":"ContainerStarted","Data":"ed12cf3d02660853d079aded94ac925b8ee8f016804c32b9ac53791a041883a1"} Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.673406 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.673499 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.701932 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.710359 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.718236 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.718270 4799 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.718281 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.718297 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.718308 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:17Z","lastTransitionTime":"2025-10-10T16:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.746143 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.817952 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:17Z 
is after 2025-08-24T17:21:41Z" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.820348 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.820448 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.820558 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.820651 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.820766 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:17Z","lastTransitionTime":"2025-10-10T16:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.838561 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.851702 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.899796 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.922943 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.923125 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.923184 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.923239 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.923323 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:17Z","lastTransitionTime":"2025-10-10T16:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.932885 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:17 crc kubenswrapper[4799]: I1010 16:32:17.968796 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b030264f18288aa7687a91f7918f1ed2c2ad474637e32a054ea8c25b97aef45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.018541 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:18Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.026412 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.026861 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.026949 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.027056 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.027142 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:18Z","lastTransitionTime":"2025-10-10T16:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.056636 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:18Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.095125 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:18Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.129819 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.129868 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.129883 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.129906 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.129921 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:18Z","lastTransitionTime":"2025-10-10T16:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.134829 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:18Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.177794 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:18Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.211112 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:18Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.231776 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.231815 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.231824 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.231837 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.231847 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:18Z","lastTransitionTime":"2025-10-10T16:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.257731 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cbc87c392646ebf9c016f8c7b40bcec30e33a0a05ea4a896d1143c5f1086990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd261112ca7db4d0d76f6ab29a0347d64dccfff4db42ac9f55d6d7df1443ab23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"cont
ainerID\\\":\\\"cri-o://c7d0e536ad5143941dd18418b1ac7972a1136a841542b950f6891a386d43ca9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfe05183ad0b03415525e6aa2a8d52a5d63b8c273113c46326396df5e0c2bb12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6562d440ce1f1477fd09c15c34ab88e17e1fb2c2cae4b32a7bf8cbdd29f4d5a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4fcf53aeed6c07f775152de0faa9fa0671848df06d37cbca6ec7097d0024d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\"
:\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed12cf3d02660853d079aded94ac925b8ee8f016804c32b9ac53791a041883a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkub
e-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08406e220de50ba85f882a05117b5df8c9445a38c026bb85c95fc9f98f2d2cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:18Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.295715 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:18Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.331480 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:18Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.335185 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.335208 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.335215 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.335230 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.335240 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:18Z","lastTransitionTime":"2025-10-10T16:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.375326 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:18Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.402314 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.402388 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:32:18 crc kubenswrapper[4799]: E1010 16:32:18.402461 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:32:18 crc kubenswrapper[4799]: E1010 16:32:18.402529 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.412529 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readO
nly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:18Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.438205 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.438259 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.438272 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.438292 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.438308 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:18Z","lastTransitionTime":"2025-10-10T16:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.449873 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b030264f18288aa7687a91f7918f1ed2c2ad474637e32a054ea8c25b97aef45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:18Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.489854 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:18Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.536517 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5
778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:18Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.541159 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.541207 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.541223 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.541244 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.541257 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:18Z","lastTransitionTime":"2025-10-10T16:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.572810 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:18Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.613950 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:18Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.643332 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.643378 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.643392 4799 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.643413 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.643424 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:18Z","lastTransitionTime":"2025-10-10T16:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.657210 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:18Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.680236 4799 generic.go:334] "Generic (PLEG): container finished" podID="96840de9-4451-4499-81fa-a19c62239007" containerID="f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014" exitCode=0 Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.681079 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" event={"ID":"96840de9-4451-4499-81fa-a19c62239007","Type":"ContainerDied","Data":"f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014"} Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.681569 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.692562 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bf1784a-776b-49c7-b64b-7ce52860df45\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://298a1a9571fbe118fe81ff3e7403e298bcde9b683cffab574fbb03d5adc1fb67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f767e89684b9b515da850360aaf9d7a02173395faf0654e9f0b3a4752a3d608b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92de340d859cff018a661f0a7f7fe209ffae161bf6f39deb005c7148591fc60b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a649a65ab118025ea70d1d7cf71236cb96992671c3bc7659d591640b53f941\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:18Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.736210 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:18Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.747695 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.747720 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.747730 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.747745 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.747782 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:18Z","lastTransitionTime":"2025-10-10T16:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.748288 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.770588 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:18Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:18 
crc kubenswrapper[4799]: I1010 16:32:18.811436 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bf1784a-776b-49c7-b64b-7ce52860df45\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://298a1a9571fbe118fe81ff3e7403e298bcde9b683cffab574fbb03d5adc1fb67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f767e89684b9b515da850360aaf9d7a02173395faf0654e9f0b3a4752a3d608b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92de340d859cff018a661f0a7f7fe209ffae161bf6f39deb005c7148591fc60b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"nam
e\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a649a65ab118025ea70d1d7cf71236cb96992671c3bc7659d591640b53f941\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:18Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.850840 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.851157 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.851168 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.851186 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.851199 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:18Z","lastTransitionTime":"2025-10-10T16:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.851838 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:18Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.892437 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:18Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.937597 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mo
untPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:18Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.953786 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.953840 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.953857 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.953879 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeNotReady" Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.953895 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:18Z","lastTransitionTime":"2025-10-10T16:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:18 crc kubenswrapper[4799]: I1010 16:32:18.973862 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"c
ri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods 
\\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:18Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.017140 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:19Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.057388 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:19Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.057558 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.057597 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.057608 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.057625 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.057636 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:19Z","lastTransitionTime":"2025-10-10T16:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.093008 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:19Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.145087 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cbc87c392646ebf9c016f8c7b40bcec30e33a0a05ea4a896d1143c5f1086990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd261112ca7db4d0d76f6ab29a0347d64dccfff4db42ac9f55d6d7df1443ab23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/sec
rets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7d0e536ad5143941dd18418b1ac7972a1136a841542b950f6891a386d43ca9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfe05183ad0b03415525e6aa2a8d52a5d63b8c273113c46326396df5e0c2bb12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6562d440ce1f1477fd09c15c34ab88e17e1fb2c2cae4b32a7bf8cbdd29f4d5a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4fcf53aeed6c07f775152de0faa9fa0671848df06d37cbca6ec7097d0024d5\\\
",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed12cf3d02660853d079aded94ac925b8ee8f016804c32b9ac53791a041883a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\
\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08406e220de50ba85f882a05117b5df8c9445a38c026bb85c95fc9f98f2d2cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:19Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.160153 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.160223 
4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.160240 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.160267 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.160285 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:19Z","lastTransitionTime":"2025-10-10T16:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.177532 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:19Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.217816 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:19Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.256159 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:19Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.263470 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.263619 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.263700 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.263801 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.263879 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:19Z","lastTransitionTime":"2025-10-10T16:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.295028 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b030264f18288aa7687a91f7918f1ed2c2ad474637e32a054ea8c25b97aef45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"h
ostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:19Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.332977 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:19Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.366918 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.366963 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.366975 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.366993 4799 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.367005 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:19Z","lastTransitionTime":"2025-10-10T16:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.376222 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:19Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.401550 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:32:19 crc kubenswrapper[4799]: E1010 16:32:19.401723 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.420587 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:19Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.450651 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b030264f18288aa7687a91f7918f1ed2c2ad474637e32a054ea8c25b97aef45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:19Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.469533 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.469568 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.469577 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.469592 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.469600 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:19Z","lastTransitionTime":"2025-10-10T16:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.506093 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp
-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272
e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:19Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.533505 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:19Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.572614 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.572668 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.572688 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.572715 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.572732 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:19Z","lastTransitionTime":"2025-10-10T16:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.576695 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bf1784a-776b-49c7-b64b-7ce52860df45\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://298a1a9571fbe118fe81ff3e7403e298bcde9b683cffab574fbb03d5adc1fb67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f767e89684b9b515da850360aaf9d7a02173395faf0654e9f0b3a4752a3d608b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92de340d859cff018a661f0a7f7fe209ffae161bf6f39deb005c7148591fc60b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a649a65ab118025ea70d1d7cf71236cb96992671c3bc7659d591640b53f941\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:19Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.616973 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:19Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.655521 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:19Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.674876 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.674915 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.674926 4799 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.674943 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.674956 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:19Z","lastTransitionTime":"2025-10-10T16:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.686063 4799 generic.go:334] "Generic (PLEG): container finished" podID="96840de9-4451-4499-81fa-a19c62239007" containerID="af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64" exitCode=0 Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.686961 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" event={"ID":"96840de9-4451-4499-81fa-a19c62239007","Type":"ContainerDied","Data":"af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64"} Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.696820 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:19Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.734466 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:19Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.775231 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:19Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.777360 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.777412 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.777423 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.777437 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.777446 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:19Z","lastTransitionTime":"2025-10-10T16:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.811043 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:19Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.852492 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:19Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.882793 4799 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.882825 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.882833 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.882848 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.882857 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:19Z","lastTransitionTime":"2025-10-10T16:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.903489 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cbc87c392646ebf9c016f8c7b40bcec30e33a0a05ea4a896d1143c5f1086990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd261112ca7db4d0d76f6ab29a0347d64dccfff4db42ac9f55d6d7df1443ab23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7d0e536ad5143941dd18418b1ac7972a1136a841542b950f6891a386d43ca9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfe05183ad0b03415525e6aa2a8d52a5d63b8c273113c46326396df5e0c2bb12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6562d440ce1f1477fd09c15c34ab88e17e1fb2c2cae4b32a7bf8cbdd29f4d5a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4fcf53aeed6c07f775152de0faa9fa0671848df06d37cbca6ec7097d0024d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed12cf3d02660853d079aded94ac925b8ee8f016
804c32b9ac53791a041883a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08406e220de50ba85f882a05117b5df8c9445a38c026bb85c95fc9f98f2d2cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:19Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.940586 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5
778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:19Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.970864 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\
\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:19Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.985142 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.985169 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.985181 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.985195 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:19 crc kubenswrapper[4799]: I1010 16:32:19.985204 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:19Z","lastTransitionTime":"2025-10-10T16:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.014699 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bf1784a-776b-49c7-b64b-7ce52860df45\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://298a1a9571fbe118fe81ff3e7403e298bcde9b683cffab574fbb03d5adc1fb67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f767e89684b9b515da850360aaf9d7a02173395faf0654e9f0b3a4752a3d608b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92de340d859cff018a661f0a7f7fe209ffae161bf6f39deb005c7148591fc60b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a649a65ab118025ea70d1d7cf71236cb96992671c3bc7659d591640b53f941\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:20Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.055433 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-10-10T16:32:20Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.088199 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.088307 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.088335 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.088368 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.088395 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:20Z","lastTransitionTime":"2025-10-10T16:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.098624 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name
\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:20Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.145857 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:20Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.177615 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:20Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.192524 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.192563 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.192575 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.192591 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.192600 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:20Z","lastTransitionTime":"2025-10-10T16:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.215278 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:20Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.257578 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:20Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.291432 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:20Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.294978 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.295027 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.295047 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.295072 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.295090 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:20Z","lastTransitionTime":"2025-10-10T16:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.342968 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cbc87c392646ebf9c016f8c7b40bcec30e33a0a05ea4a896d1143c5f1086990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd261112ca7db4d0d76f6ab29a0347d64dccfff4db42ac9f55d6d7df1443ab23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://c7d0e536ad5143941dd18418b1ac7972a1136a841542b950f6891a386d43ca9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfe05183ad0b03415525e6aa2a8d52a5d63b8c273113c46326396df5e0c2bb12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6562d440ce1f1477fd09c15c34ab88e17e1fb2c2cae4b32a7bf8cbdd29f4d5a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4fcf53aeed6c07f775152de0faa9fa0671848df06d37cbca6ec7097d0024d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed12cf3d02660853d079aded94ac925b8ee8f016804c32b9ac53791a041883a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\
"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08406e220de50ba85f882a05117b5df8c9445a38c026bb85c95fc9f98f2d2cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:20Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.380398 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:20Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.398617 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.398678 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.398701 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.398732 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.398809 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:20Z","lastTransitionTime":"2025-10-10T16:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.402283 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.402343 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:32:20 crc kubenswrapper[4799]: E1010 16:32:20.402470 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:32:20 crc kubenswrapper[4799]: E1010 16:32:20.402664 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.417220 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:20Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.438570 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.438627 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.438644 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.438670 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.438688 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:20Z","lastTransitionTime":"2025-10-10T16:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:20 crc kubenswrapper[4799]: E1010 16:32:20.458440 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:20Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.463043 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:20Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.463723 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.463829 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.463846 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.463872 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.463892 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:20Z","lastTransitionTime":"2025-10-10T16:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:20 crc kubenswrapper[4799]: E1010 16:32:20.485714 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.490548 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.490613 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.490632 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.490656 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.490673 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:20Z","lastTransitionTime":"2025-10-10T16:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.495586 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b030264f18288aa7687a91f7918f1ed2c2ad474637e32a054ea8c25b97aef45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-10T16:32:20Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:20 crc kubenswrapper[4799]: E1010 16:32:20.510982 4799 kubelet_node_status.go:585] "Error updating node status, will retry"
Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.515558 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.515593 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.515605 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.515624 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.515636 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:20Z","lastTransitionTime":"2025-10-10T16:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:20 crc kubenswrapper[4799]: E1010 16:32:20.534657 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.542957 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.543064 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.543091 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.543119 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.543146 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:20Z","lastTransitionTime":"2025-10-10T16:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:20 crc kubenswrapper[4799]: E1010 16:32:20.564536 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:20Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:20 crc kubenswrapper[4799]: E1010 16:32:20.564796 4799 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.567423 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.567491 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.567541 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.567568 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.567585 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:20Z","lastTransitionTime":"2025-10-10T16:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.671098 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.671154 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.671170 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.671195 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.671213 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:20Z","lastTransitionTime":"2025-10-10T16:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.692723 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mcwfc_abe7f2d9-ec99-4724-a01f-cc7096377e07/ovnkube-controller/0.log" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.696394 4799 generic.go:334] "Generic (PLEG): container finished" podID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerID="ed12cf3d02660853d079aded94ac925b8ee8f016804c32b9ac53791a041883a1" exitCode=1 Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.696463 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" event={"ID":"abe7f2d9-ec99-4724-a01f-cc7096377e07","Type":"ContainerDied","Data":"ed12cf3d02660853d079aded94ac925b8ee8f016804c32b9ac53791a041883a1"} Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.699278 4799 scope.go:117] "RemoveContainer" containerID="ed12cf3d02660853d079aded94ac925b8ee8f016804c32b9ac53791a041883a1" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.703809 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" event={"ID":"96840de9-4451-4499-81fa-a19c62239007","Type":"ContainerStarted","Data":"995be5ba088a3758758ce5aaf735f0371692c52e49e3992c6478311411c8db42"} Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.738521 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6
877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",
\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:20Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.754028 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:20Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.772649 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bf1784a-776b-49c7-b64b-7ce52860df45\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://298a1a9571fbe118fe81ff3e7403e298bcde9b683cffab574fbb03d5adc1fb67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f767e89684b9b515da850360aaf9d7a02173395faf0654e9f0b3a4752a3d608b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92de340d859cff018a661f0a7f7fe209ffae161bf6f39deb005c7148591fc60b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a649a65ab118025ea70d1d7cf71236cb96992671c3bc7659d591640b53f941\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:20Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.776031 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.776096 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.776114 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.776153 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.776171 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:20Z","lastTransitionTime":"2025-10-10T16:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.787441 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:20Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.807654 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:20Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.828162 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:20Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.854076 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cbc87c392646ebf9c016f8c7b40bcec30e33a0a05ea4a896d1143c5f1086990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd261112ca7db4d0d76f6ab29a0347d64dccfff4db42ac9f55d6d7df1443ab23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7d0e536ad5143941dd18418b1ac7972a1136a841542b950f6891a386d43ca9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfe05183ad0b03415525e6aa2a8d52a5d63b8c273113c46326396df5e0c2bb12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6562d440ce1f1477fd09c15c34ab88e17e1fb2c2cae4b32a7bf8cbdd29f4d5a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4fcf53aeed6c07f775152de0faa9fa0671848df06d37cbca6ec7097d0024d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed12cf3d02660853d079aded94ac925b8ee8f016804c32b9ac53791a041883a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed12cf3d02660853d079aded94ac925b8ee8f016804c32b9ac53791a041883a1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"message\\\":\\\"ithub.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1010 16:32:20.244796 6040 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1010 16:32:20.244943 6040 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1010 16:32:20.245339 6040 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1010 16:32:20.245518 6040 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1010 16:32:20.245703 6040 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1010 16:32:20.245820 6040 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1010 16:32:20.246257 6040 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1010 16:32:20.246279 6040 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1010 16:32:20.246306 6040 factory.go:656] Stopping watch factory\\\\nI1010 16:32:20.246330 6040 handler.go:208] Removed *v1.Node event handler 7\\\\nI1010 16:32:20.246345 6040 handler.go:208] Removed *v1.Node 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08406e220de50ba85f882a05117b5df8c9445a38c026bb85c95fc9f98f2d2cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:20Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.876297 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:20Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.883256 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.883287 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.883297 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.883313 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.883323 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:20Z","lastTransitionTime":"2025-10-10T16:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.893705 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:20Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.912075 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:20Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.931452 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:20Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.972813 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b030264f18288aa7687a91f7918f1ed2c2ad474637e32a054ea8c25b97aef45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"nam
e\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:20Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.985249 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.985283 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.985294 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.985310 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:20 crc kubenswrapper[4799]: I1010 16:32:20.985321 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:20Z","lastTransitionTime":"2025-10-10T16:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.012110 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:21Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.055091 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:21Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.089499 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.089581 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.089606 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.089642 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.089714 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:21Z","lastTransitionTime":"2025-10-10T16:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.100142 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:21Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.134512 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:21Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.183038 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:21Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.192270 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.192325 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.192347 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.192376 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.192397 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:21Z","lastTransitionTime":"2025-10-10T16:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.213672 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b030264f18288aa7687a91f7918f1ed2c2ad474637e32a054ea8c25b97aef45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:21Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.261952 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:21Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.295607 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.295654 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.295670 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.295694 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.295711 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:21Z","lastTransitionTime":"2025-10-10T16:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.296734 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:21Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.352955 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5
778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:21Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.381743 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:21Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.399748 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.399815 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.399825 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.399844 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.399888 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:21Z","lastTransitionTime":"2025-10-10T16:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.402035 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:32:21 crc kubenswrapper[4799]: E1010 16:32:21.402160 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.418567 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://995be5ba088a3758758ce5aaf735f0371692c52e49e3992c6478311411c8db42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"c
ri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mount
Path\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:21Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.452282 
4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bf1784a-776b-49c7-b64b-7ce52860df45\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://298a1a9571fbe118fe81ff3e7403e298bcde9b683cffab574fbb03d5adc1fb67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f767e89684b9b515da850360aaf9d7a02173395faf0654e9f0b3a4752a3d608b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92de340d859cff018a661f0a7f7fe209ffae161bf6f39deb005c7148591fc60b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":
\\\"cri-o://c3a649a65ab118025ea70d1d7cf71236cb96992671c3bc7659d591640b53f941\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:21Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.493360 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:21Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.502470 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.502528 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.502546 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.502572 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.502607 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:21Z","lastTransitionTime":"2025-10-10T16:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.534484 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:21Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.570191 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:21Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.606496 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.606531 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.606544 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.606562 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.606573 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:21Z","lastTransitionTime":"2025-10-10T16:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.625193 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cbc87c392646ebf9c016f8c7b40bcec30e33a0a05ea4a896d1143c5f1086990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd261112ca7db4d0d76f6ab29a0347d64dccfff4db42ac9f55d6d7df1443ab23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://c7d0e536ad5143941dd18418b1ac7972a1136a841542b950f6891a386d43ca9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfe05183ad0b03415525e6aa2a8d52a5d63b8c273113c46326396df5e0c2bb12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6562d440ce1f1477fd09c15c34ab88e17e1fb2c2cae4b32a7bf8cbdd29f4d5a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4fcf53aeed6c07f775152de0faa9fa0671848df06d37cbca6ec7097d0024d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed12cf3d02660853d079aded94ac925b8ee8f016804c32b9ac53791a041883a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed12cf3d02660853d079aded94ac925b8ee8f016804c32b9ac53791a041883a1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"message\\\":\\\"ithub.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1010 16:32:20.244796 6040 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1010 16:32:20.244943 6040 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1010 16:32:20.245339 6040 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1010 16:32:20.245518 6040 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1010 16:32:20.245703 6040 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1010 16:32:20.245820 6040 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1010 16:32:20.246257 6040 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1010 16:32:20.246279 6040 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1010 16:32:20.246306 6040 factory.go:656] Stopping watch factory\\\\nI1010 16:32:20.246330 6040 handler.go:208] Removed *v1.Node event handler 7\\\\nI1010 16:32:20.246345 6040 handler.go:208] Removed *v1.Node 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08406e220de50ba85f882a05117b5df8c9445a38c026bb85c95fc9f98f2d2cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:21Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.656092 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:21Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.698599 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:21Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.708589 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.708646 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.708666 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.708694 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.708711 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:21Z","lastTransitionTime":"2025-10-10T16:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.710608 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mcwfc_abe7f2d9-ec99-4724-a01f-cc7096377e07/ovnkube-controller/0.log" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.714242 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" event={"ID":"abe7f2d9-ec99-4724-a01f-cc7096377e07","Type":"ContainerStarted","Data":"46332137ef0b23ab6df5f7eb7e21459fb1b18c3580232f6c03ba11cb434c42e8"} Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.714810 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.753823 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-cer
ts\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:21Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.772079 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:21Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.810908 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.810947 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.810956 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.810972 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.810983 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:21Z","lastTransitionTime":"2025-10-10T16:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.820125 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bf1784a-776b-49c7-b64b-7ce52860df45\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://298a1a9571fbe118fe81ff3e7403e298bcde9b683cffab574fbb03d5adc1fb67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f767e89684b9b515da850360aaf9d7a02173395faf0654e9f0b3a4752a3d608b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92de340d859cff018a661f0a7f7fe209ffae161bf6f39deb005c7148591fc60b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a649a65ab118025ea70d1d7cf71236cb96992671c3bc7659d591640b53f941\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:21Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.869598 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:21Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.893421 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:21Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.913870 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.913915 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.913926 4799 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.913945 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.913957 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:21Z","lastTransitionTime":"2025-10-10T16:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.940398 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://995be5ba088a3758758ce5aaf735f0371692c52e49e3992c6478311411c8db42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d
0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Runnin
g\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:21Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:21 crc kubenswrapper[4799]: I1010 16:32:21.989400 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cbc87c392646ebf9c016f8c7b40bcec30e33a0a05ea4a896d1143c5f1086990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd261112ca7db4d0d76f6ab29a0347d64dccfff4db42ac9f55d6d7df1443ab23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7d0e536ad5143941dd18418b1ac7972a1136a841542b950f6891a386d43ca9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfe05183ad0b03415525e6aa2a8d52a5d63b8c273113c46326396df5e0c2bb12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6562d440ce1f1477fd09c15c34ab88e17e1fb2c2cae4b32a7bf8cbdd29f4d5a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4fcf53aeed6c07f775152de0faa9fa0671848df06d37cbca6ec7097d0024d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46332137ef0b23ab6df5f7eb7e21459fb1b18c35
80232f6c03ba11cb434c42e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed12cf3d02660853d079aded94ac925b8ee8f016804c32b9ac53791a041883a1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"message\\\":\\\"ithub.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1010 16:32:20.244796 6040 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1010 16:32:20.244943 6040 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1010 16:32:20.245339 6040 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1010 16:32:20.245518 6040 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1010 16:32:20.245703 6040 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1010 16:32:20.245820 6040 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1010 16:32:20.246257 6040 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1010 16:32:20.246279 6040 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1010 16:32:20.246306 6040 factory.go:656] Stopping watch factory\\\\nI1010 16:32:20.246330 6040 handler.go:208] Removed *v1.Node event handler 7\\\\nI1010 16:32:20.246345 6040 handler.go:208] Removed *v1.Node 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08406e220de50ba85f882a05117b5df8c9445a38c026bb85c95fc9f98f2d2cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:21Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.017919 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.017993 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.018017 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.018052 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.018072 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:22Z","lastTransitionTime":"2025-10-10T16:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.021277 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:22Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.059232 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:22Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.097575 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:22Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.101490 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7"] Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.102533 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.120713 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.120831 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.121005 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.121043 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.121067 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:22Z","lastTransitionTime":"2025-10-10T16:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.124187 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.144015 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.158187 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6f19a8ba-b77f-41ce-a4c6-e970b040dd8c-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-z97c7\" (UID: \"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.158275 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6f19a8ba-b77f-41ce-a4c6-e970b040dd8c-env-overrides\") pod \"ovnkube-control-plane-749d76644c-z97c7\" (UID: \"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.158322 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6f19a8ba-b77f-41ce-a4c6-e970b040dd8c-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-z97c7\" (UID: \"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.158365 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9spwd\" (UniqueName: \"kubernetes.io/projected/6f19a8ba-b77f-41ce-a4c6-e970b040dd8c-kube-api-access-9spwd\") pod \"ovnkube-control-plane-749d76644c-z97c7\" (UID: \"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.177204 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:22Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.215022 4799 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b030264f18288aa7687a91f7918f1ed2c2ad474637e32a054ea8c25b97aef45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:22Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.225939 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.225992 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.226009 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.226039 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.226061 4799 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:22Z","lastTransitionTime":"2025-10-10T16:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.259576 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:22Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.259634 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6f19a8ba-b77f-41ce-a4c6-e970b040dd8c-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-z97c7\" (UID: \"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.259826 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9spwd\" (UniqueName: \"kubernetes.io/projected/6f19a8ba-b77f-41ce-a4c6-e970b040dd8c-kube-api-access-9spwd\") pod \"ovnkube-control-plane-749d76644c-z97c7\" (UID: \"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c\") " 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.259932 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6f19a8ba-b77f-41ce-a4c6-e970b040dd8c-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-z97c7\" (UID: \"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.260019 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6f19a8ba-b77f-41ce-a4c6-e970b040dd8c-env-overrides\") pod \"ovnkube-control-plane-749d76644c-z97c7\" (UID: \"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.261103 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6f19a8ba-b77f-41ce-a4c6-e970b040dd8c-env-overrides\") pod \"ovnkube-control-plane-749d76644c-z97c7\" (UID: \"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.261332 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6f19a8ba-b77f-41ce-a4c6-e970b040dd8c-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-z97c7\" (UID: \"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.268677 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6f19a8ba-b77f-41ce-a4c6-e970b040dd8c-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-z97c7\" (UID: \"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.309912 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9spwd\" (UniqueName: \"kubernetes.io/projected/6f19a8ba-b77f-41ce-a4c6-e970b040dd8c-kube-api-access-9spwd\") pod \"ovnkube-control-plane-749d76644c-z97c7\" (UID: \"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.316561 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:22Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.329462 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.329517 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.329535 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.329562 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.329579 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:22Z","lastTransitionTime":"2025-10-10T16:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.361024 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:22Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.394987 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:22Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.402217 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:32:22 crc kubenswrapper[4799]: E1010 16:32:22.402375 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.402229 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:32:22 crc kubenswrapper[4799]: E1010 16:32:22.402506 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.426845 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.431982 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.432041 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.432065 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.432097 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.432119 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:22Z","lastTransitionTime":"2025-10-10T16:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:22 crc kubenswrapper[4799]: W1010 16:32:22.446709 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6f19a8ba_b77f_41ce_a4c6_e970b040dd8c.slice/crio-f3b24462982ef3562ea8d036c108a75169dd488795148f6a69ac0086fab2095e WatchSource:0}: Error finding container f3b24462982ef3562ea8d036c108a75169dd488795148f6a69ac0086fab2095e: Status 404 returned error can't find the container with id f3b24462982ef3562ea8d036c108a75169dd488795148f6a69ac0086fab2095e Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.450660 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156
f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-rel
ease-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:22Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.483203 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:22Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.519933 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:22Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.533988 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.534030 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.534042 4799 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.534062 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.534076 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:22Z","lastTransitionTime":"2025-10-10T16:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.561471 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://995be5ba088a3758758ce5aaf735f0371692c52e49e3992c6478311411c8db42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d
0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Runnin
g\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:22Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.598448 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bf1784a-776b-49c7-b64b-7ce52860df45\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://298a1a9571fbe118fe81ff3e7403e298bcde9b683cffab574fbb03d5adc1fb67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f767e89684b9b515da850360aaf9d7a02173395faf0654e9f0b3a4752a3d608b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92de340d859cff018a661f0a7f7fe209ffae161bf6f39deb005c7148591fc60b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io
/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a649a65ab118025ea70d1d7cf71236cb96992671c3bc7659d591640b53f941\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:22Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.634483 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:22Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.636253 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.636324 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.636349 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.636379 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.636402 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:22Z","lastTransitionTime":"2025-10-10T16:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.678951 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:22Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.716293 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:22Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.726884 4799 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mcwfc_abe7f2d9-ec99-4724-a01f-cc7096377e07/ovnkube-controller/1.log" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.728178 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mcwfc_abe7f2d9-ec99-4724-a01f-cc7096377e07/ovnkube-controller/0.log" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.731607 4799 generic.go:334] "Generic (PLEG): container finished" podID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerID="46332137ef0b23ab6df5f7eb7e21459fb1b18c3580232f6c03ba11cb434c42e8" exitCode=1 Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.731871 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" event={"ID":"abe7f2d9-ec99-4724-a01f-cc7096377e07","Type":"ContainerDied","Data":"46332137ef0b23ab6df5f7eb7e21459fb1b18c3580232f6c03ba11cb434c42e8"} Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.732040 4799 scope.go:117] "RemoveContainer" containerID="ed12cf3d02660853d079aded94ac925b8ee8f016804c32b9ac53791a041883a1" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.733681 4799 scope.go:117] "RemoveContainer" containerID="46332137ef0b23ab6df5f7eb7e21459fb1b18c3580232f6c03ba11cb434c42e8" Oct 10 16:32:22 crc kubenswrapper[4799]: E1010 16:32:22.734148 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-mcwfc_openshift-ovn-kubernetes(abe7f2d9-ec99-4724-a01f-cc7096377e07)\"" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.736268 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" event={"ID":"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c","Type":"ContainerStarted","Data":"f3b24462982ef3562ea8d036c108a75169dd488795148f6a69ac0086fab2095e"} Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.739498 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.739722 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.739960 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.740170 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.740327 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:22Z","lastTransitionTime":"2025-10-10T16:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.813228 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cbc87c392646ebf9c016f8c7b40bcec30e33a0a05ea4a896d1143c5f1086990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd261112ca7db4d0d76f6ab29a0347d64dccfff4db42ac9f55d6d7df1443ab23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://c7d0e536ad5143941dd18418b1ac7972a1136a841542b950f6891a386d43ca9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfe05183ad0b03415525e6aa2a8d52a5d63b8c273113c46326396df5e0c2bb12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6562d440ce1f1477fd09c15c34ab88e17e1fb2c2cae4b32a7bf8cbdd29f4d5a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4fcf53aeed6c07f775152de0faa9fa0671848df06d37cbca6ec7097d0024d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46332137ef0b23ab6df5f7eb7e21459fb1b18c3580232f6c03ba11cb434c42e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed12cf3d02660853d079aded94ac925b8ee8f016804c32b9ac53791a041883a1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"message\\\":\\\"ithub.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1010 16:32:20.244796 6040 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1010 16:32:20.244943 6040 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1010 16:32:20.245339 6040 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1010 16:32:20.245518 6040 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1010 16:32:20.245703 6040 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1010 16:32:20.245820 6040 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1010 16:32:20.246257 6040 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1010 16:32:20.246279 6040 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1010 16:32:20.246306 6040 factory.go:656] Stopping watch factory\\\\nI1010 16:32:20.246330 6040 handler.go:208] Removed *v1.Node event handler 7\\\\nI1010 16:32:20.246345 6040 handler.go:208] Removed *v1.Node 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08406e220de50ba85f882a05117b5df8c9445a38c026bb85c95fc9f98f2d2cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:22Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.833464 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z97c7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:22Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.842563 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.842618 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.842630 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.842648 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.843037 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:22Z","lastTransitionTime":"2025-10-10T16:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.847837 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:22Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.875570 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:22Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.911845 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:22Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.945863 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.945901 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.945910 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.945924 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.945934 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:22Z","lastTransitionTime":"2025-10-10T16:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.954154 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:22Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:22 crc kubenswrapper[4799]: I1010 16:32:22.991949 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b030264f18288aa7687a91f7918f1ed2c2ad474637e32a054ea8c25b97aef45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:22Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.035278 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:23Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.048428 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.048598 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.048699 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.048793 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.048876 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:23Z","lastTransitionTime":"2025-10-10T16:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.071927 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:23Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.111935 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b030264f18288aa7687a91f7918f1ed2c2ad474637e32a054ea8c25b97aef45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:23Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.152152 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.152197 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" 
Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.152216 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.152241 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.152258 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:23Z","lastTransitionTime":"2025-10-10T16:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.162488 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:23Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.195348 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:23Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.253043 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5
778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:23Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.254695 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.254730 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.254741 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.254781 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.254794 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:23Z","lastTransitionTime":"2025-10-10T16:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.275162 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:23Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.327301 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://995be5ba088a3758758ce5aaf735f0371692c52e49e3992c6478311411c8db42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:23Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.357577 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.357636 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:23 crc 
kubenswrapper[4799]: I1010 16:32:23.357652 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.357677 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.357694 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:23Z","lastTransitionTime":"2025-10-10T16:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.359455 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bf1784a-776b-49c7-b64b-7ce52860df45\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://298a1a9571fbe118fe81ff3e7403e298bcde9b683cffab574fbb03d5adc1fb67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f767e89684b9b515da850360aaf9d7a02173395faf0654e9f0b3a4752a3d608b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"con
tainerID\\\":\\\"cri-o://92de340d859cff018a661f0a7f7fe209ffae161bf6f39deb005c7148591fc60b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a649a65ab118025ea70d1d7cf71236cb96992671c3bc7659d591640b53f941\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:23Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.397862 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:23Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.402226 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:32:23 crc kubenswrapper[4799]: E1010 16:32:23.402413 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.437117 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:23Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.460564 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.460623 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.460645 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.460672 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.460692 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:23Z","lastTransitionTime":"2025-10-10T16:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.472617 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:23Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.520091 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cbc87c392646ebf9c016f8c7b40bcec30e33a0a05ea4a896d1143c5f1086990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd261112ca7db4d0d76f6ab29a0347d64dccfff4db42ac9f55d6d7df1443ab23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,
\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7d0e536ad5143941dd18418b1ac7972a1136a841542b950f6891a386d43ca9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfe05183ad0b03415525e6aa2a8d52a5d63b8c273113c46326396df5e0c2bb12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6562d440ce1f1477fd09c15c34ab88e17e1fb2c2cae4b32a7bf8cbdd29f4d5a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\
\\"cri-o://ff4fcf53aeed6c07f775152de0faa9fa0671848df06d37cbca6ec7097d0024d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46332137ef0b23ab6df5f7eb7e21459fb1b18c3580232f6c03ba11cb434c42e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed12cf3d02660853d079aded94ac925b8ee8f016804c32b9ac53791a041883a1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"message\\\":\\\"ithub.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1010 16:32:20.244796 6040 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1010 16:32:20.244943 6040 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1010 16:32:20.245339 6040 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1010 16:32:20.245518 6040 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1010 16:32:20.245703 6040 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1010 16:32:20.245820 6040 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1010 16:32:20.246257 6040 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1010 16:32:20.246279 6040 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1010 16:32:20.246306 6040 factory.go:656] Stopping watch factory\\\\nI1010 16:32:20.246330 6040 handler.go:208] Removed *v1.Node event handler 7\\\\nI1010 16:32:20.246345 6040 handler.go:208] Removed *v1.Node 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46332137ef0b23ab6df5f7eb7e21459fb1b18c3580232f6c03ba11cb434c42e8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"message\\\":\\\"0 16:32:21.839394 6234 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1010 16:32:21.839417 6234 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1010 16:32:21.839439 6234 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1010 16:32:21.839445 6234 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1010 16:32:21.839474 6234 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1010 16:32:21.840885 6234 handler.go:208] Removed *v1.Node event handler 7\\\\nI1010 16:32:21.840890 6234 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1010 16:32:21.840892 6234 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1010 16:32:21.841044 6234 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1010 16:32:21.840912 6234 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1010 16:32:21.840941 6234 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1010 16:32:21.841010 6234 handler.go:208] Removed *v1.Node event handler 2\\\\nI1010 16:32:21.841131 6234 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1010 16:32:21.841175 6234 factory.go:656] Stopping watch factory\\\\nI1010 16:32:21.841197 6234 ovnkube.go:599] Stopped ovnkube\\\\nI1010 16:32:21.841225 6234 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1010 
16:32:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08406e220de50ba85f882a05117b5df8c9445a38c026bb85c95fc9f98f2d2cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:23Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.553733 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z97c7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:23Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.564406 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.564536 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.564574 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.564600 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.564616 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:23Z","lastTransitionTime":"2025-10-10T16:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.604592 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:23Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.607148 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-k6hch"] Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.607877 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:32:23 crc kubenswrapper[4799]: E1010 16:32:23.607982 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.637002 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:23Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.667285 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.667342 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.667359 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.667382 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.667399 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:23Z","lastTransitionTime":"2025-10-10T16:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network 
plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.682398 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"1
92.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:23Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.714912 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b030264f18288aa7687a91f7918f1ed2c2ad474637e32a054ea8c25b97aef45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:23Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.742598 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" 
event={"ID":"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c","Type":"ContainerStarted","Data":"c9722a694091d19d16b7c08ac22e23532deca8f4bde306a0d651d5524484fd1e"} Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.742684 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" event={"ID":"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c","Type":"ContainerStarted","Data":"17b2b63923e40e58b4a3d352781758ecf7c0e63eb913813e0f738d19dfb05676"} Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.745605 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mcwfc_abe7f2d9-ec99-4724-a01f-cc7096377e07/ovnkube-controller/1.log" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.750022 4799 scope.go:117] "RemoveContainer" containerID="46332137ef0b23ab6df5f7eb7e21459fb1b18c3580232f6c03ba11cb434c42e8" Oct 10 16:32:23 crc kubenswrapper[4799]: E1010 16:32:23.750212 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-mcwfc_openshift-ovn-kubernetes(abe7f2d9-ec99-4724-a01f-cc7096377e07)\"" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.761099 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:23Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.770412 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.770474 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.770489 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.770515 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.770531 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:23Z","lastTransitionTime":"2025-10-10T16:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.779118 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7903c578-d05e-4ad7-8fd9-f438abf4a085-metrics-certs\") pod \"network-metrics-daemon-k6hch\" (UID: \"7903c578-d05e-4ad7-8fd9-f438abf4a085\") " pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.779211 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjhjl\" (UniqueName: \"kubernetes.io/projected/7903c578-d05e-4ad7-8fd9-f438abf4a085-kube-api-access-hjhjl\") pod \"network-metrics-daemon-k6hch\" (UID: \"7903c578-d05e-4ad7-8fd9-f438abf4a085\") " pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.794116 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:23Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.835474 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k6hch" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7903c578-d05e-4ad7-8fd9-f438abf4a085\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:23Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k6hch\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:23Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.875446 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.875510 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.875532 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.875561 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.875584 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:23Z","lastTransitionTime":"2025-10-10T16:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.880233 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjhjl\" (UniqueName: \"kubernetes.io/projected/7903c578-d05e-4ad7-8fd9-f438abf4a085-kube-api-access-hjhjl\") pod \"network-metrics-daemon-k6hch\" (UID: \"7903c578-d05e-4ad7-8fd9-f438abf4a085\") " pod="openshift-multus/network-metrics-daemon-k6hch"
Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.880495 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7903c578-d05e-4ad7-8fd9-f438abf4a085-metrics-certs\") pod \"network-metrics-daemon-k6hch\" (UID: \"7903c578-d05e-4ad7-8fd9-f438abf4a085\") " pod="openshift-multus/network-metrics-daemon-k6hch"
Oct 10 16:32:23 crc kubenswrapper[4799]: E1010 16:32:23.880700 4799 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Oct 10 16:32:23 crc kubenswrapper[4799]: E1010 16:32:23.880833 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7903c578-d05e-4ad7-8fd9-f438abf4a085-metrics-certs podName:7903c578-d05e-4ad7-8fd9-f438abf4a085 nodeName:}" failed. No retries permitted until 2025-10-10 16:32:24.380797676 +0000 UTC m=+37.889121831 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7903c578-d05e-4ad7-8fd9-f438abf4a085-metrics-certs") pod "network-metrics-daemon-k6hch" (UID: "7903c578-d05e-4ad7-8fd9-f438abf4a085") : object "openshift-multus"/"metrics-daemon-secret" not registered
Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.891240 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5
778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:23Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.910841 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjhjl\" (UniqueName: \"kubernetes.io/projected/7903c578-d05e-4ad7-8fd9-f438abf4a085-kube-api-access-hjhjl\") pod \"network-metrics-daemon-k6hch\" (UID: \"7903c578-d05e-4ad7-8fd9-f438abf4a085\") " pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.931310 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:23Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.978748 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.978821 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.978835 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.978852 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.978865 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:23Z","lastTransitionTime":"2025-10-10T16:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:23 crc kubenswrapper[4799]: I1010 16:32:23.982337 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://995be5ba088a3758758ce5aaf735f0371692c52e49e3992c6478311411c8db42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb51
4aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:23Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.017143 4799 
status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bf1784a-776b-49c7-b64b-7ce52860df45\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://298a1a9571fbe118fe81ff3e7403e298bcde9b683cffab574fbb03d5adc1fb67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f767e89684b9b515da850360aaf9d7a02173395faf0654e9f0b3a4752a3d608b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92de340d859cff018a661f0a7f7fe209ffae161bf6f39deb005c7148591fc60b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"c
ri-o://c3a649a65ab118025ea70d1d7cf71236cb96992671c3bc7659d591640b53f941\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:24Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.056666 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:24Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.081653 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.081709 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.081726 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.081781 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.081799 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:24Z","lastTransitionTime":"2025-10-10T16:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.096622 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\
\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:24Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.135743 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c2
8fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:24Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.184966 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.185026 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.185046 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.185077 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.185104 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:24Z","lastTransitionTime":"2025-10-10T16:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.188916 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cbc87c392646ebf9c016f8c7b40bcec30e33a0a05ea4a896d1143c5f1086990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd261112ca7db4d0d76f6ab29a0347d64dccfff4db42ac9f55d6d7df1443ab23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://c7d0e536ad5143941dd18418b1ac7972a1136a841542b950f6891a386d43ca9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfe05183ad0b03415525e6aa2a8d52a5d63b8c273113c46326396df5e0c2bb12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6562d440ce1f1477fd09c15c34ab88e17e1fb2c2cae4b32a7bf8cbdd29f4d5a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4fcf53aeed6c07f775152de0faa9fa0671848df06d37cbca6ec7097d0024d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46332137ef0b23ab6df5f7eb7e21459fb1b18c3580232f6c03ba11cb434c42e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed12cf3d02660853d079aded94ac925b8ee8f016804c32b9ac53791a041883a1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"message\\\":\\\"ithub.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1010 16:32:20.244796 6040 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1010 16:32:20.244943 6040 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1010 16:32:20.245339 6040 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1010 16:32:20.245518 6040 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1010 16:32:20.245703 6040 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1010 16:32:20.245820 6040 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1010 16:32:20.246257 6040 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1010 16:32:20.246279 6040 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1010 16:32:20.246306 6040 factory.go:656] Stopping watch factory\\\\nI1010 16:32:20.246330 6040 handler.go:208] Removed *v1.Node event handler 7\\\\nI1010 16:32:20.246345 6040 handler.go:208] Removed *v1.Node 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46332137ef0b23ab6df5f7eb7e21459fb1b18c3580232f6c03ba11cb434c42e8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"message\\\":\\\"0 16:32:21.839394 6234 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1010 16:32:21.839417 6234 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1010 16:32:21.839439 6234 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1010 16:32:21.839445 6234 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1010 16:32:21.839474 6234 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1010 16:32:21.840885 6234 handler.go:208] Removed *v1.Node event handler 7\\\\nI1010 16:32:21.840890 6234 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1010 16:32:21.840892 6234 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1010 16:32:21.841044 6234 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1010 16:32:21.840912 6234 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1010 16:32:21.840941 6234 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1010 16:32:21.841010 6234 handler.go:208] Removed *v1.Node event handler 2\\\\nI1010 16:32:21.841131 6234 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1010 16:32:21.841175 6234 factory.go:656] Stopping watch factory\\\\nI1010 16:32:21.841197 6234 ovnkube.go:599] Stopped ovnkube\\\\nI1010 16:32:21.841225 6234 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1010 
16:32:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08406e220de50ba85f882a05117b5df8c9445a38c026bb85c95fc9f98f2d2cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:24Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.215929 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z97c7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:24Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.260133 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:24Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.287753 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.287846 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.287868 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.287899 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.287922 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:24Z","lastTransitionTime":"2025-10-10T16:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.295636 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:24Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.338725 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:24Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.385186 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7903c578-d05e-4ad7-8fd9-f438abf4a085-metrics-certs\") pod \"network-metrics-daemon-k6hch\" (UID: \"7903c578-d05e-4ad7-8fd9-f438abf4a085\") " pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:32:24 crc kubenswrapper[4799]: E1010 16:32:24.385432 4799 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 10 16:32:24 crc kubenswrapper[4799]: E1010 16:32:24.385585 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7903c578-d05e-4ad7-8fd9-f438abf4a085-metrics-certs podName:7903c578-d05e-4ad7-8fd9-f438abf4a085 nodeName:}" failed. No retries permitted until 2025-10-10 16:32:25.385550424 +0000 UTC m=+38.893874629 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7903c578-d05e-4ad7-8fd9-f438abf4a085-metrics-certs") pod "network-metrics-daemon-k6hch" (UID: "7903c578-d05e-4ad7-8fd9-f438abf4a085") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.386074 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f5840
8f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\
\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:24Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.392001 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.392048 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.392066 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.392089 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.392106 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:24Z","lastTransitionTime":"2025-10-10T16:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.402569 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:32:24 crc kubenswrapper[4799]: E1010 16:32:24.402732 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.402880 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:32:24 crc kubenswrapper[4799]: E1010 16:32:24.402970 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.413825 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:
09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:24Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.457860 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k6hch" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7903c578-d05e-4ad7-8fd9-f438abf4a085\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:23Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k6hch\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-10T16:32:24Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.494842 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bf1784a-776b-49c7-b64b-7ce52860df45\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://298a1a9571fbe118fe81ff3e7403e298bcde9b683cffab574fbb03d5adc1fb67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f767e89684b9b515da850360aaf9d7a02173395faf0654e9f0b3a4752a3d608b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92de340d859cff018a661f0a7f7fe209ffae161bf6f39deb005c7148591fc60b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a649a65ab118025ea70d1d7cf71236cb96992671c3bc7659d591640b53f941\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:24Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.496221 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.496274 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.496291 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.496326 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.496342 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:24Z","lastTransitionTime":"2025-10-10T16:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.537222 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:24Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.578498 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:24Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.599041 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.599095 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.599104 4799 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.599120 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.599132 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:24Z","lastTransitionTime":"2025-10-10T16:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.621213 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://995be5ba088a3758758ce5aaf735f0371692c52e49e3992c6478311411c8db42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d
0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Runnin
g\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:24Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.653746 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\
\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods 
\\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:24Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.696401 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:24Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.701363 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.701420 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.701441 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.701466 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.701482 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:24Z","lastTransitionTime":"2025-10-10T16:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.738067 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:24Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.776374 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:24Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.804631 4799 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.804702 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.804723 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.804795 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.804824 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:24Z","lastTransitionTime":"2025-10-10T16:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.819471 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cbc87c392646ebf9c016f8c7b40bcec30e33a0a05ea4a896d1143c5f1086990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd261112ca7db4d0d76f6ab29a0347d64dccfff4db42ac9f55d6d7df1443ab23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7d0e536ad5143941dd18418b1ac7972a1136a841542b950f6891a386d43ca9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfe05183ad0b03415525e6aa2a8d52a5d63b8c273113c46326396df5e0c2bb12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6562d440ce1f1477fd09c15c34ab88e17e1fb2c2cae4b32a7bf8cbdd29f4d5a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4fcf53aeed6c07f775152de0faa9fa0671848df06d37cbca6ec7097d0024d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46332137ef0b23ab6df5f7eb7e21459fb1b18c35
80232f6c03ba11cb434c42e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46332137ef0b23ab6df5f7eb7e21459fb1b18c3580232f6c03ba11cb434c42e8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"message\\\":\\\"0 16:32:21.839394 6234 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1010 16:32:21.839417 6234 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1010 16:32:21.839439 6234 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1010 16:32:21.839445 6234 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1010 16:32:21.839474 6234 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1010 16:32:21.840885 6234 handler.go:208] Removed *v1.Node event handler 7\\\\nI1010 16:32:21.840890 6234 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1010 16:32:21.840892 6234 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1010 16:32:21.841044 6234 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1010 16:32:21.840912 6234 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1010 16:32:21.840941 6234 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1010 16:32:21.841010 6234 handler.go:208] Removed *v1.Node event handler 2\\\\nI1010 16:32:21.841131 6234 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1010 16:32:21.841175 6234 factory.go:656] Stopping watch factory\\\\nI1010 16:32:21.841197 6234 ovnkube.go:599] Stopped ovnkube\\\\nI1010 16:32:21.841225 6234 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1010 16:32:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-mcwfc_openshift-ovn-kubernetes(abe7f2d9-ec99-4724-a01f-cc7096377e07)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08406e220de50ba85f882a05117b5df8c9445a38c026bb85c95fc9f98f2d2cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:24Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.857192 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17b2b63923e40e58b4a3d352781758ecf7c0e63eb913813e0f738d19dfb05676\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9722a694091d19d16b7c08ac22e23532deca8f4bde306a0d651d5524484fd1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z97c7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:24Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.900505 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:24Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.907369 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.907419 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.907437 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.907463 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.907481 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:24Z","lastTransitionTime":"2025-10-10T16:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.935227 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:24Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:24 crc kubenswrapper[4799]: I1010 16:32:24.974385 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:24Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.010911 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.010984 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.011007 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.011041 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.011065 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:25Z","lastTransitionTime":"2025-10-10T16:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.016170 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b030264f18288aa7687a91f7918f1ed2c2ad474637e32a054ea8c25b97aef45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"h
ostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:25Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.091967 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:32:25 crc kubenswrapper[4799]: E1010 16:32:25.092226 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:32:41.092197199 +0000 UTC m=+54.600521344 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.114296 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.114367 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.114385 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.114413 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.114430 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:25Z","lastTransitionTime":"2025-10-10T16:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.193534 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.193700 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:32:25 crc kubenswrapper[4799]: E1010 16:32:25.193814 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 10 16:32:25 crc kubenswrapper[4799]: E1010 16:32:25.193861 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 10 16:32:25 crc kubenswrapper[4799]: E1010 16:32:25.193899 4799 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 10 16:32:25 crc kubenswrapper[4799]: E1010 16:32:25.193911 4799 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 10 16:32:25 crc kubenswrapper[4799]: E1010 16:32:25.194003 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-10 16:32:41.193962012 +0000 UTC m=+54.702286167 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.193855 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:32:25 crc kubenswrapper[4799]: E1010 16:32:25.194011 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 10 16:32:25 crc kubenswrapper[4799]: E1010 16:32:25.194110 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 10 16:32:25 crc kubenswrapper[4799]: E1010 16:32:25.194136 4799 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 10 16:32:25 crc kubenswrapper[4799]: E1010 16:32:25.194036 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-10 16:32:41.194020763 +0000 UTC m=+54.702344918 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.194237 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:32:25 crc kubenswrapper[4799]: E1010 16:32:25.194314 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-10 16:32:41.194289569 +0000 UTC m=+54.702613724 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 10 16:32:25 crc kubenswrapper[4799]: E1010 16:32:25.194318 4799 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 10 16:32:25 crc kubenswrapper[4799]: E1010 16:32:25.194407 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-10 16:32:41.194386392 +0000 UTC m=+54.702710547 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.217688 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.217741 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.217790 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.217816 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.217843 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:25Z","lastTransitionTime":"2025-10-10T16:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.321507 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.321575 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.321594 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.321622 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.321640 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:25Z","lastTransitionTime":"2025-10-10T16:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.395919 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7903c578-d05e-4ad7-8fd9-f438abf4a085-metrics-certs\") pod \"network-metrics-daemon-k6hch\" (UID: \"7903c578-d05e-4ad7-8fd9-f438abf4a085\") " pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:32:25 crc kubenswrapper[4799]: E1010 16:32:25.396133 4799 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 10 16:32:25 crc kubenswrapper[4799]: E1010 16:32:25.396546 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7903c578-d05e-4ad7-8fd9-f438abf4a085-metrics-certs podName:7903c578-d05e-4ad7-8fd9-f438abf4a085 nodeName:}" failed. No retries permitted until 2025-10-10 16:32:27.396485663 +0000 UTC m=+40.904809818 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7903c578-d05e-4ad7-8fd9-f438abf4a085-metrics-certs") pod "network-metrics-daemon-k6hch" (UID: "7903c578-d05e-4ad7-8fd9-f438abf4a085") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.401741 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.401801 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:32:25 crc kubenswrapper[4799]: E1010 16:32:25.401983 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:32:25 crc kubenswrapper[4799]: E1010 16:32:25.402161 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.425322 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.425386 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.425405 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.425431 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.425450 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:25Z","lastTransitionTime":"2025-10-10T16:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.528226 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.528296 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.528324 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.528358 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.528383 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:25Z","lastTransitionTime":"2025-10-10T16:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.630906 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.630972 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.630991 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.631017 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.631034 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:25Z","lastTransitionTime":"2025-10-10T16:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.733933 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.734001 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.734018 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.734045 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.734062 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:25Z","lastTransitionTime":"2025-10-10T16:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.837071 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.837130 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.837149 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.837174 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.837191 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:25Z","lastTransitionTime":"2025-10-10T16:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.940462 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.940525 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.940543 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.940571 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:25 crc kubenswrapper[4799]: I1010 16:32:25.940590 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:25Z","lastTransitionTime":"2025-10-10T16:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.043999 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.044068 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.044086 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.044110 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.044132 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:26Z","lastTransitionTime":"2025-10-10T16:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.148047 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.148130 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.148149 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.148176 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.148195 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:26Z","lastTransitionTime":"2025-10-10T16:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.251566 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.251648 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.251691 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.251727 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.251750 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:26Z","lastTransitionTime":"2025-10-10T16:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.355053 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.355108 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.355120 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.355140 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.355150 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:26Z","lastTransitionTime":"2025-10-10T16:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.401713 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:32:26 crc kubenswrapper[4799]: E1010 16:32:26.401829 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.401713 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:32:26 crc kubenswrapper[4799]: E1010 16:32:26.401894 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.458230 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.458282 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.458299 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.458324 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.458342 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:26Z","lastTransitionTime":"2025-10-10T16:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.561247 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.561292 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.561303 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.561320 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.561331 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:26Z","lastTransitionTime":"2025-10-10T16:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.663315 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.663366 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.663382 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.663405 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.663421 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:26Z","lastTransitionTime":"2025-10-10T16:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.766369 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.766447 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.766471 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.766500 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.766520 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:26Z","lastTransitionTime":"2025-10-10T16:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.869969 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.870012 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.870022 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.870038 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.870049 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:26Z","lastTransitionTime":"2025-10-10T16:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.972539 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.972585 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.972596 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.972612 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:26 crc kubenswrapper[4799]: I1010 16:32:26.972624 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:26Z","lastTransitionTime":"2025-10-10T16:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.074422 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.074473 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.074483 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.074500 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.074512 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:27Z","lastTransitionTime":"2025-10-10T16:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.176783 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.176841 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.176857 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.176886 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.176900 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:27Z","lastTransitionTime":"2025-10-10T16:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.280051 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.280124 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.280141 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.280167 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.280185 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:27Z","lastTransitionTime":"2025-10-10T16:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.383799 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.383863 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.383886 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.383914 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.383934 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:27Z","lastTransitionTime":"2025-10-10T16:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.402494 4799 scope.go:117] "RemoveContainer" containerID="10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.402963 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.403025 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:32:27 crc kubenswrapper[4799]: E1010 16:32:27.403312 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:32:27 crc kubenswrapper[4799]: E1010 16:32:27.403483 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.418578 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7903c578-d05e-4ad7-8fd9-f438abf4a085-metrics-certs\") pod \"network-metrics-daemon-k6hch\" (UID: \"7903c578-d05e-4ad7-8fd9-f438abf4a085\") " pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:32:27 crc kubenswrapper[4799]: E1010 16:32:27.418826 4799 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 10 16:32:27 crc kubenswrapper[4799]: E1010 16:32:27.418915 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7903c578-d05e-4ad7-8fd9-f438abf4a085-metrics-certs podName:7903c578-d05e-4ad7-8fd9-f438abf4a085 nodeName:}" failed. No retries permitted until 2025-10-10 16:32:31.418893723 +0000 UTC m=+44.927217848 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7903c578-d05e-4ad7-8fd9-f438abf4a085-metrics-certs") pod "network-metrics-daemon-k6hch" (UID: "7903c578-d05e-4ad7-8fd9-f438abf4a085") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.425605 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.448087 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.488059 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.488126 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.488144 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.488169 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.488186 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:27Z","lastTransitionTime":"2025-10-10T16:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.489352 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cbc87c392646ebf9c016f8c7b40bcec30e33a0a05ea4a896d1143c5f1086990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd261112ca7db4d0d76f6ab29a0347d64dccfff4db42ac9f55d6d7df1443ab23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://c7d0e536ad5143941dd18418b1ac7972a1136a841542b950f6891a386d43ca9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfe05183ad0b03415525e6aa2a8d52a5d63b8c273113c46326396df5e0c2bb12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6562d440ce1f1477fd09c15c34ab88e17e1fb2c2cae4b32a7bf8cbdd29f4d5a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4fcf53aeed6c07f775152de0faa9fa0671848df06d37cbca6ec7097d0024d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46332137ef0b23ab6df5f7eb7e21459fb1b18c3580232f6c03ba11cb434c42e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46332137ef0b23ab6df5f7eb7e21459fb1b18c3580232f6c03ba11cb434c42e8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"message\\\":\\\"0 16:32:21.839394 6234 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1010 16:32:21.839417 6234 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1010 16:32:21.839439 6234 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1010 16:32:21.839445 6234 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1010 16:32:21.839474 6234 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1010 16:32:21.840885 6234 handler.go:208] Removed *v1.Node event handler 7\\\\nI1010 16:32:21.840890 6234 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1010 16:32:21.840892 6234 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1010 16:32:21.841044 6234 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1010 16:32:21.840912 6234 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1010 16:32:21.840941 6234 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1010 16:32:21.841010 6234 handler.go:208] Removed *v1.Node event handler 2\\\\nI1010 16:32:21.841131 6234 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1010 16:32:21.841175 6234 factory.go:656] Stopping watch factory\\\\nI1010 16:32:21.841197 6234 ovnkube.go:599] Stopped ovnkube\\\\nI1010 16:32:21.841225 6234 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1010 16:32:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-mcwfc_openshift-ovn-kubernetes(abe7f2d9-ec99-4724-a01f-cc7096377e07)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08406e220de50ba85f882a05117b5df8c9445a38c026bb85c95fc9f98f2d2cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.506078 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17b2b63923e40e58b4a3d352781758ecf7c0e63eb913813e0f738d19dfb05676\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9722a694091d19d16b7c08ac22e23532deca8f4bde306a0d651d5524484fd1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z97c7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.526024 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.541751 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.558160 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.574841 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.588914 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b030264f18288aa7687a91f7918f1ed2c2ad474637e32a054ea8c25b97aef45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true
,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.592153 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.592197 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.592212 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.592233 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.592247 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:27Z","lastTransitionTime":"2025-10-10T16:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.609703 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.621207 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.635483 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k6hch" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7903c578-d05e-4ad7-8fd9-f438abf4a085\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:23Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k6hch\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.666635 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5
778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.690256 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.696052 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.696100 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.696111 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.696130 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.696142 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:27Z","lastTransitionTime":"2025-10-10T16:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.707225 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://995be5ba088a3758758ce5aaf735f0371692c52e49e3992c6478311411c8db42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.727623 4799 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bf1784a-776b-49c7-b64b-7ce52860df45\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://298a1a9571fbe118fe81ff3e7403e298bcde9b683cffab574fbb03d5adc1fb67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f767e89684b9b515da850360aaf9d7a02173395faf0654e9f0b3a4752a3d608b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92de340d859cff018a661f0a7f7fe209ffae161bf6f39deb005c7148591fc60b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a649a65ab118025ea70d1d7cf71236cb96992671c3bc7659
d591640b53f941\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.747476 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.766602 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.768821 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f7eb0f742793fbd0bee8e88732ec832748e77d9226a926def177968f24a9cf06"} Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.769325 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.790687 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7eb0f742793fbd0bee8e88732ec832748e77d9226a926def177968f24a9cf06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.799426 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.799471 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.799482 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.799503 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.799515 4799 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:27Z","lastTransitionTime":"2025-10-10T16:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.809749 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.827649 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.840208 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.862260 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cbc87c392646ebf9c016f8c7b40bcec30e33a0a05ea4a896d1143c5f1086990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd261112ca7db4d0d76f6ab29a0347d64dccfff4db42ac9f55d6d7df1443ab23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7d0e536ad5143941dd18418b1ac7972a1136a841542b950f6891a386d43ca9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfe05183ad0b03415525e6aa2a8d52a5d63b8c273113c46326396df5e0c2bb12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6562d440ce1f1477fd09c15c34ab88e17e1fb2c2cae4b32a7bf8cbdd29f4d5a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4fcf53aeed6c07f775152de0faa9fa0671848df06d37cbca6ec7097d0024d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46332137ef0b23ab6df5f7eb7e21459fb1b18c35
80232f6c03ba11cb434c42e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46332137ef0b23ab6df5f7eb7e21459fb1b18c3580232f6c03ba11cb434c42e8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"message\\\":\\\"0 16:32:21.839394 6234 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1010 16:32:21.839417 6234 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1010 16:32:21.839439 6234 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1010 16:32:21.839445 6234 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1010 16:32:21.839474 6234 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1010 16:32:21.840885 6234 handler.go:208] Removed *v1.Node event handler 7\\\\nI1010 16:32:21.840890 6234 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1010 16:32:21.840892 6234 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1010 16:32:21.841044 6234 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1010 16:32:21.840912 6234 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1010 16:32:21.840941 6234 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1010 16:32:21.841010 6234 handler.go:208] Removed *v1.Node event handler 2\\\\nI1010 16:32:21.841131 6234 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1010 16:32:21.841175 6234 factory.go:656] Stopping watch factory\\\\nI1010 16:32:21.841197 6234 ovnkube.go:599] Stopped ovnkube\\\\nI1010 16:32:21.841225 6234 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1010 16:32:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-mcwfc_openshift-ovn-kubernetes(abe7f2d9-ec99-4724-a01f-cc7096377e07)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08406e220de50ba85f882a05117b5df8c9445a38c026bb85c95fc9f98f2d2cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.878129 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17b2b63923e40e58b4a3d352781758ecf7c0e63eb913813e0f738d19dfb05676\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9722a694091d19d16b7c08ac22e23532deca8f4bde306a0d651d5524484fd1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z97c7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.894816 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.902648 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.902709 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.902727 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.902749 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.902787 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:27Z","lastTransitionTime":"2025-10-10T16:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.912324 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.926904 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.936732 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b030264f18288aa7687a91f7918f1ed2c2ad474637e32a054ea8c25b97aef45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.957691 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5
778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.968283 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\
\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.979388 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k6hch" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7903c578-d05e-4ad7-8fd9-f438abf4a085\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:23Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-k6hch\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:27 crc kubenswrapper[4799]: I1010 16:32:27.991933 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bf1784a-776b-49c7-b64b-7ce52860df45\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://298a1a9571fbe118fe81ff3e7403e298bcde9b683cffab574fbb03d5adc1fb67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f767e89684b9b515da850360aaf9d7a02173395faf0654e9f0b3a4752a3d608b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92de340d859cff018a661f0a7f7fe209ffae161bf6f39deb005c7148591fc60b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\
\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a649a65ab118025ea70d1d7cf71236cb96992671c3bc7659d591640b53f941\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:28 crc kubenswrapper[4799]: I1010 16:32:28.002852 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:28Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:28 crc kubenswrapper[4799]: I1010 16:32:28.005074 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:28 crc kubenswrapper[4799]: I1010 16:32:28.005105 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:28 crc kubenswrapper[4799]: I1010 16:32:28.005115 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:28 crc kubenswrapper[4799]: I1010 16:32:28.005130 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:28 crc kubenswrapper[4799]: I1010 16:32:28.005140 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:28Z","lastTransitionTime":"2025-10-10T16:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:28 crc kubenswrapper[4799]: I1010 16:32:28.015403 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:28Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:28 crc kubenswrapper[4799]: I1010 16:32:28.031610 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://995be5ba088a3758758ce5aaf735f0371692c52e49e3992c6478311411c8db42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:28Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:28 crc kubenswrapper[4799]: I1010 16:32:28.108005 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:28 crc kubenswrapper[4799]: I1010 16:32:28.108041 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:28 crc 
kubenswrapper[4799]: I1010 16:32:28.108049 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:28 crc kubenswrapper[4799]: I1010 16:32:28.108065 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:28 crc kubenswrapper[4799]: I1010 16:32:28.108074 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:28Z","lastTransitionTime":"2025-10-10T16:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[... the five-record status block (NodeHasSufficientMemory, NodeHasNoDiskPressure, NodeHasSufficientPID, NodeNotReady, "Node became not ready") repeats verbatim roughly every 100 ms from 16:32:28.210 through 16:32:30.665; only the timestamps change ...]
Oct 10 16:32:28 crc kubenswrapper[4799]: I1010 16:32:28.401688 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 10 16:32:28 crc kubenswrapper[4799]: I1010 16:32:28.401704 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 10 16:32:28 crc kubenswrapper[4799]: E1010 16:32:28.401929 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 10 16:32:28 crc kubenswrapper[4799]: E1010 16:32:28.402053 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
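The sync errors above all trace back to one root cause: the kubelet finds no CNI network configuration on disk, so it cannot give any pod sandbox a network. Below is a minimal Go sketch of the kind of check an operator might script while waiting for the network plugin to write its config; the directory path is taken from the log itself, while the helper and the set of extensions checked are illustrative assumptions, not kubelet code.

    // cnicheck: reports whether any CNI config file exists in the
    // directory the kubelet complains about in the log above.
    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func main() {
        dir := "/etc/kubernetes/cni/net.d" // path from the kubelet error message
        entries, err := os.ReadDir(dir)
        if err != nil {
            fmt.Println("cannot read", dir, "->", err)
            return
        }
        found := false
        for _, e := range entries {
            // Typical CNI config file suffixes (assumption for illustration).
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json":
                fmt.Println("CNI config present:", e.Name())
                found = true
            }
        }
        if !found {
            fmt.Println("no CNI configuration file found; the network plugin has not written one yet")
        }
    }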
Oct 10 16:32:29 crc kubenswrapper[4799]: I1010 16:32:29.402390 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 10 16:32:29 crc kubenswrapper[4799]: I1010 16:32:29.402491 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch"
Oct 10 16:32:29 crc kubenswrapper[4799]: E1010 16:32:29.402574 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 10 16:32:29 crc kubenswrapper[4799]: E1010 16:32:29.402624 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085"
Oct 10 16:32:30 crc kubenswrapper[4799]: I1010 16:32:30.401432 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 10 16:32:30 crc kubenswrapper[4799]: I1010 16:32:30.401433 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 10 16:32:30 crc kubenswrapper[4799]: E1010 16:32:30.401585 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 10 16:32:30 crc kubenswrapper[4799]: E1010 16:32:30.401715 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
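Each setters.go record above serializes the node's Ready condition as a single JSON object, and the status patch in the record that follows carries an array of conditions with the same shape. A small self-contained Go sketch that decodes that payload follows; the struct is hand-rolled for illustration from the field names visible in the log, rather than the upstream k8s.io/api type.

    // Decodes the condition payload that setters.go logs with every
    // "Node became not ready" record above.
    package main

    import (
        "encoding/json"
        "fmt"
    )

    // nodeCondition mirrors the JSON fields shown in the log (illustrative).
    type nodeCondition struct {
        Type               string `json:"type"`
        Status             string `json:"status"`
        LastHeartbeatTime  string `json:"lastHeartbeatTime"`
        LastTransitionTime string `json:"lastTransitionTime"`
        Reason             string `json:"reason"`
        Message            string `json:"message"`
    }

    func main() {
        // Payload copied from the 16:32:28.108074 record above.
        raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:28Z","lastTransitionTime":"2025-10-10T16:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}`

        var c nodeCondition
        if err := json.Unmarshal([]byte(raw), &c); err != nil {
            panic(err)
        }
        // A node is schedulable only while the Ready condition is "True".
        fmt.Printf("condition=%s status=%s reason=%s\n", c.Type, c.Status, c.Reason)
        fmt.Println("message:", c.Message)
    }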
Oct 10 16:32:30 crc kubenswrapper[4799]: E1010 16:32:30.633160 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:30Z is after 
2025-08-24T17:21:41Z"
[... the "Error updating node status, will retry" record above repeats verbatim at 16:32:30.659631 and again at 16:32:30.687962, each retry failing with the same expired-certificate error from the node.network-node-identity.openshift.io webhook ...]
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:30Z is after 
2025-08-24T17:21:41Z" Oct 10 16:32:30 crc kubenswrapper[4799]: I1010 16:32:30.693589 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:30 crc kubenswrapper[4799]: I1010 16:32:30.693642 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:30 crc kubenswrapper[4799]: I1010 16:32:30.693659 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:30 crc kubenswrapper[4799]: I1010 16:32:30.693684 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:30 crc kubenswrapper[4799]: I1010 16:32:30.693705 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:30Z","lastTransitionTime":"2025-10-10T16:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:30 crc kubenswrapper[4799]: E1010 16:32:30.715244 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[…],\\\"nodeInfo\\\":{…},\\\"runtimeHandlers\\\":[…]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:30Z is after 
2025-08-24T17:21:41Z" Oct 10 16:32:30 crc kubenswrapper[4799]: I1010 16:32:30.720991 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:30 crc kubenswrapper[4799]: I1010 16:32:30.721048 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:30 crc kubenswrapper[4799]: I1010 16:32:30.721066 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:30 crc kubenswrapper[4799]: I1010 16:32:30.721091 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:30 crc kubenswrapper[4799]: I1010 16:32:30.721109 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:30Z","lastTransitionTime":"2025-10-10T16:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:30 crc kubenswrapper[4799]: E1010 16:32:30.741209 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[…],\\\"nodeInfo\\\":{…},\\\"runtimeHandlers\\\":[…]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:30Z is after 
2025-08-24T17:21:41Z" Oct 10 16:32:30 crc kubenswrapper[4799]: E1010 16:32:30.741430 4799 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 10 16:32:30 crc kubenswrapper[4799]: I1010 16:32:30.743834 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:30 crc kubenswrapper[4799]: I1010 16:32:30.743904 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:30 crc kubenswrapper[4799]: I1010 16:32:30.743927 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:30 crc kubenswrapper[4799]: I1010 16:32:30.743959 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:30 crc kubenswrapper[4799]: I1010 16:32:30.743982 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:30Z","lastTransitionTime":"2025-10-10T16:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:30 crc kubenswrapper[4799]: I1010 16:32:30.846871 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:30 crc kubenswrapper[4799]: I1010 16:32:30.846905 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:30 crc kubenswrapper[4799]: I1010 16:32:30.846916 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:30 crc kubenswrapper[4799]: I1010 16:32:30.846934 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:30 crc kubenswrapper[4799]: I1010 16:32:30.846946 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:30Z","lastTransitionTime":"2025-10-10T16:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:30 crc kubenswrapper[4799]: I1010 16:32:30.950354 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:30 crc kubenswrapper[4799]: I1010 16:32:30.950411 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:30 crc kubenswrapper[4799]: I1010 16:32:30.950429 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:30 crc kubenswrapper[4799]: I1010 16:32:30.950455 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:30 crc kubenswrapper[4799]: I1010 16:32:30.950471 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:30Z","lastTransitionTime":"2025-10-10T16:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.053110 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.053158 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.053180 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.053206 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.053227 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:31Z","lastTransitionTime":"2025-10-10T16:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.156021 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.156054 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.156064 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.156081 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.156092 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:31Z","lastTransitionTime":"2025-10-10T16:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.259091 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.259150 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.259167 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.259196 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.259214 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:31Z","lastTransitionTime":"2025-10-10T16:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.362714 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.362832 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.362858 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.362891 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.362911 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:31Z","lastTransitionTime":"2025-10-10T16:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.401913 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.401934 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:32:31 crc kubenswrapper[4799]: E1010 16:32:31.402114 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:32:31 crc kubenswrapper[4799]: E1010 16:32:31.402325 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.465590 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.465793 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.465813 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.465839 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.465886 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:31Z","lastTransitionTime":"2025-10-10T16:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.478223 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7903c578-d05e-4ad7-8fd9-f438abf4a085-metrics-certs\") pod \"network-metrics-daemon-k6hch\" (UID: \"7903c578-d05e-4ad7-8fd9-f438abf4a085\") " pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:32:31 crc kubenswrapper[4799]: E1010 16:32:31.478424 4799 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 10 16:32:31 crc kubenswrapper[4799]: E1010 16:32:31.478516 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7903c578-d05e-4ad7-8fd9-f438abf4a085-metrics-certs podName:7903c578-d05e-4ad7-8fd9-f438abf4a085 nodeName:}" failed. No retries permitted until 2025-10-10 16:32:39.47849353 +0000 UTC m=+52.986817675 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7903c578-d05e-4ad7-8fd9-f438abf4a085-metrics-certs") pod "network-metrics-daemon-k6hch" (UID: "7903c578-d05e-4ad7-8fd9-f438abf4a085") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.570062 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.570131 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.570154 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.570189 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.570213 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:31Z","lastTransitionTime":"2025-10-10T16:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.674503 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.674585 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.674616 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.674650 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.674673 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:31Z","lastTransitionTime":"2025-10-10T16:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.778174 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.778232 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.778248 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.778273 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.778290 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:31Z","lastTransitionTime":"2025-10-10T16:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.881720 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.881834 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.881860 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.881892 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.881924 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:31Z","lastTransitionTime":"2025-10-10T16:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.985144 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.985208 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.985226 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.985253 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:31 crc kubenswrapper[4799]: I1010 16:32:31.985272 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:31Z","lastTransitionTime":"2025-10-10T16:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.089876 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.089948 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.089971 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.090003 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.090026 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:32Z","lastTransitionTime":"2025-10-10T16:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.192856 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.192915 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.192933 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.192955 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.192972 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:32Z","lastTransitionTime":"2025-10-10T16:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.296386 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.296440 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.296456 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.296482 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.296502 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:32Z","lastTransitionTime":"2025-10-10T16:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.399232 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.399634 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.399853 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.400077 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.400287 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:32Z","lastTransitionTime":"2025-10-10T16:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.402076 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.402111 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:32:32 crc kubenswrapper[4799]: E1010 16:32:32.402226 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:32:32 crc kubenswrapper[4799]: E1010 16:32:32.402397 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.504099 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.504169 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.504192 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.504222 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.504246 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:32Z","lastTransitionTime":"2025-10-10T16:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.606992 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.607270 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.607399 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.607486 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.607572 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:32Z","lastTransitionTime":"2025-10-10T16:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.710160 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.710210 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.710228 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.710250 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.710266 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:32Z","lastTransitionTime":"2025-10-10T16:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.813581 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.813938 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.814155 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.814420 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.814668 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:32Z","lastTransitionTime":"2025-10-10T16:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.917922 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.918361 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.918832 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.919289 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:32 crc kubenswrapper[4799]: I1010 16:32:32.919718 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:32Z","lastTransitionTime":"2025-10-10T16:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.022809 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.023173 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.023392 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.023575 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.023744 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:33Z","lastTransitionTime":"2025-10-10T16:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.126893 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.126956 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.126978 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.127006 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.127031 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:33Z","lastTransitionTime":"2025-10-10T16:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.230245 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.230589 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.230816 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.231086 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.231270 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:33Z","lastTransitionTime":"2025-10-10T16:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.334797 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.335145 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.335295 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.335469 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.335599 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:33Z","lastTransitionTime":"2025-10-10T16:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.401904 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.401910 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:32:33 crc kubenswrapper[4799]: E1010 16:32:33.402204 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:32:33 crc kubenswrapper[4799]: E1010 16:32:33.402051 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.438750 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.438810 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.438819 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.438836 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.438845 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:33Z","lastTransitionTime":"2025-10-10T16:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.542234 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.542276 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.542289 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.542312 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.542328 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:33Z","lastTransitionTime":"2025-10-10T16:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.646104 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.646174 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.646198 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.646230 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.646252 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:33Z","lastTransitionTime":"2025-10-10T16:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.749933 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.749979 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.749996 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.750019 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.750037 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:33Z","lastTransitionTime":"2025-10-10T16:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.853927 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.854230 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.854419 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.854549 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.854666 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:33Z","lastTransitionTime":"2025-10-10T16:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.957309 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.957358 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.957376 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.957401 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:33 crc kubenswrapper[4799]: I1010 16:32:33.957417 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:33Z","lastTransitionTime":"2025-10-10T16:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.060807 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.060867 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.060905 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.060939 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.060960 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:34Z","lastTransitionTime":"2025-10-10T16:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.163441 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.163562 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.163583 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.163604 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.163618 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:34Z","lastTransitionTime":"2025-10-10T16:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.266650 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.266734 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.266785 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.266815 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.266833 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:34Z","lastTransitionTime":"2025-10-10T16:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.370276 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.370337 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.370357 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.370385 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.370403 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:34Z","lastTransitionTime":"2025-10-10T16:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.401792 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.401753 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:32:34 crc kubenswrapper[4799]: E1010 16:32:34.402000 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:32:34 crc kubenswrapper[4799]: E1010 16:32:34.402322 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.473252 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.473318 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.473335 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.473361 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.473381 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:34Z","lastTransitionTime":"2025-10-10T16:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.576498 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.576596 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.576650 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.576675 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.576692 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:34Z","lastTransitionTime":"2025-10-10T16:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.681144 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.681206 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.681223 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.681248 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.681265 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:34Z","lastTransitionTime":"2025-10-10T16:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.783700 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.783751 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.783793 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.783812 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.783824 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:34Z","lastTransitionTime":"2025-10-10T16:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.886617 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.886676 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.886694 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.886722 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.886738 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:34Z","lastTransitionTime":"2025-10-10T16:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.989448 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.989514 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.989538 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.989570 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:34 crc kubenswrapper[4799]: I1010 16:32:34.989596 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:34Z","lastTransitionTime":"2025-10-10T16:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.092874 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.092937 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.092956 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.092982 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.093002 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:35Z","lastTransitionTime":"2025-10-10T16:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.196336 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.196404 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.196424 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.196448 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.196464 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:35Z","lastTransitionTime":"2025-10-10T16:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.299890 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.300010 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.300030 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.300058 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.300075 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:35Z","lastTransitionTime":"2025-10-10T16:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.401496 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.401719 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:32:35 crc kubenswrapper[4799]: E1010 16:32:35.401817 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:32:35 crc kubenswrapper[4799]: E1010 16:32:35.402025 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.404387 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.404423 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.404435 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.404452 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.404464 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:35Z","lastTransitionTime":"2025-10-10T16:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.507856 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.508185 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.508352 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.508485 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.508623 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:35Z","lastTransitionTime":"2025-10-10T16:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.612182 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.612224 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.612241 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.612264 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.612281 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:35Z","lastTransitionTime":"2025-10-10T16:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.715517 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.715897 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.716114 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.716264 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.716407 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:35Z","lastTransitionTime":"2025-10-10T16:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.819307 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.819600 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.819726 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.819950 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.820085 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:35Z","lastTransitionTime":"2025-10-10T16:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.923270 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.923339 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.923359 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.923389 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.923407 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:35Z","lastTransitionTime":"2025-10-10T16:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.953103 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.964015 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.977694 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:35Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:35 crc kubenswrapper[4799]: I1010 16:32:35.999709 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:35Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.017203 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:36Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.026397 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.026468 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.026486 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.026512 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.026530 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:36Z","lastTransitionTime":"2025-10-10T16:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.034602 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b030264f18288aa7687a91f7918f1ed2c2ad474637e32a054ea8c25b97aef45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:36Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.076203 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5
778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:36Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.093184 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\
\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:36Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.107663 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k6hch" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7903c578-d05e-4ad7-8fd9-f438abf4a085\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:23Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-k6hch\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:36Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.129727 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bf1784a-776b-49c7-b64b-7ce52860df45\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://298a1a9571fbe118fe81ff3e7403e298bcde9b683cffab574fbb03d5adc1fb67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f767e89684b9b515da850360aaf9d7a02173395faf0654e9f0b3a4752a3d608b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92de340d859cff018a661f0a7f7fe209ffae161bf6f39deb005c7148591fc60b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\
\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a649a65ab118025ea70d1d7cf71236cb96992671c3bc7659d591640b53f941\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:36Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.129938 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.129986 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.130005 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.130031 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.130049 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:36Z","lastTransitionTime":"2025-10-10T16:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.149794 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:36Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.172101 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:36Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.196569 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://995be5ba088a3758758ce5aaf735f0371692c52e49e3992c6478311411c8db42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:36Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.215736 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17b2b63923e40e58b4a3d352781758ecf7c0e63eb913813e0f738d19dfb05676\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9722a694091d19d16b7c08ac22e23532deca8f4bde306a0d651d5524484fd1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z97c7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:36Z is after 2025-08-24T17:21:41Z" Oct 10 
16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.233042 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.233114 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.233137 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.233168 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.233192 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:36Z","lastTransitionTime":"2025-10-10T16:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.241830 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7eb0f742793fbd0bee8e88732ec832748e77d9226a926def177968f24a9cf06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:36Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.258485 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:36Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.271251 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:36Z is after 2025-08-24T17:21:41Z"
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.284866 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:36Z is after 2025-08-24T17:21:41Z"
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.309847 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cbc87c392646ebf9c016f8c7b40bcec30e33a0a05ea4a896d1143c5f1086990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd261112ca7db4d0d76f6ab29a0347d64dccfff4db42ac9f55d6d7df1443ab23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7d0e536ad5143941dd18418b1ac7972a1136a841542b950f6891a386d43ca9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfe05183ad0b03415525e6aa2a8d52a5d63b8c273113c46326396df5e0c2bb12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6562d440ce1f1477fd09c15c34ab88e17e1fb2c2cae4b32a7bf8cbdd29f4d5a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4fcf53aeed6c07f775152de0faa9fa0671848df06d37cbca6ec7097d0024d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46332137ef0b23ab6df5f7eb7e21459fb1b18c3580232f6c03ba11cb434c42e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46332137ef0b23ab6df5f7eb7e21459fb1b18c3580232f6c03ba11cb434c42e8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"message\\\":\\\"0 16:32:21.839394 6234 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1010 16:32:21.839417 6234 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1010 16:32:21.839439 6234 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1010 16:32:21.839445 6234 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1010 16:32:21.839474 6234 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1010 16:32:21.840885 6234 handler.go:208] Removed *v1.Node event handler 7\\\\nI1010 16:32:21.840890 6234 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1010 16:32:21.840892 6234 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1010 16:32:21.841044 6234 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1010 16:32:21.840912 6234 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1010 16:32:21.840941 6234 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1010 16:32:21.841010 6234 handler.go:208] Removed *v1.Node event handler 2\\\\nI1010 16:32:21.841131 6234 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1010 16:32:21.841175 6234 factory.go:656] Stopping watch factory\\\\nI1010 16:32:21.841197 6234 ovnkube.go:599] Stopped ovnkube\\\\nI1010 16:32:21.841225 6234 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1010 16:32:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-mcwfc_openshift-ovn-kubernetes(abe7f2d9-ec99-4724-a01f-cc7096377e07)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08406e220de50ba85f882a05117b5df8c9445a38c026bb85c95fc9f98f2d2cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:36Z is after 2025-08-24T17:21:41Z"
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.335809 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.335846 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.335861 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.335881 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.335896 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:36Z","lastTransitionTime":"2025-10-10T16:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.401942 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.401995 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 10 16:32:36 crc kubenswrapper[4799]: E1010 16:32:36.402076 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 10 16:32:36 crc kubenswrapper[4799]: E1010 16:32:36.402204 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.438974 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.439027 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.439036 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.439050 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.439059 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:36Z","lastTransitionTime":"2025-10-10T16:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.542745 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.542800 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.542815 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.542839 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.542851 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:36Z","lastTransitionTime":"2025-10-10T16:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.646063 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.646147 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.646169 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.646206 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.646227 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:36Z","lastTransitionTime":"2025-10-10T16:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.749382 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.749435 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.749471 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.749502 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.749523 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:36Z","lastTransitionTime":"2025-10-10T16:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.852570 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.852648 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.852682 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.852712 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.852733 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:36Z","lastTransitionTime":"2025-10-10T16:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.956168 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.956229 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.956247 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.956275 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:36 crc kubenswrapper[4799]: I1010 16:32:36.956299 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:36Z","lastTransitionTime":"2025-10-10T16:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.059112 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.059173 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.059197 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.059227 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.059248 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:37Z","lastTransitionTime":"2025-10-10T16:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.162439 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.162481 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.162496 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.162514 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.162526 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:37Z","lastTransitionTime":"2025-10-10T16:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.265892 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.265937 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.265956 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.265979 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.265996 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:37Z","lastTransitionTime":"2025-10-10T16:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.369437 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.369495 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.369512 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.369537 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.369554 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:37Z","lastTransitionTime":"2025-10-10T16:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.401644 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 10 16:32:37 crc kubenswrapper[4799]: E1010 16:32:37.401861 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.401977 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch"
Oct 10 16:32:37 crc kubenswrapper[4799]: E1010 16:32:37.402643 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085"
Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.403041 4799 scope.go:117] "RemoveContainer" containerID="46332137ef0b23ab6df5f7eb7e21459fb1b18c3580232f6c03ba11cb434c42e8"
Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.435916 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:37Z is after 2025-08-24T17:21:41Z"
Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.450082 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7878cf5-3c6d-4a4a-9ccd-7de395f9ac84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38c4fe49eff3373937abdebfb7d58fe9d5c73809375a3dca4f165aab84d6cbd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48434cdadac2409d0e3baf595e00260b1e3f94b8b9dab62e3f87503a6e888be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88216eac74e0df9deb1ca1bef893deb2e23a79ffffdbd8a851a67df407eaa470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91106a41672b01d9f5c61cfc3001b84f024f3b96649bbc9174f3a635fc8034a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91106a41672b01d9f5c61cfc3001b84f024f3b96649bbc9174f3a635fc8034a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:37Z is after 2025-08-24T17:21:41Z"
Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.462665 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:37Z is after 2025-08-24T17:21:41Z"
Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.473070 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.473111 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.473128 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.473152 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.473169 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:37Z","lastTransitionTime":"2025-10-10T16:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.473588 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k6hch" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7903c578-d05e-4ad7-8fd9-f438abf4a085\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:23Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k6hch\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:37Z is after 2025-08-24T17:21:41Z"
Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.489071 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bf1784a-776b-49c7-b64b-7ce52860df45\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://298a1a9571fbe118fe81ff3e7403e298bcde9b683cffab574fbb03d5adc1fb67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f767e89684b9b515da850360aaf9d7a02173395faf0654e9f0b3a4752a3d608b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92de340d859cff018a661f0a7f7fe209ffae161bf6f39deb005c7148591fc60b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a649a65ab118025ea70d1d7cf71236cb96992671c3bc7659d591640b53f941\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:37Z is after 2025-08-24T17:21:41Z"
Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.501358 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate
has expired or is not yet valid: current time 2025-10-10T16:32:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.519879 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.538517 4799 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://995be5ba088a3758758ce5aaf735f0371692c52e49e3992c6478311411c8db42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\
\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.557832 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17b2b63923e40e58b4a3d352781758ecf7c0e63eb913813e0f738d19dfb05676\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9722a694091d19d16b7c08ac22e23532deca8f4bde306a0d651d5524484fd1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z97c7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:37Z is after 2025-08-24T17:21:41Z" Oct 10 
16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.575303 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.575340 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.575356 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.575378 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.575395 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:37Z","lastTransitionTime":"2025-10-10T16:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.576035 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7eb0f742793fbd0bee8e88732ec832748e77d9226a926def177968f24a9cf06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.593252 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.608928 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.623295 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.641937 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cbc87c392646ebf9c016f8c7b40bcec30e33a0a05ea4a896d1143c5f1086990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd261112ca7db4d0d76f6ab29a0347d64dccfff4db42ac9f55d6d7df1443ab23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7d0e536ad5143941dd18418b1ac7972a1136a841542b950f6891a386d43ca9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfe05183ad0b03415525e6aa2a8d52a5d63b8c273113c46326396df5e0c2bb12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6562d440ce1f1477fd09c15c34ab88e17e1fb2c2cae4b32a7bf8cbdd29f4d5a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4fcf53aeed6c07f775152de0faa9fa0671848df06d37cbca6ec7097d0024d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46332137ef0b23ab6df5f7eb7e21459fb1b18c35
80232f6c03ba11cb434c42e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46332137ef0b23ab6df5f7eb7e21459fb1b18c3580232f6c03ba11cb434c42e8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"message\\\":\\\"0 16:32:21.839394 6234 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1010 16:32:21.839417 6234 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1010 16:32:21.839439 6234 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1010 16:32:21.839445 6234 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1010 16:32:21.839474 6234 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1010 16:32:21.840885 6234 handler.go:208] Removed *v1.Node event handler 7\\\\nI1010 16:32:21.840890 6234 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1010 16:32:21.840892 6234 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1010 16:32:21.841044 6234 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1010 16:32:21.840912 6234 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1010 16:32:21.840941 6234 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1010 16:32:21.841010 6234 handler.go:208] Removed *v1.Node event handler 2\\\\nI1010 16:32:21.841131 6234 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1010 16:32:21.841175 6234 factory.go:656] Stopping watch factory\\\\nI1010 16:32:21.841197 6234 ovnkube.go:599] Stopped ovnkube\\\\nI1010 16:32:21.841225 6234 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1010 16:32:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-mcwfc_openshift-ovn-kubernetes(abe7f2d9-ec99-4724-a01f-cc7096377e07)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08406e220de50ba85f882a05117b5df8c9445a38c026bb85c95fc9f98f2d2cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.658698 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.670334 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.677933 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.677991 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.678004 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.678027 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.678042 4799 
setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:37Z","lastTransitionTime":"2025-10-10T16:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.682505 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\
":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.693570 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b030264f18288aa7687a91f7918f1ed2c2ad474637e32a054ea8c25b97aef45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-10T16:32:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.780624 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.780908 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.781001 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.781098 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.781183 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:37Z","lastTransitionTime":"2025-10-10T16:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.808630 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mcwfc_abe7f2d9-ec99-4724-a01f-cc7096377e07/ovnkube-controller/1.log" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.813362 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" event={"ID":"abe7f2d9-ec99-4724-a01f-cc7096377e07","Type":"ContainerStarted","Data":"ba46f14de0fd2c356129122dd938e3fdda832ffc5e614ac439926a3f4ec94370"} Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.815431 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.834478 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.868140 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://995be5ba088a3758758ce5aaf735f0371692c52e49e3992c6478311411c8db42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.882849 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bf1784a-776b-49c7-b64b-7ce52860df45\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://298a1a9571fbe118fe81ff3e7403e298bcde9b683cffab574fbb03d5adc1fb67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f767e89684b9b515da850360aaf9d7a02173395faf0654e9f0b3a4752a3d608b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92de340d859cff018a661f0a7f7fe209ffae161bf6f39deb005c7148591fc60b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a649a65ab118025ea70d1d7cf71236cb96992671c3bc7659d591640b53f941\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.884711 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.884743 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.884782 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.884808 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.884825 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:37Z","lastTransitionTime":"2025-10-10T16:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.903981 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.922119 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.946011 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.977116 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cbc87c392646ebf9c016f8c7b40bcec30e33a0a05ea4a896d1143c5f1086990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd261112ca7db4d0d76f6ab29a0347d64dccfff4db42ac9f55d6d7df1443ab23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7d0e536ad5143941dd18418b1ac7972a1136a841542b950f6891a386d43ca9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfe05183ad0b03415525e6aa2a8d52a5d63b8c273113c46326396df5e0c2bb12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6562d440ce1f1477fd09c15c34ab88e17e1fb2c2cae4b32a7bf8cbdd29f4d5a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4fcf53aeed6c07f775152de0faa9fa0671848df06d37cbca6ec7097d0024d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba46f14de0fd2c356129122dd938e3fdda832ffc
5e614ac439926a3f4ec94370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46332137ef0b23ab6df5f7eb7e21459fb1b18c3580232f6c03ba11cb434c42e8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"message\\\":\\\"0 16:32:21.839394 6234 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1010 16:32:21.839417 6234 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1010 16:32:21.839439 6234 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1010 16:32:21.839445 6234 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1010 16:32:21.839474 6234 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1010 16:32:21.840885 6234 handler.go:208] Removed *v1.Node event handler 7\\\\nI1010 16:32:21.840890 6234 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1010 16:32:21.840892 6234 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1010 16:32:21.841044 6234 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1010 16:32:21.840912 6234 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1010 16:32:21.840941 6234 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1010 16:32:21.841010 6234 handler.go:208] Removed *v1.Node event handler 2\\\\nI1010 16:32:21.841131 6234 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1010 16:32:21.841175 6234 factory.go:656] Stopping watch factory\\\\nI1010 16:32:21.841197 6234 ovnkube.go:599] Stopped ovnkube\\\\nI1010 16:32:21.841225 6234 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1010 
16:32:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08406e220de50ba85f882a05117b5df8c9445a38c026bb85c95fc9f98f2d2cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.987628 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.987677 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.987689 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.987709 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.987720 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:37Z","lastTransitionTime":"2025-10-10T16:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:37 crc kubenswrapper[4799]: I1010 16:32:37.992617 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17b2b63923e40e58b4a3d352781758ecf7c0e63eb913813e0f738d19dfb05676\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9722a694091d19d16b7c08ac22e23532deca8f4bde306a0d651d5524484fd1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z97c7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.008831 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7eb0f742793fbd0bee8e88732ec832748e77d9226a926def177968f24a9cf06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:38Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.020539 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:38Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.033082 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:38Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.045935 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:38Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.058071 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b030264f18288aa7687a91f7918f1ed2c2ad474637e32a054ea8c25b97aef45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true
,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:38Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.071327 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:38Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.085140 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:38Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.089765 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.089938 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.090028 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.090108 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.090189 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:38Z","lastTransitionTime":"2025-10-10T16:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.097984 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k6hch" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7903c578-d05e-4ad7-8fd9-f438abf4a085\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:23Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k6hch\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:38Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.121513 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5
778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:38Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.133312 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7878cf5-3c6d-4a4a-9ccd-7de395f9ac84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38c4fe49eff3373937abdebfb7d58fe9d5c73809375a3dca4f165aab84d6cbd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48434cdadac2409d0e3baf595e00260b1e3f94b8b9dab62e3f87503a6e888be\\\",\\\"image\\\":\\\"quay.io/openshift-
release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88216eac74e0df9deb1ca1bef893deb2e23a79ffffdbd8a851a67df407eaa470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91106a41672b01d9f5c61cfc3001b84f024f3b96649bbc9174f3a635fc8034a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91106a41672b01d9f5c61cfc3001b84f024f3b96649bbc9174f3a635fc8034a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:38Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.193569 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.193642 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.193660 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 
16:32:38.193681 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.193695 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:38Z","lastTransitionTime":"2025-10-10T16:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.297183 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.297277 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.297299 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.297330 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.297356 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:38Z","lastTransitionTime":"2025-10-10T16:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.400698 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.400784 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.400809 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.400837 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.400858 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:38Z","lastTransitionTime":"2025-10-10T16:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.401613 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.401683 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:32:38 crc kubenswrapper[4799]: E1010 16:32:38.401805 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:32:38 crc kubenswrapper[4799]: E1010 16:32:38.401880 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.504465 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.504567 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.504593 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.504625 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.504647 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:38Z","lastTransitionTime":"2025-10-10T16:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.608348 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.608450 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.608474 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.608507 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.608524 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:38Z","lastTransitionTime":"2025-10-10T16:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.711954 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.712016 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.712037 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.712074 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.712093 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:38Z","lastTransitionTime":"2025-10-10T16:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.815218 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.815289 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.815308 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.815338 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.815359 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:38Z","lastTransitionTime":"2025-10-10T16:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.820096 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mcwfc_abe7f2d9-ec99-4724-a01f-cc7096377e07/ovnkube-controller/2.log" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.821257 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mcwfc_abe7f2d9-ec99-4724-a01f-cc7096377e07/ovnkube-controller/1.log" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.825495 4799 generic.go:334] "Generic (PLEG): container finished" podID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerID="ba46f14de0fd2c356129122dd938e3fdda832ffc5e614ac439926a3f4ec94370" exitCode=1 Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.825547 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" event={"ID":"abe7f2d9-ec99-4724-a01f-cc7096377e07","Type":"ContainerDied","Data":"ba46f14de0fd2c356129122dd938e3fdda832ffc5e614ac439926a3f4ec94370"} Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.825616 4799 scope.go:117] "RemoveContainer" containerID="46332137ef0b23ab6df5f7eb7e21459fb1b18c3580232f6c03ba11cb434c42e8" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.826851 4799 scope.go:117] "RemoveContainer" containerID="ba46f14de0fd2c356129122dd938e3fdda832ffc5e614ac439926a3f4ec94370" Oct 10 16:32:38 crc kubenswrapper[4799]: E1010 16:32:38.830442 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-mcwfc_openshift-ovn-kubernetes(abe7f2d9-ec99-4724-a01f-cc7096377e07)\"" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.854566 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5
778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:38Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.876154 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7878cf5-3c6d-4a4a-9ccd-7de395f9ac84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38c4fe49eff3373937abdebfb7d58fe9d5c73809375a3dca4f165aab84d6cbd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48434cdadac2409d0e3baf595e00260b1e3f94b8b9dab62e3f87503a6e888be\\\",\\\"image\\\":\\\"quay.io/openshift-
release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88216eac74e0df9deb1ca1bef893deb2e23a79ffffdbd8a851a67df407eaa470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91106a41672b01d9f5c61cfc3001b84f024f3b96649bbc9174f3a635fc8034a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91106a41672b01d9f5c61cfc3001b84f024f3b96649bbc9174f3a635fc8034a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:38Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.894293 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:38Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.909668 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k6hch" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7903c578-d05e-4ad7-8fd9-f438abf4a085\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:23Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k6hch\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:38Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.918348 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.918406 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.918423 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.918449 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.918466 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:38Z","lastTransitionTime":"2025-10-10T16:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.930386 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bf1784a-776b-49c7-b64b-7ce52860df45\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://298a1a9571fbe118fe81ff3e7403e298bcde9b683cffab574fbb03d5adc1fb67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f767e89684b9b515da850360aaf9d7a02173395faf0654e9f0b3a4752a3d608b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92de340d859cff018a661f0a7f7fe209ffae161bf6f39deb005c7148591fc60b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a649a65ab118025ea70d1d7cf71236cb96992671c3bc7659d591640b53f941\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:38Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.947880 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:38Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.976786 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:38Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:38 crc kubenswrapper[4799]: I1010 16:32:38.995253 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://995be5ba088a3758758ce5aaf735f0371692c52e49e3992c6478311411c8db42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:38Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 
16:32:39.015816 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\
\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7eb0f742793fbd0bee8e88732ec832748e77d9226a926def177968f24a9cf06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:39Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.021225 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.021268 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.021280 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.021299 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.021310 4799 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:39Z","lastTransitionTime":"2025-10-10T16:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.037637 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:39Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.058204 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:39Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.076371 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:39Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.108017 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cbc87c392646ebf9c016f8c7b40bcec30e33a0a05ea4a896d1143c5f1086990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd261112ca7db4d0d76f6ab29a0347d64dccfff4db42ac9f55d6d7df1443ab23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7d0e536ad5143941dd18418b1ac7972a1136a841542b950f6891a386d43ca9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfe05183ad0b03415525e6aa2a8d52a5d63b8c273113c46326396df5e0c2bb12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6562d440ce1f1477fd09c15c34ab88e17e1fb2c2cae4b32a7bf8cbdd29f4d5a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4fcf53aeed6c07f775152de0faa9fa0671848df06d37cbca6ec7097d0024d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba46f14de0fd2c356129122dd938e3fdda832ffc
5e614ac439926a3f4ec94370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46332137ef0b23ab6df5f7eb7e21459fb1b18c3580232f6c03ba11cb434c42e8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"message\\\":\\\"0 16:32:21.839394 6234 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1010 16:32:21.839417 6234 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1010 16:32:21.839439 6234 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1010 16:32:21.839445 6234 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1010 16:32:21.839474 6234 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1010 16:32:21.840885 6234 handler.go:208] Removed *v1.Node event handler 7\\\\nI1010 16:32:21.840890 6234 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1010 16:32:21.840892 6234 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1010 16:32:21.841044 6234 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1010 16:32:21.840912 6234 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1010 16:32:21.840941 6234 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1010 16:32:21.841010 6234 handler.go:208] Removed *v1.Node event handler 2\\\\nI1010 16:32:21.841131 6234 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1010 16:32:21.841175 6234 factory.go:656] Stopping watch factory\\\\nI1010 16:32:21.841197 6234 ovnkube.go:599] Stopped ovnkube\\\\nI1010 16:32:21.841225 6234 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1010 16:32:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba46f14de0fd2c356129122dd938e3fdda832ffc5e614ac439926a3f4ec94370\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:32:38Z\\\",\\\"message\\\":\\\"espace event handler 1 for removal\\\\nI1010 16:32:38.395893 6461 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1010 16:32:38.395915 6461 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1010 16:32:38.395923 6461 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1010 16:32:38.395978 6461 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1010 16:32:38.395995 6461 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1010 16:32:38.396011 6461 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1010 16:32:38.396026 6461 handler.go:208] Removed *v1.Node event handler 2\\\\nI1010 16:32:38.396040 6461 handler.go:208] Removed *v1.Node event handler 7\\\\nI1010 16:32:38.396038 6461 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1010 16:32:38.396072 6461 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1010 16:32:38.396142 6461 factory.go:656] Stopping watch factory\\\\nI1010 16:32:38.396169 6461 ovnkube.go:599] Stopped ovnkube\\\\nI1010 16:32:38.396164 6461 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1010 16:32:38.396225 6461 handler.go:208] Removed *v1.EgressIP 
event handler 8\\\\nI1010 16:32:38.396236 6461 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1010 16:32:38.396378 6461 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08406e220de50ba85f882a05117b5df8c9445a38c026bb85c95fc9f98f2d2cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\
\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:39Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.127838 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.128046 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.128134 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.128169 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.128200 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:39Z","lastTransitionTime":"2025-10-10T16:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.130433 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17b2b63923e40e58b4a3d352781758ecf7c0e63eb913813e0f738d19dfb05676\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9722a694091d19d16b7c08ac22e23532deca8f4bde306a0d651d5524484fd1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z97c7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:39Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.149262 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:39Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.162187 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with 
unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:39Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.176854 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"na
me\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:39Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.192933 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b030264f18288aa7687a91f7918f1ed2c2ad474637e32a054ea8c25b97aef45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:39Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.231793 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.231848 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.231867 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.231892 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.231913 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:39Z","lastTransitionTime":"2025-10-10T16:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.334864 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.334922 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.334943 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.334972 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.334994 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:39Z","lastTransitionTime":"2025-10-10T16:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.401924 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.401943 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:32:39 crc kubenswrapper[4799]: E1010 16:32:39.402518 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:32:39 crc kubenswrapper[4799]: E1010 16:32:39.402995 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.438288 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.438582 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.438751 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.438912 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.439056 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:39Z","lastTransitionTime":"2025-10-10T16:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.542499 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.542552 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.542573 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.542601 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.542621 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:39Z","lastTransitionTime":"2025-10-10T16:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.567570 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7903c578-d05e-4ad7-8fd9-f438abf4a085-metrics-certs\") pod \"network-metrics-daemon-k6hch\" (UID: \"7903c578-d05e-4ad7-8fd9-f438abf4a085\") " pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:32:39 crc kubenswrapper[4799]: E1010 16:32:39.567853 4799 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 10 16:32:39 crc kubenswrapper[4799]: E1010 16:32:39.567953 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7903c578-d05e-4ad7-8fd9-f438abf4a085-metrics-certs podName:7903c578-d05e-4ad7-8fd9-f438abf4a085 nodeName:}" failed. No retries permitted until 2025-10-10 16:32:55.567927909 +0000 UTC m=+69.076252034 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7903c578-d05e-4ad7-8fd9-f438abf4a085-metrics-certs") pod "network-metrics-daemon-k6hch" (UID: "7903c578-d05e-4ad7-8fd9-f438abf4a085") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.645961 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.646016 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.646030 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.646051 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.646064 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:39Z","lastTransitionTime":"2025-10-10T16:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.749112 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.749165 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.749186 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.749220 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.749245 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:39Z","lastTransitionTime":"2025-10-10T16:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.830835 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mcwfc_abe7f2d9-ec99-4724-a01f-cc7096377e07/ovnkube-controller/2.log" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.835159 4799 scope.go:117] "RemoveContainer" containerID="ba46f14de0fd2c356129122dd938e3fdda832ffc5e614ac439926a3f4ec94370" Oct 10 16:32:39 crc kubenswrapper[4799]: E1010 16:32:39.835613 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-mcwfc_openshift-ovn-kubernetes(abe7f2d9-ec99-4724-a01f-cc7096377e07)\"" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.847002 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b030264f18288aa7687a91f7918f1ed2c2ad474637e32a054ea8c25b97aef45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is 
not yet valid: current time 2025-10-10T16:32:39Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.851330 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.851383 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.851398 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.851417 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.851430 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:39Z","lastTransitionTime":"2025-10-10T16:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.864547 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-10T16:32:39Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.884151 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:39Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.914123 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:39Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.948999 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:39Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.953975 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.954033 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.954052 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.954077 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.954094 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:39Z","lastTransitionTime":"2025-10-10T16:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.968415 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7878cf5-3c6d-4a4a-9ccd-7de395f9ac84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38c4fe49eff3373937abdebfb7d58fe9d5c73809375a3dca4f165aab84d6cbd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48434cdadac2409d0e3baf595e00260b1e3f94b8b9dab62e3f87503a6e888be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88216eac74e0df9deb1ca1bef893deb2e23a79ffffdbd8a851a67df407eaa470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91106a41672b01d9f5c61cfc3001b84f024f3b96649bbc9174f3a635fc8034a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91106a41672b01d9f5c61cfc3001b84f024f3b96649bbc9174f3a635fc8034a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:39Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:39 crc kubenswrapper[4799]: I1010 16:32:39.985136 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11
\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:39Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.001735 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k6hch" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7903c578-d05e-4ad7-8fd9-f438abf4a085\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:23Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k6hch\": Internal 
error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:39Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.021464 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bf1784a-776b-49c7-b64b-7ce52860df45\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://298a1a9571fbe118fe81ff3e7403e298bcde9b683cffab574fbb03d5adc1fb67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f767e89684b9b515da850360aaf9d7a02173395faf0654e9f0b3a4752a3d608b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92de340d859cff018a661f0a7f7fe209ffae161bf6f39deb005c7148591fc60b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready
\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a649a65ab118025ea70d1d7cf71236cb96992671c3bc7659d591640b53f941\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:40Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.039148 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:40Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.057906 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.057972 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.057991 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.058023 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.058040 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:40Z","lastTransitionTime":"2025-10-10T16:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.060693 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:40Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.084246 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://995be5ba088a3758758ce5aaf735f0371692c52e49e3992c6478311411c8db42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:40Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.120054 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cbc87c392646ebf9c016f8c7b40bcec30e33a0a05ea4a896d1143c5f1086990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd261112ca7db4d0d76f6ab29a0347d64dccfff4db42ac9f55d6d7df1443ab23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7d0e536ad5143941dd18418b1ac7972a1136a841542b950f6891a386d43ca9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfe05183ad0b03415525e6aa2a8d52a5d63b8c273113c46326396df5e0c2bb12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6562d440ce1f1477fd09c15c34ab88e17e1fb2c2cae4b32a7bf8cbdd29f4d5a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4fcf53aeed6c07f775152de0faa9fa0671848df06d37cbca6ec7097d0024d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba46f14de0fd2c356129122dd938e3fdda832ffc5e614ac439926a3f4ec94370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba46f14de0fd2c356129122dd938e3fdda832ffc5e614ac439926a3f4ec94370\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:32:38Z\\\",\\\"message\\\":\\\"espace event handler 1 for removal\\\\nI1010 16:32:38.395893 6461 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1010 16:32:38.395915 6461 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1010 16:32:38.395923 6461 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1010 16:32:38.395978 6461 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1010 16:32:38.395995 6461 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1010 16:32:38.396011 6461 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1010 16:32:38.396026 6461 handler.go:208] Removed *v1.Node event handler 2\\\\nI1010 16:32:38.396040 6461 handler.go:208] Removed *v1.Node event handler 7\\\\nI1010 16:32:38.396038 6461 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1010 16:32:38.396072 6461 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1010 16:32:38.396142 6461 factory.go:656] Stopping watch factory\\\\nI1010 16:32:38.396169 6461 ovnkube.go:599] Stopped ovnkube\\\\nI1010 16:32:38.396164 6461 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1010 16:32:38.396225 6461 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1010 16:32:38.396236 6461 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1010 16:32:38.396378 6461 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-mcwfc_openshift-ovn-kubernetes(abe7f2d9-ec99-4724-a01f-cc7096377e07)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08406e220de50ba85f882a05117b5df8c9445a38c026bb85c95fc9f98f2d2cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:40Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.136221 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17b2b63923e40e58b4a3d352781758ecf7c0e63eb913813e0f738d19dfb05676\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9722a694091d19d16b7c08ac22e23532deca8f4bde306a0d651d5524484fd1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z97c7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:40Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.157126 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7eb0f742793fbd0bee8e88732ec832748e77d9226a926def177968f24a9cf06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:40Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.162031 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.162075 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.162094 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.162120 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.162138 4799 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:40Z","lastTransitionTime":"2025-10-10T16:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.180181 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:40Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.200828 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:40Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.216848 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:40Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.265686 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.265733 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.265749 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.265824 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.265849 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:40Z","lastTransitionTime":"2025-10-10T16:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.368619 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.368698 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.368721 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.368793 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.368820 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:40Z","lastTransitionTime":"2025-10-10T16:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.401895 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.401938 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:32:40 crc kubenswrapper[4799]: E1010 16:32:40.402058 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:32:40 crc kubenswrapper[4799]: E1010 16:32:40.402272 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.471929 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.472025 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.472065 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.472100 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.472123 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:40Z","lastTransitionTime":"2025-10-10T16:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.575523 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.575593 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.575613 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.575644 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.575663 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:40Z","lastTransitionTime":"2025-10-10T16:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.679176 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.679250 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.679277 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.679315 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.679339 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:40Z","lastTransitionTime":"2025-10-10T16:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.783623 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.783700 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.783718 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.783750 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.783801 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:40Z","lastTransitionTime":"2025-10-10T16:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.887480 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.887547 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.887563 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.887591 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.887610 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:40Z","lastTransitionTime":"2025-10-10T16:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.985241 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.985301 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.985318 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.985342 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:40 crc kubenswrapper[4799]: I1010 16:32:40.985358 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:40Z","lastTransitionTime":"2025-10-10T16:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:41 crc kubenswrapper[4799]: E1010 16:32:41.006952 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:41Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.012516 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.012572 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.012589 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.012616 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.012636 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:41Z","lastTransitionTime":"2025-10-10T16:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:41 crc kubenswrapper[4799]: E1010 16:32:41.033481 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:41Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.038006 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.038060 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.038079 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.038102 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.038120 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:41Z","lastTransitionTime":"2025-10-10T16:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:41 crc kubenswrapper[4799]: E1010 16:32:41.058432 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:41Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.064335 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.064380 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.064396 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.064418 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.064558 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:41Z","lastTransitionTime":"2025-10-10T16:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:41 crc kubenswrapper[4799]: E1010 16:32:41.085142 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:41Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.091014 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.091066 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.091091 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.091118 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.091139 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:41Z","lastTransitionTime":"2025-10-10T16:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:41 crc kubenswrapper[4799]: E1010 16:32:41.110283 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:41Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:41 crc kubenswrapper[4799]: E1010 16:32:41.110498 4799 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.112602 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.112663 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.112685 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.112711 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.112731 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:41Z","lastTransitionTime":"2025-10-10T16:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.186686 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:32:41 crc kubenswrapper[4799]: E1010 16:32:41.186947 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:33:13.186902746 +0000 UTC m=+86.695226881 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.215504 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.215569 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.215583 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.215607 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.215625 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:41Z","lastTransitionTime":"2025-10-10T16:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.287582 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.287676 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.287731 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.287810 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 10 16:32:41 crc kubenswrapper[4799]: E1010 16:32:41.287930 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 10 16:32:41 crc kubenswrapper[4799]: E1010 16:32:41.287985 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 10 16:32:41 crc kubenswrapper[4799]: E1010 16:32:41.287936 4799 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 10 16:32:41 crc kubenswrapper[4799]: E1010 16:32:41.288060 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 10 16:32:41 crc kubenswrapper[4799]: E1010 16:32:41.288013 4799 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Oct 10 16:32:41 crc kubenswrapper[4799]: E1010 16:32:41.288105 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 10 16:32:41 crc kubenswrapper[4799]: E1010 16:32:41.288007 4799 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 10 16:32:41 crc kubenswrapper[4799]: E1010 16:32:41.288130 4799 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 10 16:32:41 crc kubenswrapper[4799]: E1010 16:32:41.288102 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-10 16:33:13.288070284 +0000 UTC m=+86.796394429 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 10 16:32:41 crc kubenswrapper[4799]: E1010 16:32:41.288340 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-10 16:33:13.28831132 +0000 UTC m=+86.796635465 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 10 16:32:41 crc kubenswrapper[4799]: E1010 16:32:41.288372 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-10 16:33:13.288360141 +0000 UTC m=+86.796684296 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 10 16:32:41 crc kubenswrapper[4799]: E1010 16:32:41.288398 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-10 16:33:13.288388002 +0000 UTC m=+86.796712147 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.318643 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.318701 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.318718 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.318743 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.318794 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:41Z","lastTransitionTime":"2025-10-10T16:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.401874 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch"
Oct 10 16:32:41 crc kubenswrapper[4799]: E1010 16:32:41.402087 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085"
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.402655 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 10 16:32:41 crc kubenswrapper[4799]: E1010 16:32:41.402831 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.421004 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.421054 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.421072 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.421094 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.421111 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:41Z","lastTransitionTime":"2025-10-10T16:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.524199 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.524309 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.524335 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.524419 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.524490 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:41Z","lastTransitionTime":"2025-10-10T16:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.627570 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.627639 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.627656 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.627680 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.627698 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:41Z","lastTransitionTime":"2025-10-10T16:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.731475 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.731528 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.731545 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.731569 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.731588 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:41Z","lastTransitionTime":"2025-10-10T16:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.836665 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.836824 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.836930 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.837186 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.837263 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:41Z","lastTransitionTime":"2025-10-10T16:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.940071 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.940131 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.940148 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.940174 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:41 crc kubenswrapper[4799]: I1010 16:32:41.940193 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:41Z","lastTransitionTime":"2025-10-10T16:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.051135 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.051180 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.051197 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.051222 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.051239 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:42Z","lastTransitionTime":"2025-10-10T16:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.153872 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.153925 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.153941 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.153966 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.153983 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:42Z","lastTransitionTime":"2025-10-10T16:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.256412 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.256455 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.256467 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.256485 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.256500 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:42Z","lastTransitionTime":"2025-10-10T16:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.360175 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.360276 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.360301 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.360330 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.360347 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:42Z","lastTransitionTime":"2025-10-10T16:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.402178 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 10 16:32:42 crc kubenswrapper[4799]: E1010 16:32:42.402340 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.402180 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 10 16:32:42 crc kubenswrapper[4799]: E1010 16:32:42.402533 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.463138 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.463175 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.463186 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.463202 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.463213 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:42Z","lastTransitionTime":"2025-10-10T16:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.567288 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.567345 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.567362 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.567388 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.567407 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:42Z","lastTransitionTime":"2025-10-10T16:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.670749 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.670884 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.670919 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.670973 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.671002 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:42Z","lastTransitionTime":"2025-10-10T16:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.774667 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.774727 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.774745 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.774991 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.775011 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:42Z","lastTransitionTime":"2025-10-10T16:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.877626 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.877689 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.877706 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.877732 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.877749 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:42Z","lastTransitionTime":"2025-10-10T16:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.981406 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.981470 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.981487 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.981514 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:42 crc kubenswrapper[4799]: I1010 16:32:42.981531 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:42Z","lastTransitionTime":"2025-10-10T16:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.084804 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.084850 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.084862 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.084880 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.084896 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:43Z","lastTransitionTime":"2025-10-10T16:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.187167 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.187203 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.187211 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.187225 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.187234 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:43Z","lastTransitionTime":"2025-10-10T16:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.290678 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.290729 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.290742 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.290779 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.290794 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:43Z","lastTransitionTime":"2025-10-10T16:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.393899 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.393990 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.394012 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.394067 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.394095 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:43Z","lastTransitionTime":"2025-10-10T16:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.402998 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.403103 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 10 16:32:43 crc kubenswrapper[4799]: E1010 16:32:43.403199 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085"
Oct 10 16:32:43 crc kubenswrapper[4799]: E1010 16:32:43.403496 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.497384 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.497436 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.497454 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.497479 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.497494 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:43Z","lastTransitionTime":"2025-10-10T16:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.601153 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.601219 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.601236 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.601261 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.601281 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:43Z","lastTransitionTime":"2025-10-10T16:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.614799 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.649980 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:43Z is after 2025-08-24T17:21:41Z"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.671565 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7878cf5-3c6d-4a4a-9ccd-7de395f9ac84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38c4fe49eff3373937abdebfb7d58fe9d5c73809375a3dca4f165aab84d6cbd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48434cdadac2409d0e3baf595e00260b1e3f94b8b9dab62e3f87503a6e888be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88216eac74e0df9deb1ca1bef893deb2e23a79ffffdbd8a851a67df407eaa470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91106a41672b01d9f5c61cfc3001b84f024f3b96649bbc9174f3a635fc8034a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91106a41672b01d9f5c61cfc3001b84f024f3b96649bbc9174f3a635fc8034a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:43Z is after 2025-08-24T17:21:41Z"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.690869 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:43Z is after 2025-08-24T17:21:41Z"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.705526 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.706677 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.706739 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.706797 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.706816 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:43Z","lastTransitionTime":"2025-10-10T16:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.708626 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k6hch" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7903c578-d05e-4ad7-8fd9-f438abf4a085\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:23Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k6hch\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:43Z is after 2025-08-24T17:21:41Z"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.729398 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bf1784a-776b-49c7-b64b-7ce52860df45\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://298a1a9571fbe118fe81ff3e7403e298bcde9b683cffab574fbb03d5adc1fb67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f767e89684b9b515da850360aaf9d7a02173395faf0654e9f0b3a4752a3d608b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92de340d859cff018a661f0a7f7fe209ffae161bf6f39deb005c7148591fc60b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a649a65ab118025ea70d1d7cf71236cb96992671c3bc7659d591640b53f941\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:43Z is after 2025-08-24T17:21:41Z"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.747815 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:43Z is after 2025-08-24T17:21:41Z"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.768314 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:43Z is after 2025-08-24T17:21:41Z"
Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.792398 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://995be5ba088a3758758ce5aaf735f0371692c52e49e3992c6478311411c8db42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\
\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:43Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.811088 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17b2b63923e40e58b4a3d352781758ecf7c0e63eb913813e0f738d19dfb05676\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9722a694091d19d16b7c08ac22e23532deca8f4bde306a0d651d5524484fd1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z97c7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:43Z is after 2025-08-24T17:21:41Z" Oct 10 
16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.811153 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.811348 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.811372 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.811404 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.811426 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:43Z","lastTransitionTime":"2025-10-10T16:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.832479 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d
7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7eb0f742793fbd0bee8e88732ec832748e77d9226a926def177968f24a9cf06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 
secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:43Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.852110 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:43Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.868975 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:43Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.883216 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:43Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.914356 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cbc87c392646ebf9c016f8c7b40bcec30e33a0a05ea4a896d1143c5f1086990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd261112ca7db4d0d76f6ab29a0347d64dccfff4db42ac9f55d6d7df1443ab23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7d0e536ad5143941dd18418b1ac7972a1136a841542b950f6891a386d43ca9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfe05183ad0b03415525e6aa2a8d52a5d63b8c273113c46326396df5e0c2bb12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6562d440ce1f1477fd09c15c34ab88e17e1fb2c2cae4b32a7bf8cbdd29f4d5a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4fcf53aeed6c07f775152de0faa9fa0671848df06d37cbca6ec7097d0024d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba46f14de0fd2c356129122dd938e3fdda832ffc
5e614ac439926a3f4ec94370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba46f14de0fd2c356129122dd938e3fdda832ffc5e614ac439926a3f4ec94370\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:32:38Z\\\",\\\"message\\\":\\\"espace event handler 1 for removal\\\\nI1010 16:32:38.395893 6461 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1010 16:32:38.395915 6461 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1010 16:32:38.395923 6461 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1010 16:32:38.395978 6461 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1010 16:32:38.395995 6461 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1010 16:32:38.396011 6461 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1010 16:32:38.396026 6461 handler.go:208] Removed *v1.Node event handler 2\\\\nI1010 16:32:38.396040 6461 handler.go:208] Removed *v1.Node event handler 7\\\\nI1010 16:32:38.396038 6461 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1010 16:32:38.396072 6461 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1010 16:32:38.396142 6461 factory.go:656] Stopping watch factory\\\\nI1010 16:32:38.396169 6461 ovnkube.go:599] Stopped ovnkube\\\\nI1010 16:32:38.396164 6461 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1010 16:32:38.396225 6461 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1010 16:32:38.396236 6461 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1010 16:32:38.396378 6461 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-mcwfc_openshift-ovn-kubernetes(abe7f2d9-ec99-4724-a01f-cc7096377e07)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08406e220de50ba85f882a05117b5df8c9445a38c026bb85c95fc9f98f2d2cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:43Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.922298 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.922362 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.922387 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.922417 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.922443 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:43Z","lastTransitionTime":"2025-10-10T16:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.942092 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:43Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.960993 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:43Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.976310 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:43Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:43 crc kubenswrapper[4799]: I1010 16:32:43.994711 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b030264f18288aa7687a91f7918f1ed2c2ad474637e32a054ea8c25b97aef45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:43Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:44 crc kubenswrapper[4799]: I1010 16:32:44.024966 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:44 crc kubenswrapper[4799]: I1010 16:32:44.025037 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:44 crc kubenswrapper[4799]: I1010 16:32:44.025055 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:44 crc kubenswrapper[4799]: I1010 16:32:44.025083 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:44 crc kubenswrapper[4799]: I1010 16:32:44.025102 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:44Z","lastTransitionTime":"2025-10-10T16:32:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:44 crc kubenswrapper[4799]: I1010 16:32:44.401863 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 10 16:32:44 crc kubenswrapper[4799]: I1010 16:32:44.401904 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 10 16:32:44 crc kubenswrapper[4799]: E1010 16:32:44.402113 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 10 16:32:44 crc kubenswrapper[4799]: E1010 16:32:44.402217 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 10 16:32:45 crc kubenswrapper[4799]: I1010 16:32:45.402074 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch"
Oct 10 16:32:45 crc kubenswrapper[4799]: I1010 16:32:45.402134 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 10 16:32:45 crc kubenswrapper[4799]: E1010 16:32:45.402281 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085"
Oct 10 16:32:45 crc kubenswrapper[4799]: E1010 16:32:45.402424 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 10 16:32:46 crc kubenswrapper[4799]: I1010 16:32:46.402294 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 10 16:32:46 crc kubenswrapper[4799]: I1010 16:32:46.402446 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 10 16:32:46 crc kubenswrapper[4799]: E1010 16:32:46.402626 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 10 16:32:46 crc kubenswrapper[4799]: E1010 16:32:46.403034 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.401478 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 10 16:32:47 crc kubenswrapper[4799]: E1010 16:32:47.401654 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.401790 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch"
Oct 10 16:32:47 crc kubenswrapper[4799]: E1010 16:32:47.402006 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085"
Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.454113 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.454168 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.454190 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.454222 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.454247 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:47Z","lastTransitionTime":"2025-10-10T16:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.477135 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5
778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:47Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.491038 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7878cf5-3c6d-4a4a-9ccd-7de395f9ac84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38c4fe49eff3373937abdebfb7d58fe9d5c73809375a3dca4f165aab84d6cbd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48434cdadac2409d0e3baf595e00260b1e3f94b8b9dab62e3f87503a6e888be\\\",\\\"image\\\":\\\"quay.io/openshift-
release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88216eac74e0df9deb1ca1bef893deb2e23a79ffffdbd8a851a67df407eaa470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91106a41672b01d9f5c61cfc3001b84f024f3b96649bbc9174f3a635fc8034a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91106a41672b01d9f5c61cfc3001b84f024f3b96649bbc9174f3a635fc8034a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:47Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.501278 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:47Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.513620 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k6hch" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7903c578-d05e-4ad7-8fd9-f438abf4a085\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:23Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k6hch\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:47Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.529857 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bf1784a-776b-49c7-b64b-7ce52860df45\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://298a1a9571fbe118fe81ff3e7403e298bcde9b683cffab574fbb03d5adc1fb67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f767e89684b9b515da850360aaf9d7a02173395faf0654e9f0b3a4752a3d608b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92de340d859cff018a661f0a7f7fe209ffae161bf6f39deb005c7148591fc60b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a649a65ab118025ea70d1d7cf71236cb96992671c3bc7659d591640b53f941\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:47Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.542800 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-10-10T16:32:47Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.554256 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:47Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.556808 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.556996 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.557127 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.557247 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.557359 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:47Z","lastTransitionTime":"2025-10-10T16:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.567363 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://995be5ba088a3758758ce5aaf735f0371692c52e49e3992c6478311411c8db42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState
\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"n
ame\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt
/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:47Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.587404 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cbc87c392646ebf9c016f8c7b40bcec30e33a0a05ea4a896d1143c5f1086990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd261112ca7db4d0d76f6ab29a0347d64dccfff4db42ac9f55d6d7df1443ab23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7d0e536ad5143941dd18418b1ac7972a1136a841542b950f6891a386d43ca9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfe05183ad0b03415525e6aa2a8d52a5d63b8c273113c46326396df5e0c2bb12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6562d440ce1f1477fd09c15c34ab88e17e1fb2c2cae4b32a7bf8cbdd29f4d5a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4fcf53aeed6c07f775152de0faa9fa0671848df06d37cbca6ec7097d0024d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba46f14de0fd2c356129122dd938e3fdda832ffc
5e614ac439926a3f4ec94370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba46f14de0fd2c356129122dd938e3fdda832ffc5e614ac439926a3f4ec94370\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:32:38Z\\\",\\\"message\\\":\\\"espace event handler 1 for removal\\\\nI1010 16:32:38.395893 6461 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1010 16:32:38.395915 6461 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1010 16:32:38.395923 6461 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1010 16:32:38.395978 6461 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1010 16:32:38.395995 6461 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1010 16:32:38.396011 6461 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1010 16:32:38.396026 6461 handler.go:208] Removed *v1.Node event handler 2\\\\nI1010 16:32:38.396040 6461 handler.go:208] Removed *v1.Node event handler 7\\\\nI1010 16:32:38.396038 6461 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1010 16:32:38.396072 6461 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1010 16:32:38.396142 6461 factory.go:656] Stopping watch factory\\\\nI1010 16:32:38.396169 6461 ovnkube.go:599] Stopped ovnkube\\\\nI1010 16:32:38.396164 6461 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1010 16:32:38.396225 6461 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1010 16:32:38.396236 6461 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1010 16:32:38.396378 6461 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-mcwfc_openshift-ovn-kubernetes(abe7f2d9-ec99-4724-a01f-cc7096377e07)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08406e220de50ba85f882a05117b5df8c9445a38c026bb85c95fc9f98f2d2cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:47Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.597163 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17b2b63923e40e58b4a3d352781758ecf7c0e63eb913813e0f738d19dfb05676\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9722a694091d19d16b7c08ac22e23532deca8f4bde306a0d651d5524484fd1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z97c7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:47Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.607580 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7eb0f742793fbd0bee8e88732ec832748e77d9226a926def177968f24a9cf06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:47Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.616395 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:47Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.628825 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:47Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.642410 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:47Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.653124 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b030264f18288aa7687a91f7918f1ed2c2ad474637e32a054ea8c25b97aef45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:47Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.660035 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.660176 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.660274 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.660363 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.660425 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:47Z","lastTransitionTime":"2025-10-10T16:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.668988 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:47Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.684201 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:47Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.700555 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:47Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.762462 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.762535 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.762560 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.762591 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.762613 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:47Z","lastTransitionTime":"2025-10-10T16:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.864708 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.864799 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.864813 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.864833 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.864847 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:47Z","lastTransitionTime":"2025-10-10T16:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.967460 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.967514 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.967532 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.967558 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:47 crc kubenswrapper[4799]: I1010 16:32:47.967575 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:47Z","lastTransitionTime":"2025-10-10T16:32:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.070554 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.070849 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.070859 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.070875 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.070884 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:48Z","lastTransitionTime":"2025-10-10T16:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.172871 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.172919 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.172932 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.172952 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.172965 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:48Z","lastTransitionTime":"2025-10-10T16:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.275324 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.275365 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.275374 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.275388 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.275397 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:48Z","lastTransitionTime":"2025-10-10T16:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.377850 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.377921 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.377945 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.377976 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.377996 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:48Z","lastTransitionTime":"2025-10-10T16:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.401661 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:32:48 crc kubenswrapper[4799]: E1010 16:32:48.401840 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.402081 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:32:48 crc kubenswrapper[4799]: E1010 16:32:48.402184 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.481254 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.481357 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.481382 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.481413 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.481436 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:48Z","lastTransitionTime":"2025-10-10T16:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.585739 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.585823 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.585839 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.585868 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.585884 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:48Z","lastTransitionTime":"2025-10-10T16:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.688749 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.688835 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.688852 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.688874 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.688890 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:48Z","lastTransitionTime":"2025-10-10T16:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.792047 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.792161 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.792187 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.792218 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.792240 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:48Z","lastTransitionTime":"2025-10-10T16:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.896311 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.896391 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.896484 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.896567 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:48 crc kubenswrapper[4799]: I1010 16:32:48.896596 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:48Z","lastTransitionTime":"2025-10-10T16:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.000486 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.000533 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.000550 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.000575 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.000594 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:49Z","lastTransitionTime":"2025-10-10T16:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.103189 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.103253 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.103271 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.103295 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.103313 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:49Z","lastTransitionTime":"2025-10-10T16:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.206352 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.206444 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.206466 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.206496 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.206520 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:49Z","lastTransitionTime":"2025-10-10T16:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.309174 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.309250 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.309272 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.309305 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.309328 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:49Z","lastTransitionTime":"2025-10-10T16:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.401673 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.401747 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:32:49 crc kubenswrapper[4799]: E1010 16:32:49.401893 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:32:49 crc kubenswrapper[4799]: E1010 16:32:49.402014 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.412052 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.412087 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.412097 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.412111 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.412122 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:49Z","lastTransitionTime":"2025-10-10T16:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.515157 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.515213 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.515230 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.515254 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.515283 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:49Z","lastTransitionTime":"2025-10-10T16:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.618708 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.618779 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.618792 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.618813 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.618825 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:49Z","lastTransitionTime":"2025-10-10T16:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.721592 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.721636 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.721649 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.721665 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.721677 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:49Z","lastTransitionTime":"2025-10-10T16:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.823947 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.824029 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.824076 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.824100 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.824117 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:49Z","lastTransitionTime":"2025-10-10T16:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.926416 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.926472 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.926489 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.926514 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:49 crc kubenswrapper[4799]: I1010 16:32:49.926535 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:49Z","lastTransitionTime":"2025-10-10T16:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.030281 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.030331 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.030349 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.030408 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.030427 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:50Z","lastTransitionTime":"2025-10-10T16:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.132721 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.132810 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.132829 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.132855 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.132873 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:50Z","lastTransitionTime":"2025-10-10T16:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.235319 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.235386 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.235410 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.235441 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.235464 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:50Z","lastTransitionTime":"2025-10-10T16:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.337737 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.337821 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.337840 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.337864 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.337882 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:50Z","lastTransitionTime":"2025-10-10T16:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.401430 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.401500 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:32:50 crc kubenswrapper[4799]: E1010 16:32:50.401624 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:32:50 crc kubenswrapper[4799]: E1010 16:32:50.401787 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.441072 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.441142 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.441161 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.441187 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.441209 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:50Z","lastTransitionTime":"2025-10-10T16:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.543807 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.543872 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.543890 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.543921 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.543940 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:50Z","lastTransitionTime":"2025-10-10T16:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.647615 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.647668 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.647684 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.647704 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.647721 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:50Z","lastTransitionTime":"2025-10-10T16:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.751423 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.751480 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.751493 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.751511 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.751524 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:50Z","lastTransitionTime":"2025-10-10T16:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.855711 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.855811 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.855830 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.855859 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.855883 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:50Z","lastTransitionTime":"2025-10-10T16:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.958999 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.959090 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.959108 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.959133 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:50 crc kubenswrapper[4799]: I1010 16:32:50.959181 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:50Z","lastTransitionTime":"2025-10-10T16:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.061872 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.061937 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.061961 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.061991 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.062012 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:51Z","lastTransitionTime":"2025-10-10T16:32:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.165618 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.165688 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.165706 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.165732 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.165751 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:51Z","lastTransitionTime":"2025-10-10T16:32:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.269501 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.269564 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.269582 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.269645 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.269664 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:51Z","lastTransitionTime":"2025-10-10T16:32:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.348049 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.348101 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.348114 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.348132 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.348144 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:51Z","lastTransitionTime":"2025-10-10T16:32:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:51 crc kubenswrapper[4799]: E1010 16:32:51.366371 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:51Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.372346 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.372465 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.372535 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.372569 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.372639 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:51Z","lastTransitionTime":"2025-10-10T16:32:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:51 crc kubenswrapper[4799]: E1010 16:32:51.392549 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:51Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.399572 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.399646 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
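Every status-patch failure above traces to a single cause recorded in the error text: the serving certificate of the node.network-node-identity.openshift.io webhook at 127.0.0.1:9743 expired on 2025-08-24T17:21:41Z, while the node clock reads 2025-10-10. The following is a minimal Go sketch, not part of the kubelet, that reproduces the same validity check; only the endpoint address comes from the log, everything else is illustrative.

// cert_expiry_check.go — dial the webhook endpoint, read its serving
// certificate, and compare the validity window against the current time,
// mirroring the x509 error reported in the log.
package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		// Skip chain verification so we can inspect an already-expired
		// certificate instead of failing the handshake outright.
		InsecureSkipVerify: true,
	})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()

	state := conn.ConnectionState()
	if len(state.PeerCertificates) == 0 {
		fmt.Println("no peer certificate presented")
		return
	}
	cert := state.PeerCertificates[0]
	fmt.Printf("subject=%s notBefore=%s notAfter=%s\n",
		cert.Subject,
		cert.NotBefore.Format(time.RFC3339),
		cert.NotAfter.Format(time.RFC3339))
	if time.Now().After(cert.NotAfter) {
		// Matches the log: current time 2025-10-10T16:32:51Z is after
		// the certificate expiry of 2025-08-24T17:21:41Z.
		fmt.Println("certificate has expired")
	}
}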
event="NodeHasNoDiskPressure" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.399664 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.399687 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.399703 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:51Z","lastTransitionTime":"2025-10-10T16:32:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.401469 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:32:51 crc kubenswrapper[4799]: E1010 16:32:51.401612 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.401828 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:32:51 crc kubenswrapper[4799]: E1010 16:32:51.401907 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:32:51 crc kubenswrapper[4799]: E1010 16:32:51.420511 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:51Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.425572 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.425618 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
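Each of these pod sync failures repeats the same underlying condition: no CNI configuration file under /etc/kubernetes/cni/net.d/. A small illustrative Go sketch of that directory check follows; the extension set (.conf, .conflist, .json) mirrors what CNI's libcni scans for and is an assumption here, since the log only names the directory.

// cni_dir_check.go — approximate the check behind the recurring
// NetworkReady=false condition by listing CNI config files in the
// directory named in the log message.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	const dir = "/etc/kubernetes/cni/net.d/" // path taken from the log
	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Println("cannot read CNI config dir:", err)
		return
	}
	var found []string
	for _, e := range entries {
		// Assumed extension set; libcni recognizes these config types.
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			found = append(found, e.Name())
		}
	}
	if len(found) == 0 {
		// This is the state the kubelet keeps reporting above.
		fmt.Println("no CNI configuration file found; network plugin not ready")
		return
	}
	fmt.Println("CNI configs:", found)
}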
event="NodeHasNoDiskPressure" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.425640 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.425668 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.425693 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:51Z","lastTransitionTime":"2025-10-10T16:32:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:51 crc kubenswrapper[4799]: E1010 16:32:51.444609 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:51Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.449298 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.449353 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.449366 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.449390 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.449403 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:51Z","lastTransitionTime":"2025-10-10T16:32:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:51 crc kubenswrapper[4799]: E1010 16:32:51.466974 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:51Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:51 crc kubenswrapper[4799]: E1010 16:32:51.467127 4799 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.468977 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
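Every failed status patch in this stretch reports the same root cause at the end of the error: the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 presents a TLS certificate that expired on 2025-08-24T17:21:41Z, long before the current time of 2025-10-10T16:32:51Z. A minimal Go sketch of the kind of check that surfaces this; the endpoint address is taken from the log line, everything else is illustrative and not part of the kubelet:

// certcheck.go - sketch: report the expiry of the certificate served by the
// webhook endpoint named in the log. Illustrative only; the address comes
// from the log, the rest is an assumption.
package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	// Skip chain verification: we want to read the cert even though it is expired.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		log.Fatalf("dial webhook: %v", err)
	}
	defer conn.Close()

	cert := conn.ConnectionState().PeerCertificates[0]
	fmt.Printf("subject:  %s\n", cert.Subject)
	fmt.Printf("notAfter: %s\n", cert.NotAfter.Format(time.RFC3339))
	if time.Now().After(cert.NotAfter) {
		// Matches the kubelet error: "x509: certificate has expired or is not yet valid"
		fmt.Println("certificate has expired")
	}
}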
event="NodeHasSufficientMemory" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.469009 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.469022 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.469064 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.469078 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:51Z","lastTransitionTime":"2025-10-10T16:32:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.572207 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.572247 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.572259 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.572273 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.572284 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:51Z","lastTransitionTime":"2025-10-10T16:32:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.674490 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.674547 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.674562 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.674586 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.674675 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:51Z","lastTransitionTime":"2025-10-10T16:32:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.777246 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.777293 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.777302 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.777320 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.777330 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:51Z","lastTransitionTime":"2025-10-10T16:32:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.879368 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.879425 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.879478 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.879506 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.879523 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:51Z","lastTransitionTime":"2025-10-10T16:32:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.982014 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.982076 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.982095 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.982121 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:32:51 crc kubenswrapper[4799]: I1010 16:32:51.982140 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:51Z","lastTransitionTime":"2025-10-10T16:32:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.083936 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.084016 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.084042 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.084076 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.084099 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:52Z","lastTransitionTime":"2025-10-10T16:32:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.187028 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.187097 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.187116 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.187140 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.187157 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:52Z","lastTransitionTime":"2025-10-10T16:32:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.289610 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.289653 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.289665 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.289680 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.289692 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:52Z","lastTransitionTime":"2025-10-10T16:32:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.392295 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.392354 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.392376 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.392403 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.392423 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:52Z","lastTransitionTime":"2025-10-10T16:32:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.401363 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.401391 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:32:52 crc kubenswrapper[4799]: E1010 16:32:52.401470 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:32:52 crc kubenswrapper[4799]: E1010 16:32:52.401572 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.494671 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.494722 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.494730 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.494747 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.494772 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:52Z","lastTransitionTime":"2025-10-10T16:32:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.596657 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.596719 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.596736 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.596774 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.596789 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:52Z","lastTransitionTime":"2025-10-10T16:32:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.699936 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.699997 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.700009 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.700026 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.700037 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:52Z","lastTransitionTime":"2025-10-10T16:32:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.802299 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.802360 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.802380 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.802402 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.802419 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:52Z","lastTransitionTime":"2025-10-10T16:32:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.905144 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.905192 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.905204 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.905223 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:52 crc kubenswrapper[4799]: I1010 16:32:52.905236 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:52Z","lastTransitionTime":"2025-10-10T16:32:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.007955 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.008001 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.008062 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.008088 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.008106 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:53Z","lastTransitionTime":"2025-10-10T16:32:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.109925 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.109968 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.109977 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.109992 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.110001 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:53Z","lastTransitionTime":"2025-10-10T16:32:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.212646 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.212693 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.212704 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.212725 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.212739 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:53Z","lastTransitionTime":"2025-10-10T16:32:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.315087 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.315126 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.315134 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.315147 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.315158 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:53Z","lastTransitionTime":"2025-10-10T16:32:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.402264 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.402278 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:32:53 crc kubenswrapper[4799]: E1010 16:32:53.402501 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:32:53 crc kubenswrapper[4799]: E1010 16:32:53.402608 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.417622 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.417672 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.417689 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.417710 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.417727 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:53Z","lastTransitionTime":"2025-10-10T16:32:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.521142 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.521355 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.521375 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.521397 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.521411 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:53Z","lastTransitionTime":"2025-10-10T16:32:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.624584 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.624651 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.624668 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.624692 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.624709 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:53Z","lastTransitionTime":"2025-10-10T16:32:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.728201 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.728262 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.728285 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.728315 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.728336 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:53Z","lastTransitionTime":"2025-10-10T16:32:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.831008 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.831064 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.831077 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.831094 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.831107 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:53Z","lastTransitionTime":"2025-10-10T16:32:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.934290 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.934335 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.934345 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.934361 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:53 crc kubenswrapper[4799]: I1010 16:32:53.934374 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:53Z","lastTransitionTime":"2025-10-10T16:32:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.037008 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.037063 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.037081 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.037108 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.037126 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:54Z","lastTransitionTime":"2025-10-10T16:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.139420 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.139461 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.139478 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.139501 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.139518 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:54Z","lastTransitionTime":"2025-10-10T16:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.242828 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.242888 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.242926 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.242956 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.243078 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:54Z","lastTransitionTime":"2025-10-10T16:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.345912 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.346017 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.346042 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.346124 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.346150 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:54Z","lastTransitionTime":"2025-10-10T16:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.402004 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:32:54 crc kubenswrapper[4799]: E1010 16:32:54.402208 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.403048 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:32:54 crc kubenswrapper[4799]: E1010 16:32:54.403124 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.403868 4799 scope.go:117] "RemoveContainer" containerID="ba46f14de0fd2c356129122dd938e3fdda832ffc5e614ac439926a3f4ec94370" Oct 10 16:32:54 crc kubenswrapper[4799]: E1010 16:32:54.404218 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-mcwfc_openshift-ovn-kubernetes(abe7f2d9-ec99-4724-a01f-cc7096377e07)\"" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.449409 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.449460 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.449470 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.449483 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.449493 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:54Z","lastTransitionTime":"2025-10-10T16:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.552485 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.552553 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.552576 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.552604 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.552625 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:54Z","lastTransitionTime":"2025-10-10T16:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.655794 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.655827 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.655837 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.655853 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.655865 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:54Z","lastTransitionTime":"2025-10-10T16:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.758905 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.758955 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.758968 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.758990 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.759002 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:54Z","lastTransitionTime":"2025-10-10T16:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.861446 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.861502 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.861520 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.861544 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.861560 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:54Z","lastTransitionTime":"2025-10-10T16:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.964303 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.964340 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.964352 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.964369 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:54 crc kubenswrapper[4799]: I1010 16:32:54.964382 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:54Z","lastTransitionTime":"2025-10-10T16:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.066868 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.066927 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.066948 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.066977 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.066998 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:55Z","lastTransitionTime":"2025-10-10T16:32:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.170251 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.170323 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.170351 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.170381 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.170404 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:55Z","lastTransitionTime":"2025-10-10T16:32:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.273236 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.273297 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.273319 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.273348 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.273368 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:55Z","lastTransitionTime":"2025-10-10T16:32:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.376489 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.376523 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.376535 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.376550 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.376560 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:55Z","lastTransitionTime":"2025-10-10T16:32:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.401916 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.401924 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:32:55 crc kubenswrapper[4799]: E1010 16:32:55.402106 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:32:55 crc kubenswrapper[4799]: E1010 16:32:55.402379 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.479131 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.479199 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.479216 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.479240 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.479258 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:55Z","lastTransitionTime":"2025-10-10T16:32:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.582067 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.582115 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.582130 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.582149 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.582165 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:55Z","lastTransitionTime":"2025-10-10T16:32:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.617680 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7903c578-d05e-4ad7-8fd9-f438abf4a085-metrics-certs\") pod \"network-metrics-daemon-k6hch\" (UID: \"7903c578-d05e-4ad7-8fd9-f438abf4a085\") " pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:32:55 crc kubenswrapper[4799]: E1010 16:32:55.617867 4799 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 10 16:32:55 crc kubenswrapper[4799]: E1010 16:32:55.617940 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7903c578-d05e-4ad7-8fd9-f438abf4a085-metrics-certs podName:7903c578-d05e-4ad7-8fd9-f438abf4a085 nodeName:}" failed. No retries permitted until 2025-10-10 16:33:27.617921162 +0000 UTC m=+101.126245297 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7903c578-d05e-4ad7-8fd9-f438abf4a085-metrics-certs") pod "network-metrics-daemon-k6hch" (UID: "7903c578-d05e-4ad7-8fd9-f438abf4a085") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.684747 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.684804 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.684815 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.684831 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.684843 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:55Z","lastTransitionTime":"2025-10-10T16:32:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.786870 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.786952 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.786971 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.786999 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.787025 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:55Z","lastTransitionTime":"2025-10-10T16:32:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.894246 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.894298 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.894315 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.894340 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.894366 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:55Z","lastTransitionTime":"2025-10-10T16:32:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.996714 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.996782 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.996795 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.996812 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:55 crc kubenswrapper[4799]: I1010 16:32:55.996824 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:55Z","lastTransitionTime":"2025-10-10T16:32:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.098878 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.098912 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.098922 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.098937 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.098948 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:56Z","lastTransitionTime":"2025-10-10T16:32:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.201742 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.201836 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.201853 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.201878 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.201899 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:56Z","lastTransitionTime":"2025-10-10T16:32:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.304536 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.304574 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.304582 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.304596 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.304604 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:56Z","lastTransitionTime":"2025-10-10T16:32:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.402036 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.402063 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:32:56 crc kubenswrapper[4799]: E1010 16:32:56.402305 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:32:56 crc kubenswrapper[4799]: E1010 16:32:56.402435 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.408358 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.408413 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.408434 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.408456 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.408473 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:56Z","lastTransitionTime":"2025-10-10T16:32:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.511198 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.511277 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.511295 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.511325 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.511348 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:56Z","lastTransitionTime":"2025-10-10T16:32:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.614043 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.614087 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.614098 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.614117 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.614129 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:56Z","lastTransitionTime":"2025-10-10T16:32:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.716375 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.716432 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.716453 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.716472 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.716486 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:56Z","lastTransitionTime":"2025-10-10T16:32:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.819080 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.819196 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.819219 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.819249 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.819272 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:56Z","lastTransitionTime":"2025-10-10T16:32:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.922540 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.922602 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.922621 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.922646 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:56 crc kubenswrapper[4799]: I1010 16:32:56.922664 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:56Z","lastTransitionTime":"2025-10-10T16:32:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.025925 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.026008 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.026032 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.026066 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.026091 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:57Z","lastTransitionTime":"2025-10-10T16:32:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.129234 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.129904 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.130003 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.130033 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.130053 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:57Z","lastTransitionTime":"2025-10-10T16:32:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.232803 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.233096 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.233106 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.233122 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.233132 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:57Z","lastTransitionTime":"2025-10-10T16:32:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.336461 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.336539 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.336557 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.336582 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.336600 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:57Z","lastTransitionTime":"2025-10-10T16:32:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.402428 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:32:57 crc kubenswrapper[4799]: E1010 16:32:57.402540 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.402999 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:32:57 crc kubenswrapper[4799]: E1010 16:32:57.403289 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.415534 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17b2b63923e40e58b4a3d352781758ecf7c0e63eb913813e0f738d19dfb05676\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9722a69
4091d19d16b7c08ac22e23532deca8f4bde306a0d651d5524484fd1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z97c7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:57Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.430346 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"co
ntainerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7eb0f742793fbd0bee8e88732ec832748e77d9226a926def177968f24a9cf06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' 
detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:57Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:57 crc 
kubenswrapper[4799]: I1010 16:32:57.441074 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.441159 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.441176 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.441199 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.441219 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:57Z","lastTransitionTime":"2025-10-10T16:32:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.442649 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:57Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.456814 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:57Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.466337 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:57Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.493068 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cbc87c392646ebf9c016f8c7b40bcec30e33a0a05ea4a896d1143c5f1086990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd261112ca7db4d0d76f6ab29a0347d64dccfff4db42ac9f55d6d7df1443ab23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7d0e536ad5143941dd18418b1ac7972a1136a841542b950f6891a386d43ca9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfe05183ad0b03415525e6aa2a8d52a5d63b8c273113c46326396df5e0c2bb12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6562d440ce1f1477fd09c15c34ab88e17e1fb2c2cae4b32a7bf8cbdd29f4d5a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4fcf53aeed6c07f775152de0faa9fa0671848df06d37cbca6ec7097d0024d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba46f14de0fd2c356129122dd938e3fdda832ffc
5e614ac439926a3f4ec94370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba46f14de0fd2c356129122dd938e3fdda832ffc5e614ac439926a3f4ec94370\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:32:38Z\\\",\\\"message\\\":\\\"espace event handler 1 for removal\\\\nI1010 16:32:38.395893 6461 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1010 16:32:38.395915 6461 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1010 16:32:38.395923 6461 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1010 16:32:38.395978 6461 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1010 16:32:38.395995 6461 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1010 16:32:38.396011 6461 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1010 16:32:38.396026 6461 handler.go:208] Removed *v1.Node event handler 2\\\\nI1010 16:32:38.396040 6461 handler.go:208] Removed *v1.Node event handler 7\\\\nI1010 16:32:38.396038 6461 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1010 16:32:38.396072 6461 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1010 16:32:38.396142 6461 factory.go:656] Stopping watch factory\\\\nI1010 16:32:38.396169 6461 ovnkube.go:599] Stopped ovnkube\\\\nI1010 16:32:38.396164 6461 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1010 16:32:38.396225 6461 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1010 16:32:38.396236 6461 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1010 16:32:38.396378 6461 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-mcwfc_openshift-ovn-kubernetes(abe7f2d9-ec99-4724-a01f-cc7096377e07)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08406e220de50ba85f882a05117b5df8c9445a38c026bb85c95fc9f98f2d2cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:57Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.510092 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:57Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.526310 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:57Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.543944 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.544103 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.544135 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.544212 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.544243 4799 
setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:57Z","lastTransitionTime":"2025-10-10T16:32:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.544882 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\
":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:57Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.557136 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b030264f18288aa7687a91f7918f1ed2c2ad474637e32a054ea8c25b97aef45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-10T16:32:57Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.582521 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\
\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\"
:0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:57Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.598336 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7878cf5-3c6d-4a4a-9ccd-7de395f9ac84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38c4fe49eff3373937abdebfb7d58fe9d5c73809375a3dca4f165aab84d6cbd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48434cdadac2409d0e3baf595e00260b1e3f94b8b9dab62e3f87503a6e888be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88216eac74e0df9deb1ca1bef893deb2e23a79ffffdbd8a851a67df407eaa470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91106a41672b01d9f5c61cfc3001b84f024f3b96649bbc9174f3a635fc8034a9\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91106a41672b01d9f5c61cfc3001b84f024f3b96649bbc9174f3a635fc8034a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:57Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.608804 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:57Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.621071 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k6hch" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7903c578-d05e-4ad7-8fd9-f438abf4a085\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:23Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k6hch\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-10-10T16:32:57Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.634689 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bf1784a-776b-49c7-b64b-7ce52860df45\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://298a1a9571fbe118fe81ff3e7403e298bcde9b683cffab574fbb03d5adc1fb67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f767e89684b9b515da850360aaf9d7a02173395faf0654e9f0b3a4752a3d608b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92de340d859cff018a661f0a7f7fe209ffae161bf6f39deb005c7148591fc60b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a649a65ab118025ea70d1d7cf71236cb96992671c3bc7659d591640b53f941\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:57Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.649225 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.649346 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.649365 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.649387 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.649406 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:57Z","lastTransitionTime":"2025-10-10T16:32:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.649410 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:57Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.665462 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:57Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.682644 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://995be5ba088a3758758ce5aaf735f0371692c52e49e3992c6478311411c8db42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:57Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.752617 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.752662 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:57 crc 
kubenswrapper[4799]: I1010 16:32:57.752679 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.752703 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.752720 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:57Z","lastTransitionTime":"2025-10-10T16:32:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.855471 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.855516 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.855532 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.855556 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.855572 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:57Z","lastTransitionTime":"2025-10-10T16:32:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.958397 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.958469 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.958494 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.958523 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:57 crc kubenswrapper[4799]: I1010 16:32:57.958545 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:57Z","lastTransitionTime":"2025-10-10T16:32:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.061200 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.061250 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.061262 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.061279 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.061290 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:58Z","lastTransitionTime":"2025-10-10T16:32:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.164488 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.164541 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.164570 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.164587 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.164599 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:58Z","lastTransitionTime":"2025-10-10T16:32:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.266678 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.266711 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.266722 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.266737 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.266747 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:58Z","lastTransitionTime":"2025-10-10T16:32:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.369060 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.369115 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.369132 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.369156 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.369173 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:58Z","lastTransitionTime":"2025-10-10T16:32:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.401387 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.401544 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:32:58 crc kubenswrapper[4799]: E1010 16:32:58.401738 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:32:58 crc kubenswrapper[4799]: E1010 16:32:58.401947 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.471062 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.471093 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.471104 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.471118 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.471129 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:58Z","lastTransitionTime":"2025-10-10T16:32:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.573874 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.573942 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.573973 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.573992 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.574007 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:58Z","lastTransitionTime":"2025-10-10T16:32:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.677043 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.677092 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.677110 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.677133 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.677154 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:58Z","lastTransitionTime":"2025-10-10T16:32:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.779808 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.779867 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.779885 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.779911 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.779929 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:58Z","lastTransitionTime":"2025-10-10T16:32:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.882623 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.882687 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.882705 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.882730 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.882748 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:58Z","lastTransitionTime":"2025-10-10T16:32:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.902718 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gg5hb_f000ac73-b5de-47c8-a0a7-84bd06475f62/kube-multus/0.log" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.902773 4799 generic.go:334] "Generic (PLEG): container finished" podID="f000ac73-b5de-47c8-a0a7-84bd06475f62" containerID="b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca" exitCode=1 Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.902801 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gg5hb" event={"ID":"f000ac73-b5de-47c8-a0a7-84bd06475f62","Type":"ContainerDied","Data":"b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca"} Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.903132 4799 scope.go:117] "RemoveContainer" containerID="b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.927290 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":
{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7eb0f742793fbd0bee8e88732ec832748e77d9226a926def177968f24a9cf06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:58Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.945555 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:58Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.971137 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:58Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.985430 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.985475 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.985494 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.985518 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.985535 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:58Z","lastTransitionTime":"2025-10-10T16:32:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:58 crc kubenswrapper[4799]: I1010 16:32:58.989515 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:58Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.024974 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cbc87c392646ebf9c016f8c7b40bcec30e33a0a05ea4a896d1143c5f1086990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd261112ca7db4d0d76f6ab29a0347d64dccfff4db42ac9f55d6d7df1443ab23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7d0e536ad5143941dd18418b1ac7972a1136a841542b950f6891a386d43ca9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfe05183ad0b03415525e6aa2a8d52a5d63b8c273113c46326396df5e0c2bb12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6562d440ce1f1477fd09c15c34ab88e17e1fb2c2cae4b32a7bf8cbdd29f4d5a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4fcf53aeed6c07f775152de0faa9fa0671848df06d37cbca6ec7097d0024d5\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba46f14de0fd2c356129122dd938e3fdda832ffc5e614ac439926a3f4ec94370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba46f14de0fd2c356129122dd938e3fdda832ffc5e614ac439926a3f4ec94370\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:32:38Z\\\",\\\"message\\\":\\\"espace event handler 1 for removal\\\\nI1010 16:32:38.395893 6461 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1010 16:32:38.395915 6461 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1010 16:32:38.395923 6461 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1010 16:32:38.395978 6461 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1010 16:32:38.395995 6461 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1010 16:32:38.396011 6461 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1010 16:32:38.396026 6461 handler.go:208] Removed *v1.Node event handler 2\\\\nI1010 16:32:38.396040 6461 handler.go:208] Removed *v1.Node event handler 7\\\\nI1010 16:32:38.396038 6461 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1010 16:32:38.396072 6461 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1010 16:32:38.396142 6461 factory.go:656] Stopping watch factory\\\\nI1010 16:32:38.396169 6461 ovnkube.go:599] Stopped ovnkube\\\\nI1010 16:32:38.396164 6461 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1010 16:32:38.396225 6461 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1010 16:32:38.396236 6461 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1010 16:32:38.396378 6461 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-mcwfc_openshift-ovn-kubernetes(abe7f2d9-ec99-4724-a01f-cc7096377e07)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08406e220de50ba85f882a05117b5df8c9445a38c026bb85c95fc9f98f2d2cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"r
ecursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:59Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.046363 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17b2b63923e40e58b4a3d352781758ecf7c0e63eb913813e0f738d19dfb05676\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9722a694091d19d16b7c08ac22e23532deca8f4bde306a0d651d5524484fd1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z97c7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:59Z is after 2025-08-24T17:21:41Z" Oct 10 
16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.066813 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:59Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.085132 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:59Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.089147 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.089193 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.089210 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.089272 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.089290 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:59Z","lastTransitionTime":"2025-10-10T16:32:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.097419 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:32:58Z\\\",\\\"message\\\":\\\"2025-10-10T16:32:13+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1650ba65-1f10-40d9-a47c-1a8dc4b79e86\\\\n2025-10-10T16:32:13+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1650ba65-1f10-40d9-a47c-1a8dc4b79e86 to /host/opt/cni/bin/\\\\n2025-10-10T16:32:13Z [verbose] multus-daemon started\\\\n2025-10-10T16:32:13Z [verbose] Readiness Indicator file check\\\\n2025-10-10T16:32:58Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:59Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.108154 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b030264f18288aa7687a91f7918f1ed2c2ad474637e32a054ea8c25b97aef45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:59Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.128734 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5
778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:59Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.138393 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7878cf5-3c6d-4a4a-9ccd-7de395f9ac84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38c4fe49eff3373937abdebfb7d58fe9d5c73809375a3dca4f165aab84d6cbd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48434cdadac2409d0e3baf595e00260b1e3f94b8b9dab62e3f87503a6e888be\\\",\\\"image\\\":\\\"quay.io/openshift-
release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88216eac74e0df9deb1ca1bef893deb2e23a79ffffdbd8a851a67df407eaa470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91106a41672b01d9f5c61cfc3001b84f024f3b96649bbc9174f3a635fc8034a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91106a41672b01d9f5c61cfc3001b84f024f3b96649bbc9174f3a635fc8034a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:59Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.147822 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:59Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.156633 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k6hch" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7903c578-d05e-4ad7-8fd9-f438abf4a085\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:23Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k6hch\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:59Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.167831 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bf1784a-776b-49c7-b64b-7ce52860df45\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://298a1a9571fbe118fe81ff3e7403e298bcde9b683cffab574fbb03d5adc1fb67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f767e89684b9b515da850360aaf9d7a02173395faf0654e9f0b3a4752a3d608b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92de340d859cff018a661f0a7f7fe209ffae161bf6f39deb005c7148591fc60b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a649a65ab118025ea70d1d7cf71236cb96992671c3bc7659d591640b53f941\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:59Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.179258 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-10-10T16:32:59Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.192699 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.192738 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.192769 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.192788 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.192803 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:59Z","lastTransitionTime":"2025-10-10T16:32:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.193147 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name
\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:59Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.209833 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://995be5ba088a3758758ce5aaf735f0371692c52e49e3992c6478311411c8db42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\
\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-a
ccess-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\
\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:59Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.294903 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.295014 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.295033 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.295060 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.295078 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:59Z","lastTransitionTime":"2025-10-10T16:32:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.398566 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.398637 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.398659 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.398691 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.398717 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:59Z","lastTransitionTime":"2025-10-10T16:32:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.402007 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.402089 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:32:59 crc kubenswrapper[4799]: E1010 16:32:59.402221 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:32:59 crc kubenswrapper[4799]: E1010 16:32:59.402369 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.501867 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.501923 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.501939 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.501963 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.501983 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:59Z","lastTransitionTime":"2025-10-10T16:32:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.604183 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.604223 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.604232 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.604250 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.604261 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:59Z","lastTransitionTime":"2025-10-10T16:32:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.706382 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.706471 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.706494 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.706518 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.706535 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:59Z","lastTransitionTime":"2025-10-10T16:32:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.808110 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.808154 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.808164 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.808178 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.808188 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:59Z","lastTransitionTime":"2025-10-10T16:32:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.917003 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.917048 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.917058 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.917079 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.917089 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:32:59Z","lastTransitionTime":"2025-10-10T16:32:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.917285 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gg5hb_f000ac73-b5de-47c8-a0a7-84bd06475f62/kube-multus/0.log" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.917372 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gg5hb" event={"ID":"f000ac73-b5de-47c8-a0a7-84bd06475f62","Type":"ContainerStarted","Data":"9fa19f17c5ed052d9c266f2da2d4e8338037b397bc2fb5e859f733c6b8c1b69e"} Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.939328 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:59Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.958626 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:59Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.979180 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fa19f17c5ed052d9c266f2da2d4e8338037b397bc2fb5e859f733c6b8c1b69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:32:58Z\\\",\\\"message\\\":\\\"2025-10-10T16:32:13+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1650ba65-1f10-40d9-a47c-1a8dc4b79e86\\\\n2025-10-10T16:32:13+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1650ba65-1f10-40d9-a47c-1a8dc4b79e86 to /host/opt/cni/bin/\\\\n2025-10-10T16:32:13Z [verbose] multus-daemon started\\\\n2025-10-10T16:32:13Z [verbose] Readiness Indicator file check\\\\n2025-10-10T16:32:58Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:59Z is after 2025-08-24T17:21:41Z" Oct 10 16:32:59 crc kubenswrapper[4799]: I1010 16:32:59.990637 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b030264f18288aa7687a91f7918f1ed2c2ad474637e32a054ea8c25b97aef45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:32:59Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.005857 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7878cf5-3c6d-4a4a-9ccd-7de395f9ac84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38c4fe49eff3373937abdebfb7d58fe9d5c73809375a3dca4f165aab84d6cbd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48434cdadac2409d0e3baf595e00260b1e3f94b8b9dab62e3f87503a6e888be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88216eac74e0df9deb1ca1bef893deb2e23a79ffffdbd8a851a67df407eaa470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91106a41672b01d9f5c61cfc3001b84f024f3b96649bbc9174f3a635fc8034a9\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91106a41672b01d9f5c61cfc3001b84f024f3b96649bbc9174f3a635fc8034a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:00Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.018330 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:00Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.019507 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.019556 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.019566 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.019582 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.019591 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:00Z","lastTransitionTime":"2025-10-10T16:33:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.031409 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k6hch" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7903c578-d05e-4ad7-8fd9-f438abf4a085\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:23Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k6hch\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:00Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.063068 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5
778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:00Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.083180 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:00Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.103386 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:00Z is after 
2025-08-24T17:21:41Z" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.122241 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.122359 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.122382 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.122410 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.122427 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:00Z","lastTransitionTime":"2025-10-10T16:33:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.132785 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://995be5ba088a3758758ce5aaf735f0371692c52e49e3992c6478311411c8db42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db77
08c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\"
:\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\
\\"2025-10-10T16:32:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:00Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.152419 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bf1784a-776b-49c7-b64b-7ce52860df45\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://298a1a9571fbe118fe81ff3e7403e298bcde9b683cffab574fbb03d5adc1fb67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f767e89684b9b515da850360aaf9d7a02173395faf0654e9f0b3a4752a3d608b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92de340d859cff018a661f0a7f7fe209ffae161bf6f39deb005c7148591fc60b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a649a65ab118025ea70d1d7cf71236cb96992671c3bc7659d591640b53f941\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:00Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.172019 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with 
unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:00Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.189920 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:00Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.205980 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:00Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.225533 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.225605 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.225629 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.225690 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.225715 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:00Z","lastTransitionTime":"2025-10-10T16:33:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.238411 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cbc87c392646ebf9c016f8c7b40bcec30e33a0a05ea4a896d1143c5f1086990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd261112ca7db4d0d76f6ab29a0347d64dccfff4db42ac9f55d6d7df1443ab23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://c7d0e536ad5143941dd18418b1ac7972a1136a841542b950f6891a386d43ca9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfe05183ad0b03415525e6aa2a8d52a5d63b8c273113c46326396df5e0c2bb12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6562d440ce1f1477fd09c15c34ab88e17e1fb2c2cae4b32a7bf8cbdd29f4d5a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4fcf53aeed6c07f775152de0faa9fa0671848df06d37cbca6ec7097d0024d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba46f14de0fd2c356129122dd938e3fdda832ffc5e614ac439926a3f4ec94370\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba46f14de0fd2c356129122dd938e3fdda832ffc5e614ac439926a3f4ec94370\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:32:38Z\\\",\\\"message\\\":\\\"espace event handler 1 for removal\\\\nI1010 16:32:38.395893 6461 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1010 16:32:38.395915 6461 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1010 16:32:38.395923 6461 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1010 16:32:38.395978 6461 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1010 16:32:38.395995 6461 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1010 16:32:38.396011 6461 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1010 16:32:38.396026 6461 handler.go:208] Removed *v1.Node event handler 2\\\\nI1010 16:32:38.396040 6461 handler.go:208] Removed *v1.Node event handler 7\\\\nI1010 16:32:38.396038 6461 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1010 16:32:38.396072 6461 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1010 16:32:38.396142 6461 factory.go:656] Stopping watch factory\\\\nI1010 16:32:38.396169 6461 ovnkube.go:599] Stopped ovnkube\\\\nI1010 16:32:38.396164 6461 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1010 16:32:38.396225 6461 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1010 16:32:38.396236 6461 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1010 16:32:38.396378 6461 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-mcwfc_openshift-ovn-kubernetes(abe7f2d9-ec99-4724-a01f-cc7096377e07)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08406e220de50ba85f882a05117b5df8c9445a38c026bb85c95fc9f98f2d2cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:00Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.254845 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17b2b63923e40e58b4a3d352781758ecf7c0e63eb913813e0f738d19dfb05676\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9722a694091d19d16b7c08ac22e23532deca8f4bde306a0d651d5524484fd1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z97c7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:00Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.275143 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7eb0f742793fbd0bee8e88732ec832748e77d9226a926def177968f24a9cf06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:00Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.327690 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.327837 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.327855 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.327882 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.327900 4799 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:00Z","lastTransitionTime":"2025-10-10T16:33:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.402045 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.402152 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:33:00 crc kubenswrapper[4799]: E1010 16:33:00.402243 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:33:00 crc kubenswrapper[4799]: E1010 16:33:00.402410 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.430518 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.430591 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.430609 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.430636 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.430655 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:00Z","lastTransitionTime":"2025-10-10T16:33:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.533297 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.533356 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.533373 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.533399 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.533416 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:00Z","lastTransitionTime":"2025-10-10T16:33:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.636123 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.636160 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.636170 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.636192 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.636204 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:00Z","lastTransitionTime":"2025-10-10T16:33:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.738621 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.738677 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.738695 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.738721 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.738741 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:00Z","lastTransitionTime":"2025-10-10T16:33:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.841454 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.841527 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.841551 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.841583 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.841608 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:00Z","lastTransitionTime":"2025-10-10T16:33:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.945263 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.945335 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.945360 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.945391 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:00 crc kubenswrapper[4799]: I1010 16:33:00.945411 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:00Z","lastTransitionTime":"2025-10-10T16:33:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.048289 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.048360 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.048381 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.048406 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.048424 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:01Z","lastTransitionTime":"2025-10-10T16:33:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.150750 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.150831 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.150844 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.150865 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.150877 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:01Z","lastTransitionTime":"2025-10-10T16:33:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.252856 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.252901 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.252913 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.252933 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.252944 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:01Z","lastTransitionTime":"2025-10-10T16:33:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.356542 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.356634 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.356653 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.356677 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.356695 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:01Z","lastTransitionTime":"2025-10-10T16:33:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.402737 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.402861 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:33:01 crc kubenswrapper[4799]: E1010 16:33:01.402982 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:33:01 crc kubenswrapper[4799]: E1010 16:33:01.403220 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.460272 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.460333 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.460351 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.460376 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.460398 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:01Z","lastTransitionTime":"2025-10-10T16:33:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.501709 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.501808 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.501845 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.501873 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.501889 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:01Z","lastTransitionTime":"2025-10-10T16:33:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:01 crc kubenswrapper[4799]: E1010 16:33:01.522501 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:01Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.527202 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.527256 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.527280 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.527311 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.527334 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:01Z","lastTransitionTime":"2025-10-10T16:33:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:01 crc kubenswrapper[4799]: E1010 16:33:01.547370 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:01Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.552341 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.552417 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.552440 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.552474 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.552499 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:01Z","lastTransitionTime":"2025-10-10T16:33:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:01 crc kubenswrapper[4799]: E1010 16:33:01.581852 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:01Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.587335 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.587435 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.587468 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.587499 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.587523 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:01Z","lastTransitionTime":"2025-10-10T16:33:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:01 crc kubenswrapper[4799]: E1010 16:33:01.604237 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:01Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.608551 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.608586 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.608601 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.608621 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.608634 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:01Z","lastTransitionTime":"2025-10-10T16:33:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:01 crc kubenswrapper[4799]: E1010 16:33:01.629447 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:01Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:01 crc kubenswrapper[4799]: E1010 16:33:01.629659 4799 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.631808 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.631869 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.631890 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.631919 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.631943 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:01Z","lastTransitionTime":"2025-10-10T16:33:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.735285 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.735331 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.735350 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.735380 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.735404 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:01Z","lastTransitionTime":"2025-10-10T16:33:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.838506 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.838618 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.838638 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.838666 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.838687 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:01Z","lastTransitionTime":"2025-10-10T16:33:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.941348 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.941420 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.941438 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.941463 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:01 crc kubenswrapper[4799]: I1010 16:33:01.941481 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:01Z","lastTransitionTime":"2025-10-10T16:33:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.045090 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.045154 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.045171 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.045196 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.045214 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:02Z","lastTransitionTime":"2025-10-10T16:33:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.148701 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.148806 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.148829 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.148853 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.148870 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:02Z","lastTransitionTime":"2025-10-10T16:33:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.252025 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.252122 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.252148 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.252181 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.252205 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:02Z","lastTransitionTime":"2025-10-10T16:33:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.355204 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.355282 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.355301 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.355331 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.355353 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:02Z","lastTransitionTime":"2025-10-10T16:33:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.402447 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.402506 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:33:02 crc kubenswrapper[4799]: E1010 16:33:02.402702 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:33:02 crc kubenswrapper[4799]: E1010 16:33:02.402882 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.457612 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.457672 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.457691 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.457717 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.457736 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:02Z","lastTransitionTime":"2025-10-10T16:33:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.561233 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.561293 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.561309 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.561474 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.561528 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:02Z","lastTransitionTime":"2025-10-10T16:33:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.664824 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.664904 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.664927 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.664954 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.664974 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:02Z","lastTransitionTime":"2025-10-10T16:33:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.767599 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.767660 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.767676 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.767700 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.767717 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:02Z","lastTransitionTime":"2025-10-10T16:33:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.870307 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.870379 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.870405 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.870437 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.870460 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:02Z","lastTransitionTime":"2025-10-10T16:33:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.973827 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.973884 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.973900 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.973928 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:02 crc kubenswrapper[4799]: I1010 16:33:02.973948 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:02Z","lastTransitionTime":"2025-10-10T16:33:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.077045 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.077356 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.077498 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.077639 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.077808 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:03Z","lastTransitionTime":"2025-10-10T16:33:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.181231 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.181293 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.181310 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.181344 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.181363 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:03Z","lastTransitionTime":"2025-10-10T16:33:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.284586 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.284651 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.284670 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.284695 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.284712 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:03Z","lastTransitionTime":"2025-10-10T16:33:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.386864 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.386936 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.386960 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.386992 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.387011 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:03Z","lastTransitionTime":"2025-10-10T16:33:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.402514 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:33:03 crc kubenswrapper[4799]: E1010 16:33:03.402680 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.402722 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:33:03 crc kubenswrapper[4799]: E1010 16:33:03.402903 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.490995 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.491058 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.491076 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.491100 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.491118 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:03Z","lastTransitionTime":"2025-10-10T16:33:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.594249 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.594309 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.594334 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.594364 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.594386 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:03Z","lastTransitionTime":"2025-10-10T16:33:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.697580 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.697647 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.697671 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.697701 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.697723 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:03Z","lastTransitionTime":"2025-10-10T16:33:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.800438 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.800514 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.800549 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.800581 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.800607 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:03Z","lastTransitionTime":"2025-10-10T16:33:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.904027 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.904054 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.904062 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.904147 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:03 crc kubenswrapper[4799]: I1010 16:33:03.904159 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:03Z","lastTransitionTime":"2025-10-10T16:33:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.006703 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.006841 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.006867 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.006898 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.006919 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:04Z","lastTransitionTime":"2025-10-10T16:33:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.109366 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.109417 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.109433 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.109457 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.109474 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:04Z","lastTransitionTime":"2025-10-10T16:33:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.212214 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.212246 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.212254 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.212268 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.212290 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:04Z","lastTransitionTime":"2025-10-10T16:33:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.316530 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.316617 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.316651 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.316684 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.316708 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:04Z","lastTransitionTime":"2025-10-10T16:33:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.402450 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.402525 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:33:04 crc kubenswrapper[4799]: E1010 16:33:04.402579 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:33:04 crc kubenswrapper[4799]: E1010 16:33:04.402712 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.418917 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.418967 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.418984 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.418999 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.419010 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:04Z","lastTransitionTime":"2025-10-10T16:33:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.521512 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.521689 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.521713 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.521745 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.521799 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:04Z","lastTransitionTime":"2025-10-10T16:33:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.624859 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.624908 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.624921 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.624940 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.624954 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:04Z","lastTransitionTime":"2025-10-10T16:33:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.727925 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.727969 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.727978 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.727995 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.728005 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:04Z","lastTransitionTime":"2025-10-10T16:33:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.830426 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.830486 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.830502 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.830529 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.830546 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:04Z","lastTransitionTime":"2025-10-10T16:33:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.933328 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.933398 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.933424 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.933456 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:04 crc kubenswrapper[4799]: I1010 16:33:04.933476 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:04Z","lastTransitionTime":"2025-10-10T16:33:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.036455 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.036516 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.036540 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.036568 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.036590 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:05Z","lastTransitionTime":"2025-10-10T16:33:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.139515 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.139580 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.139596 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.139623 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.139642 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:05Z","lastTransitionTime":"2025-10-10T16:33:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.242982 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.243055 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.243074 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.243100 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.243118 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:05Z","lastTransitionTime":"2025-10-10T16:33:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.346372 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.346478 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.346496 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.346591 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.346612 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:05Z","lastTransitionTime":"2025-10-10T16:33:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.401487 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.401586 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:33:05 crc kubenswrapper[4799]: E1010 16:33:05.401698 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:33:05 crc kubenswrapper[4799]: E1010 16:33:05.402000 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.403212 4799 scope.go:117] "RemoveContainer" containerID="ba46f14de0fd2c356129122dd938e3fdda832ffc5e614ac439926a3f4ec94370" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.449654 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.449725 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.449743 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.449800 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.449820 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:05Z","lastTransitionTime":"2025-10-10T16:33:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.552596 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.552657 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.552675 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.552701 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.552719 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:05Z","lastTransitionTime":"2025-10-10T16:33:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.659122 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.659180 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.659199 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.659226 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.659244 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:05Z","lastTransitionTime":"2025-10-10T16:33:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.761745 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.761829 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.761846 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.761868 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.761886 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:05Z","lastTransitionTime":"2025-10-10T16:33:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.864994 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.865041 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.865055 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.865074 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.865086 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:05Z","lastTransitionTime":"2025-10-10T16:33:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.941176 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mcwfc_abe7f2d9-ec99-4724-a01f-cc7096377e07/ovnkube-controller/2.log" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.945016 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" event={"ID":"abe7f2d9-ec99-4724-a01f-cc7096377e07","Type":"ContainerStarted","Data":"df22025d59e852d7ca86c7739f0dd141f6b388604bcf9ffaabfa48433290db84"} Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.946794 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.963159 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bf1784a-776b-49c7-b64b-7ce52860df45\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://298a1a9571fbe118fe81ff3e7403e298bcde9b683cffab574fbb03d5adc1fb67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f767e89684b9b515da850360aaf9d7a02173395faf0654e9f0b3a4752a3d608b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92de340d859cff018a66
1f0a7f7fe209ffae161bf6f39deb005c7148591fc60b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a649a65ab118025ea70d1d7cf71236cb96992671c3bc7659d591640b53f941\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:05Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.966981 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.967016 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.967028 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.967047 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.967059 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:05Z","lastTransitionTime":"2025-10-10T16:33:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.976527 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:05Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:05 crc kubenswrapper[4799]: I1010 16:33:05.988158 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:05Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.003976 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://995be5ba088a3758758ce5aaf735f0371692c52e49e3992c6478311411c8db42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:06Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.020473 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7eb0f742793fbd0bee8e88732ec832748e77d9226a926def177968f24a9cf06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:06Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.033270 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:06Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.046013 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:06Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.060020 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:06Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.069349 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.069391 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.069403 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.069418 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.069428 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:06Z","lastTransitionTime":"2025-10-10T16:33:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.080956 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cbc87c392646ebf9c016f8c7b40bcec30e33a0a05ea4a896d1143c5f1086990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd261112ca7db4d0d76f6ab29a0347d64dccfff4db42ac9f55d6d7df1443ab23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://c7d0e536ad5143941dd18418b1ac7972a1136a841542b950f6891a386d43ca9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfe05183ad0b03415525e6aa2a8d52a5d63b8c273113c46326396df5e0c2bb12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6562d440ce1f1477fd09c15c34ab88e17e1fb2c2cae4b32a7bf8cbdd29f4d5a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4fcf53aeed6c07f775152de0faa9fa0671848df06d37cbca6ec7097d0024d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22025d59e852d7ca86c7739f0dd141f6b388604bcf9ffaabfa48433290db84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba46f14de0fd2c356129122dd938e3fdda832ffc5e614ac439926a3f4ec94370\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:32:38Z\\\",\\\"message\\\":\\\"espace event handler 1 for removal\\\\nI1010 16:32:38.395893 6461 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1010 16:32:38.395915 6461 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1010 16:32:38.395923 6461 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1010 16:32:38.395978 6461 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1010 16:32:38.395995 6461 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1010 16:32:38.396011 6461 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1010 16:32:38.396026 6461 handler.go:208] Removed *v1.Node event handler 2\\\\nI1010 16:32:38.396040 6461 handler.go:208] Removed *v1.Node event handler 7\\\\nI1010 16:32:38.396038 6461 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1010 16:32:38.396072 6461 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1010 16:32:38.396142 6461 factory.go:656] Stopping watch factory\\\\nI1010 16:32:38.396169 6461 ovnkube.go:599] Stopped ovnkube\\\\nI1010 16:32:38.396164 6461 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1010 16:32:38.396225 6461 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1010 16:32:38.396236 6461 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1010 16:32:38.396378 6461 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:33:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08406e220de50ba85f882a05117b5df8c9445a38c026bb85c95fc9f98f2d2cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:06Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.092893 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17b2b63923e40e58b4a3d352781758ecf7c0e63eb913813e0f738d19dfb05676\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9722a694091d19d16b7c08ac22e23532deca8f4bde306a0d651d5524484fd1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z97c7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:06Z is after 2025-08-24T17:21:41Z" Oct 10 
16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.110977 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:06Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.121375 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:06Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.132273 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fa19f17c5ed052d9c266f2da2d4e8338037b397bc2fb5e859f733c6b8c1b69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:32:58Z\\\",\\\"message\\\":\\\"2025-10-10T16:32:13+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1650ba65-1f10-40d9-a47c-1a8dc4b79e86\\\\n2025-10-10T16:32:13+00:00 [cnibincopy] 
Successfully moved files in /host/opt/cni/bin/upgrade_1650ba65-1f10-40d9-a47c-1a8dc4b79e86 to /host/opt/cni/bin/\\\\n2025-10-10T16:32:13Z [verbose] multus-daemon started\\\\n2025-10-10T16:32:13Z [verbose] Readiness Indicator file check\\\\n2025-10-10T16:32:58Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:06Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.141952 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b030264f18288aa7687a91f7918f1ed2c2ad474637e32a054ea8c25b97aef45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:06Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.159125 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5
778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:06Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.170503 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7878cf5-3c6d-4a4a-9ccd-7de395f9ac84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38c4fe49eff3373937abdebfb7d58fe9d5c73809375a3dca4f165aab84d6cbd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48434cdadac2409d0e3baf595e00260b1e3f94b8b9dab62e3f87503a6e888be\\\",\\\"image\\\":\\\"quay.io/openshift-
release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88216eac74e0df9deb1ca1bef893deb2e23a79ffffdbd8a851a67df407eaa470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91106a41672b01d9f5c61cfc3001b84f024f3b96649bbc9174f3a635fc8034a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91106a41672b01d9f5c61cfc3001b84f024f3b96649bbc9174f3a635fc8034a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:06Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.171452 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.171477 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.171487 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 
16:33:06.171503 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.171515 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:06Z","lastTransitionTime":"2025-10-10T16:33:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.183096 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:06Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.195203 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k6hch" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7903c578-d05e-4ad7-8fd9-f438abf4a085\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:23Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k6hch\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:06Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.274022 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.274071 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.274084 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.274103 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.274116 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:06Z","lastTransitionTime":"2025-10-10T16:33:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.376935 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.376968 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.376979 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.376994 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.377005 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:06Z","lastTransitionTime":"2025-10-10T16:33:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.402404 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.402440 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:33:06 crc kubenswrapper[4799]: E1010 16:33:06.402557 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:33:06 crc kubenswrapper[4799]: E1010 16:33:06.402702 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.479167 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.479227 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.479251 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.479277 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.479297 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:06Z","lastTransitionTime":"2025-10-10T16:33:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.582084 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.582151 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.582170 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.582195 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.582212 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:06Z","lastTransitionTime":"2025-10-10T16:33:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.685083 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.685166 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.685190 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.685220 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.685246 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:06Z","lastTransitionTime":"2025-10-10T16:33:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.787716 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.788042 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.788061 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.788080 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.788092 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:06Z","lastTransitionTime":"2025-10-10T16:33:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.890724 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.890808 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.890826 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.890851 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.890876 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:06Z","lastTransitionTime":"2025-10-10T16:33:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.951598 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mcwfc_abe7f2d9-ec99-4724-a01f-cc7096377e07/ovnkube-controller/3.log" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.952730 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mcwfc_abe7f2d9-ec99-4724-a01f-cc7096377e07/ovnkube-controller/2.log" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.957025 4799 generic.go:334] "Generic (PLEG): container finished" podID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerID="df22025d59e852d7ca86c7739f0dd141f6b388604bcf9ffaabfa48433290db84" exitCode=1 Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.957097 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" event={"ID":"abe7f2d9-ec99-4724-a01f-cc7096377e07","Type":"ContainerDied","Data":"df22025d59e852d7ca86c7739f0dd141f6b388604bcf9ffaabfa48433290db84"} Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.957159 4799 scope.go:117] "RemoveContainer" containerID="ba46f14de0fd2c356129122dd938e3fdda832ffc5e614ac439926a3f4ec94370" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.958204 4799 scope.go:117] "RemoveContainer" containerID="df22025d59e852d7ca86c7739f0dd141f6b388604bcf9ffaabfa48433290db84" Oct 10 16:33:06 crc kubenswrapper[4799]: E1010 16:33:06.959207 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-mcwfc_openshift-ovn-kubernetes(abe7f2d9-ec99-4724-a01f-cc7096377e07)\"" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.981696 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bf1784a-776b-49c7-b64b-7ce52860df45\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://298a1a9571fbe118fe81ff3e7403e298bcde9b683cffab574fbb03d5adc1fb67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f767e89684b9b515da850360aaf9d7a02173395faf0654e9f0b3a4752a3d608b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92de340d859cff018a661f0a7f7fe209ffae161bf6f39deb005c7148591fc60b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a649a65ab118025ea70d1d7cf71236cb96992671c3bc7659d591640b53f941\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:06Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.994239 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.994317 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.994354 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.994384 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:06 crc kubenswrapper[4799]: I1010 16:33:06.994404 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:06Z","lastTransitionTime":"2025-10-10T16:33:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.006609 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.026547 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.049771 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://995be5ba088a3758758ce5aaf735f0371692c52e49e3992c6478311411c8db42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.063865 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7eb0f742793fbd0bee8e88732ec832748e77d9226a926def177968f24a9cf06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.077044 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.089550 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.096818 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.096841 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.096849 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.096865 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.096875 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:07Z","lastTransitionTime":"2025-10-10T16:33:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.100365 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.118417 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cbc87c392646ebf9c016f8c7b40bcec30e33a0a05ea4a896d1143c5f1086990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd261112ca7db4d0d76f6ab29a0347d64dccfff4db42ac9f55d6d7df1443ab23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7d0e536ad5143941dd18418b1ac7972a1136a841542b950f6891a386d43ca9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfe05183ad0b03415525e6aa2a8d52a5d63b8c273113c46326396df5e0c2bb12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6562d440ce1f1477fd09c15c34ab88e17e1fb2c2cae4b32a7bf8cbdd29f4d5a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4fcf53aeed6c07f775152de0faa9fa0671848df06d37cbca6ec7097d0024d5\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22025d59e852d7ca86c7739f0dd141f6b388604bcf9ffaabfa48433290db84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba46f14de0fd2c356129122dd938e3fdda832ffc5e614ac439926a3f4ec94370\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:32:38Z\\\",\\\"message\\\":\\\"espace event handler 1 for removal\\\\nI1010 16:32:38.395893 6461 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1010 16:32:38.395915 6461 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1010 16:32:38.395923 6461 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1010 16:32:38.395978 6461 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1010 16:32:38.395995 6461 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1010 16:32:38.396011 6461 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1010 16:32:38.396026 6461 handler.go:208] Removed *v1.Node event handler 2\\\\nI1010 16:32:38.396040 6461 handler.go:208] Removed *v1.Node event handler 7\\\\nI1010 16:32:38.396038 6461 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1010 16:32:38.396072 6461 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1010 16:32:38.396142 6461 factory.go:656] Stopping watch factory\\\\nI1010 16:32:38.396169 6461 ovnkube.go:599] Stopped ovnkube\\\\nI1010 16:32:38.396164 6461 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1010 16:32:38.396225 6461 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1010 16:32:38.396236 6461 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1010 16:32:38.396378 6461 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df22025d59e852d7ca86c7739f0dd141f6b388604bcf9ffaabfa48433290db84\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:33:06Z\\\",\\\"message\\\":\\\"300553 6825 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nF1010 16:33:06.300559 6825 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:06Z is after 2025-08-24T17:21:41Z]\\\\nI1010 16:33:06.300565 6825 obj_retry.go:303] Retry object setup: *v1.Pod openshift-etcd/etcd-crc\\\\nI1010 16:33:06.300572 6825 obj_retry.go:365] Adding new object: *v1.Pod openshift-etcd/etcd-crc\\\\nI1010 16:33:06.300560 6825 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:3b 
10.217.0.59\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:33:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08406e220de50ba85f882a05117b5df8c9445a38c026bb85c95fc9f98f2d2cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd4
7ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.133966 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17b2b63923e40e58b4a3d352781758ecf7c0e63eb913813e0f738d19dfb05676\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursi
veReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9722a694091d19d16b7c08ac22e23532deca8f4bde306a0d651d5524484fd1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z97c7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.147235 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.164433 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.182204 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fa19f17c5ed052d9c266f2da2d4e8338037b397bc2fb5e859f733c6b8c1b69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:32:58Z\\\",\\\"message\\\":\\\"2025-10-10T16:32:13+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1650ba65-1f10-40d9-a47c-1a8dc4b79e86\\\\n2025-10-10T16:32:13+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1650ba65-1f10-40d9-a47c-1a8dc4b79e86 to /host/opt/cni/bin/\\\\n2025-10-10T16:32:13Z [verbose] multus-daemon started\\\\n2025-10-10T16:32:13Z [verbose] Readiness Indicator file check\\\\n2025-10-10T16:32:58Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.198578 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b030264f18288aa7687a91f7918f1ed2c2ad474637e32a054ea8c25b97aef45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.201685 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.201724 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.201736 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.201772 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.201788 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:07Z","lastTransitionTime":"2025-10-10T16:33:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.225705 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.242899 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7878cf5-3c6d-4a4a-9ccd-7de395f9ac84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38c4fe49eff3373937abdebfb7d58fe9d5c73809375a3dca4f165aab84d6cbd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48434cdadac2409d0e3baf595e00260b1e3f94b8b9dab62e3f87503a6e888be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88216eac74e0df9deb1ca1bef893deb2e23a79ffffdbd8a851a67df407eaa470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91106a41672b01d9f5c61cfc3001b84f024f3b96649bbc9174f3a635fc8034a9\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91106a41672b01d9f5c61cfc3001b84f024f3b96649bbc9174f3a635fc8034a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.257865 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.273776 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k6hch" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7903c578-d05e-4ad7-8fd9-f438abf4a085\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:23Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k6hch\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.303819 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.303861 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.303877 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.303898 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.303934 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:07Z","lastTransitionTime":"2025-10-10T16:33:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.405287 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:33:07 crc kubenswrapper[4799]: E1010 16:33:07.405464 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.405732 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:33:07 crc kubenswrapper[4799]: E1010 16:33:07.405866 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.408495 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.408559 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.408583 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.408616 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.408638 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:07Z","lastTransitionTime":"2025-10-10T16:33:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.430923 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"ima
geID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha
256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.448831 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7878cf5-3c6d-4a4a-9ccd-7de395f9ac84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38c4fe49eff3373937abdebfb7d58fe9d5c73809375a3dca4f165aab84d6cbd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48434cdadac2409d0e3baf595e00260b1e3f94b8b9dab62e3f87503a6e888be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88216eac74e0df9deb1ca1bef893deb2e23a79ffffdbd8a851a67df407eaa470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91106a41672b01d9f5c61cfc3001b84f024f3b96649bbc9174f3a635fc8034a9\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91106a41672b01d9f5c61cfc3001b84f024f3b96649bbc9174f3a635fc8034a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.464797 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.480832 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k6hch" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7903c578-d05e-4ad7-8fd9-f438abf4a085\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:23Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k6hch\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.500746 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bf1784a-776b-49c7-b64b-7ce52860df45\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://298a1a9571fbe118fe81ff3e7403e298bcde9b683cffab574fbb03d5adc1fb67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f767e89684b9b515da850360aaf9d7a02173395faf0654e9f0b3a4752a3d608b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92de340d859cff018a661f0a7f7fe209ffae161bf6f39deb005c7148591fc60b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a649a65ab118025ea70d1d7cf71236cb96992671c3bc7659d591640b53f941\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.510501 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.510582 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.510598 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.510616 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.510629 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:07Z","lastTransitionTime":"2025-10-10T16:33:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.515975 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.536469 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.558777 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://995be5ba088a3758758ce5aaf735f0371692c52e49e3992c6478311411c8db42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.583984 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7eb0f742793fbd0bee8e88732ec832748e77d9226a926def177968f24a9cf06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.602721 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.613986 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.614083 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.614109 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.614141 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.614163 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:07Z","lastTransitionTime":"2025-10-10T16:33:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.622401 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.641059 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.670472 4799 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cbc87c392646ebf9c016f8c7b40bcec30e33a0a05ea4a896d1143c5f1086990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd261112ca7db4d0d76f6ab29a0347d64dccfff4db42ac9f55d6d7df1443ab23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7d0e536ad5143941dd18418b1ac7972a1136a841542b950f6891a386d43ca9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfe05183ad0b03415525e6aa2a8d52a5d63b8c273113c46326396df5e0c2bb12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6562d440ce1f1477fd09c15c34ab88e17e1fb2c2cae4b32a7bf8cbdd29f4d5a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4fcf53aeed6c07f775152de0faa9fa0671848df06d37cbca6ec7097d0024d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22025d59e852d7ca86c7739f0dd141f6b388604bcf9ffaabfa48433290db84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba46f14de0fd2c356129122dd938e3fdda832ffc5e614ac439926a3f4ec94370\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:32:38Z\\\",\\\"message\\\":\\\"espace event handler 1 for removal\\\\nI1010 16:32:38.395893 6461 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1010 16:32:38.395915 6461 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1010 16:32:38.395923 6461 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1010 16:32:38.395978 6461 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1010 16:32:38.395995 6461 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1010 16:32:38.396011 6461 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1010 16:32:38.396026 6461 handler.go:208] Removed *v1.Node event handler 2\\\\nI1010 16:32:38.396040 6461 handler.go:208] Removed *v1.Node event handler 7\\\\nI1010 16:32:38.396038 6461 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1010 16:32:38.396072 6461 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1010 16:32:38.396142 6461 factory.go:656] Stopping watch factory\\\\nI1010 16:32:38.396169 6461 ovnkube.go:599] Stopped ovnkube\\\\nI1010 16:32:38.396164 6461 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1010 16:32:38.396225 6461 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1010 16:32:38.396236 6461 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1010 16:32:38.396378 6461 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df22025d59e852d7ca86c7739f0dd141f6b388604bcf9ffaabfa48433290db84\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:33:06Z\\\",\\\"message\\\":\\\"300553 6825 default_network_controller.go:776] Recording success 
event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nF1010 16:33:06.300559 6825 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:06Z is after 2025-08-24T17:21:41Z]\\\\nI1010 16:33:06.300565 6825 obj_retry.go:303] Retry object setup: *v1.Pod openshift-etcd/etcd-crc\\\\nI1010 16:33:06.300572 6825 obj_retry.go:365] Adding new object: *v1.Pod openshift-etcd/etcd-crc\\\\nI1010 16:33:06.300560 6825 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:33:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08406e220de50ba85f882a05117b5df8c9445a38c026bb85c95fc9f98f2d2cfe\\\",\\\"image\\\":\\\"quay.io/openshi
ft-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.689287 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17b2b63923e40e58b4a3d352781758ecf7c0e63eb913813e0f738d19dfb05676\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9722a694091d19d16b7c08ac22e23532deca8f4bde306a0d651d5524484fd1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z97c7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 
16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.710645 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.721909 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.721988 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.722003 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.722055 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.722072 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:07Z","lastTransitionTime":"2025-10-10T16:33:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.736208 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.757394 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fa19f17c5ed052d9c266f2da2d4e8338037b397bc2fb5e859f733c6b8c1b69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:32:58Z\\\",\\\"message\\\":\\\"2025-10-10T16:32:13+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1650ba65-1f10-40d9-a47c-1a8dc4b79e86\\\\n2025-10-10T16:32:13+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1650ba65-1f10-40d9-a47c-1a8dc4b79e86 to /host/opt/cni/bin/\\\\n2025-10-10T16:32:13Z [verbose] multus-daemon started\\\\n2025-10-10T16:32:13Z [verbose] Readiness Indicator file check\\\\n2025-10-10T16:32:58Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.776322 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b030264f18288aa7687a91f7918f1ed2c2ad474637e32a054ea8c25b97aef45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.825354 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.825425 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.825443 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.825470 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.825488 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:07Z","lastTransitionTime":"2025-10-10T16:33:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.927926 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.927984 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.928003 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.928027 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.928046 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:07Z","lastTransitionTime":"2025-10-10T16:33:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.963916 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mcwfc_abe7f2d9-ec99-4724-a01f-cc7096377e07/ovnkube-controller/3.log" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.969099 4799 scope.go:117] "RemoveContainer" containerID="df22025d59e852d7ca86c7739f0dd141f6b388604bcf9ffaabfa48433290db84" Oct 10 16:33:07 crc kubenswrapper[4799]: E1010 16:33:07.969347 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-mcwfc_openshift-ovn-kubernetes(abe7f2d9-ec99-4724-a01f-cc7096377e07)\"" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" Oct 10 16:33:07 crc kubenswrapper[4799]: I1010 16:33:07.988523 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7878cf5-3c6d-4a4a-9ccd-7de395f9ac84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38c4fe49eff3373937abdebfb7d58fe9d5c73809375a3dca4f165aab84d6cbd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48434cdadac2409d0e3baf595e00260b1e3f94b8b9dab62e3f87503a6e888be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88216eac74e0df9deb1ca1bef893deb2e23a79ffffdbd8a851a67df407eaa470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91106a41672b01d9f5c61cfc3001b84f024f3b96649bbc9174f3a635fc8034a9\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91106a41672b01d9f5c61cfc3001b84f024f3b96649bbc9174f3a635fc8034a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:07Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.004369 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:08Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.020880 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k6hch" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7903c578-d05e-4ad7-8fd9-f438abf4a085\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:23Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k6hch\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-10-10T16:33:08Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.030470 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.030524 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.030543 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.030568 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.030587 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:08Z","lastTransitionTime":"2025-10-10T16:33:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.053721 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/opens
hift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646f
b68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:08Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.073006 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:08Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.093604 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:08Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.116348 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://995be5ba088a3758758ce5aaf735f0371692c52e49e3992c6478311411c8db42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:08Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.133392 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.133429 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:08 crc 
kubenswrapper[4799]: I1010 16:33:08.133440 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.133458 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.133469 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:08Z","lastTransitionTime":"2025-10-10T16:33:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.137430 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bf1784a-776b-49c7-b64b-7ce52860df45\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://298a1a9571fbe118fe81ff3e7403e298bcde9b683cffab574fbb03d5adc1fb67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f767e89684b9b515da850360aaf9d7a02173395faf0654e9f0b3a4752a3d608b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"con
tainerID\\\":\\\"cri-o://92de340d859cff018a661f0a7f7fe209ffae161bf6f39deb005c7148591fc60b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a649a65ab118025ea70d1d7cf71236cb96992671c3bc7659d591640b53f941\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:08Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.154117 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:08Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.166633 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:08Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.178601 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:08Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.207371 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cbc87c392646ebf9c016f8c7b40bcec30e33a0a05ea4a896d1143c5f1086990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd261112ca7db4d0d76f6ab29a0347d64dccfff4db42ac9f55d6d7df1443ab23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7d0e536ad5143941dd18418b1ac7972a1136a841542b950f6891a386d43ca9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfe05183ad0b03415525e6aa2a8d52a5d63b8c273113c46326396df5e0c2bb12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6562d440ce1f1477fd09c15c34ab88e17e1fb2c2cae4b32a7bf8cbdd29f4d5a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4fcf53aeed6c07f775152de0faa9fa0671848df06d37cbca6ec7097d0024d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22025d59e852d7ca86c7739f0dd141f6b38860
4bcf9ffaabfa48433290db84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df22025d59e852d7ca86c7739f0dd141f6b388604bcf9ffaabfa48433290db84\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:33:06Z\\\",\\\"message\\\":\\\"300553 6825 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nF1010 16:33:06.300559 6825 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:06Z is after 2025-08-24T17:21:41Z]\\\\nI1010 16:33:06.300565 6825 obj_retry.go:303] Retry object setup: *v1.Pod openshift-etcd/etcd-crc\\\\nI1010 16:33:06.300572 6825 obj_retry.go:365] Adding new object: *v1.Pod openshift-etcd/etcd-crc\\\\nI1010 16:33:06.300560 6825 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:33:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-mcwfc_openshift-ovn-kubernetes(abe7f2d9-ec99-4724-a01f-cc7096377e07)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08406e220de50ba85f882a05117b5df8c9445a38c026bb85c95fc9f98f2d2cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:08Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.224238 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17b2b63923e40e58b4a3d352781758ecf7c0e63eb913813e0f738d19dfb05676\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9722a694091d19d16b7c08ac22e23532deca8f4bde306a0d651d5524484fd1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z97c7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:08Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.236166 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.236217 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.236235 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.236261 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.236279 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:08Z","lastTransitionTime":"2025-10-10T16:33:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.243269 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7eb0f742793fbd0bee8e88732ec832748e77d9226a926def177968f24a9cf06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:08Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.264136 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:08Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.279626 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:08Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.301307 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fa19f17c5ed052d9c266f2da2d4e8338037b397bc2fb5e859f733c6b8c1b69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:32:58Z\\\",\\\"message\\\":\\\"2025-10-10T16:32:13+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1650ba65-1f10-40d9-a47c-1a8dc4b79e86\\\\n2025-10-10T16:32:13+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1650ba65-1f10-40d9-a47c-1a8dc4b79e86 to /host/opt/cni/bin/\\\\n2025-10-10T16:32:13Z [verbose] multus-daemon started\\\\n2025-10-10T16:32:13Z [verbose] Readiness Indicator file check\\\\n2025-10-10T16:32:58Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:08Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.313299 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b030264f18288aa7687a91f7918f1ed2c2ad474637e32a054ea8c25b97aef45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:08Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.338534 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.338565 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.338575 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.338597 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.338610 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:08Z","lastTransitionTime":"2025-10-10T16:33:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.401495 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.401495 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:33:08 crc kubenswrapper[4799]: E1010 16:33:08.401639 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:33:08 crc kubenswrapper[4799]: E1010 16:33:08.401695 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.441475 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.441520 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.441534 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.441556 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.441574 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:08Z","lastTransitionTime":"2025-10-10T16:33:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.544408 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.544457 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.544469 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.544489 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.544502 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:08Z","lastTransitionTime":"2025-10-10T16:33:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.647504 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.647556 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.647575 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.647599 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.647617 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:08Z","lastTransitionTime":"2025-10-10T16:33:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.751577 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.751684 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.751711 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.751745 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.751813 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:08Z","lastTransitionTime":"2025-10-10T16:33:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.855169 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.855279 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.855302 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.855328 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.855347 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:08Z","lastTransitionTime":"2025-10-10T16:33:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.959780 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.959842 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.959865 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.959911 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:08 crc kubenswrapper[4799]: I1010 16:33:08.959943 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:08Z","lastTransitionTime":"2025-10-10T16:33:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.064099 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.064186 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.064211 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.064243 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.064265 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:09Z","lastTransitionTime":"2025-10-10T16:33:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.168251 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.168347 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.168372 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.168404 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.168429 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:09Z","lastTransitionTime":"2025-10-10T16:33:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.271626 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.271699 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.271716 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.271747 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.271812 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:09Z","lastTransitionTime":"2025-10-10T16:33:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.375109 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.375172 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.375191 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.375216 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.375236 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:09Z","lastTransitionTime":"2025-10-10T16:33:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.402291 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.402376 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:33:09 crc kubenswrapper[4799]: E1010 16:33:09.402477 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:33:09 crc kubenswrapper[4799]: E1010 16:33:09.402596 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.478366 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.478473 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.478495 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.478520 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.478537 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:09Z","lastTransitionTime":"2025-10-10T16:33:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.581370 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.581435 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.581456 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.581480 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.581497 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:09Z","lastTransitionTime":"2025-10-10T16:33:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.684800 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.684894 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.684969 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.685152 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.685188 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:09Z","lastTransitionTime":"2025-10-10T16:33:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.788178 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.788240 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.788258 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.788289 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.788310 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:09Z","lastTransitionTime":"2025-10-10T16:33:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.891637 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.891701 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.891725 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.891787 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.891815 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:09Z","lastTransitionTime":"2025-10-10T16:33:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.995353 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.995786 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.995808 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.995833 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:09 crc kubenswrapper[4799]: I1010 16:33:09.995851 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:09Z","lastTransitionTime":"2025-10-10T16:33:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.099296 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.099384 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.099410 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.099437 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.099494 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:10Z","lastTransitionTime":"2025-10-10T16:33:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.202688 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.202745 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.202790 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.202817 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.202838 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:10Z","lastTransitionTime":"2025-10-10T16:33:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.306003 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.306085 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.306110 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.306140 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.306165 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:10Z","lastTransitionTime":"2025-10-10T16:33:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.401647 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.401681 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:33:10 crc kubenswrapper[4799]: E1010 16:33:10.401869 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:33:10 crc kubenswrapper[4799]: E1010 16:33:10.402064 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.409087 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.409154 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.409182 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.409216 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.409241 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:10Z","lastTransitionTime":"2025-10-10T16:33:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.512140 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.512215 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.512230 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.512246 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.512273 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:10Z","lastTransitionTime":"2025-10-10T16:33:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.615395 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.615482 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.615500 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.615528 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.615547 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:10Z","lastTransitionTime":"2025-10-10T16:33:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.718955 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.719019 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.719038 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.719066 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.719089 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:10Z","lastTransitionTime":"2025-10-10T16:33:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.822486 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.822547 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.822574 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.822603 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.822626 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:10Z","lastTransitionTime":"2025-10-10T16:33:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.925373 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.925445 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.925469 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.925499 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:10 crc kubenswrapper[4799]: I1010 16:33:10.925520 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:10Z","lastTransitionTime":"2025-10-10T16:33:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.028735 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.028834 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.028860 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.028891 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.028915 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:11Z","lastTransitionTime":"2025-10-10T16:33:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.136742 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.136853 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.136876 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.136906 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.136928 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:11Z","lastTransitionTime":"2025-10-10T16:33:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.240806 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.240879 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.240903 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.240933 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.240954 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:11Z","lastTransitionTime":"2025-10-10T16:33:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.344328 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.344401 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.344422 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.344449 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.344469 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:11Z","lastTransitionTime":"2025-10-10T16:33:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.401956 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:33:11 crc kubenswrapper[4799]: E1010 16:33:11.402159 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.402359 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:33:11 crc kubenswrapper[4799]: E1010 16:33:11.402576 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.417711 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.447727 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.447823 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.447845 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.447871 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.447890 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:11Z","lastTransitionTime":"2025-10-10T16:33:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.551687 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.552245 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.552422 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.552580 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.552717 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:11Z","lastTransitionTime":"2025-10-10T16:33:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.656639 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.656730 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.656751 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.656837 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.656856 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:11Z","lastTransitionTime":"2025-10-10T16:33:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.670445 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.670494 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.670511 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.670536 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.670553 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:11Z","lastTransitionTime":"2025-10-10T16:33:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:11 crc kubenswrapper[4799]: E1010 16:33:11.692323 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:11Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.698537 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.698609 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.698629 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.698655 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.698676 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:11Z","lastTransitionTime":"2025-10-10T16:33:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:11 crc kubenswrapper[4799]: E1010 16:33:11.720542 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:11Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.726011 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.726067 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.726086 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.726111 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.726128 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:11Z","lastTransitionTime":"2025-10-10T16:33:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:11 crc kubenswrapper[4799]: E1010 16:33:11.746107 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:11Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.751336 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.751395 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.751414 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.751441 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.751458 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:11Z","lastTransitionTime":"2025-10-10T16:33:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:11 crc kubenswrapper[4799]: E1010 16:33:11.770973 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:11Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.776558 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.776636 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.776663 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.776696 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.776720 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:11Z","lastTransitionTime":"2025-10-10T16:33:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:11 crc kubenswrapper[4799]: E1010 16:33:11.798704 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:11Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:11 crc kubenswrapper[4799]: E1010 16:33:11.799007 4799 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.801179 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.801235 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.801256 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.801283 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:11 crc kubenswrapper[4799]: I1010 16:33:11.801305 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:11Z","lastTransitionTime":"2025-10-10T16:33:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:33:12 crc kubenswrapper[4799]: I1010 16:33:12.401590 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:33:12 crc kubenswrapper[4799]: I1010 16:33:12.401683 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:33:12 crc kubenswrapper[4799]: E1010 16:33:12.402138 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:33:12 crc kubenswrapper[4799]: E1010 16:33:12.402299 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 10 16:33:13 crc kubenswrapper[4799]: I1010 16:33:13.243874 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:33:13 crc kubenswrapper[4799]: E1010 16:33:13.244185 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:17.244144703 +0000 UTC m=+150.752468848 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:33:13 crc kubenswrapper[4799]: I1010 16:33:13.254275 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:13 crc kubenswrapper[4799]: I1010 16:33:13.254313 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:13 crc kubenswrapper[4799]: I1010 16:33:13.254322 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:13 crc kubenswrapper[4799]: I1010 16:33:13.254335 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:13 crc kubenswrapper[4799]: I1010 16:33:13.254345 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:13Z","lastTransitionTime":"2025-10-10T16:33:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:13 crc kubenswrapper[4799]: I1010 16:33:13.345835 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:33:13 crc kubenswrapper[4799]: I1010 16:33:13.346091 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:33:13 crc kubenswrapper[4799]: I1010 16:33:13.346152 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:33:13 crc kubenswrapper[4799]: E1010 16:33:13.346205 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 10 16:33:13 crc kubenswrapper[4799]: E1010 16:33:13.346253 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 10 16:33:13 crc kubenswrapper[4799]: E1010 16:33:13.346277 4799 projected.go:194] Error preparing data for projected 
volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 10 16:33:13 crc kubenswrapper[4799]: E1010 16:33:13.346344 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 10 16:33:13 crc kubenswrapper[4799]: E1010 16:33:13.346438 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 10 16:33:13 crc kubenswrapper[4799]: E1010 16:33:13.346459 4799 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 10 16:33:13 crc kubenswrapper[4799]: E1010 16:33:13.346362 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-10 16:34:17.346336365 +0000 UTC m=+150.854660510 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 10 16:33:13 crc kubenswrapper[4799]: E1010 16:33:13.346889 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-10 16:34:17.346859398 +0000 UTC m=+150.855183553 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 10 16:33:13 crc kubenswrapper[4799]: E1010 16:33:13.346385 4799 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 10 16:33:13 crc kubenswrapper[4799]: E1010 16:33:13.347010 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-10 16:34:17.346995922 +0000 UTC m=+150.855320077 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 10 16:33:13 crc kubenswrapper[4799]: E1010 16:33:13.346531 4799 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 10 16:33:13 crc kubenswrapper[4799]: I1010 16:33:13.347257 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:33:13 crc kubenswrapper[4799]: E1010 16:33:13.347261 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-10 16:34:17.347197317 +0000 UTC m=+150.855521472 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 10 16:33:13 crc kubenswrapper[4799]: I1010 16:33:13.358479 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:13 crc kubenswrapper[4799]: I1010 16:33:13.358571 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:13 crc kubenswrapper[4799]: I1010 16:33:13.358589 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:13 crc kubenswrapper[4799]: I1010 16:33:13.358614 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:13 crc kubenswrapper[4799]: I1010 16:33:13.358631 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:13Z","lastTransitionTime":"2025-10-10T16:33:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:13 crc kubenswrapper[4799]: I1010 16:33:13.401500 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:33:13 crc kubenswrapper[4799]: I1010 16:33:13.401577 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:33:13 crc kubenswrapper[4799]: E1010 16:33:13.401671 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:33:13 crc kubenswrapper[4799]: E1010 16:33:13.401808 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085"
Oct 10 16:33:14 crc kubenswrapper[4799]: I1010 16:33:14.402193 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:33:14 crc kubenswrapper[4799]: E1010 16:33:14.402339 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:33:14 crc kubenswrapper[4799]: I1010 16:33:14.402415 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:33:14 crc kubenswrapper[4799]: E1010 16:33:14.402491 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Has your network provider started?"} Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.334104 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.334188 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.334212 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.334244 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.334268 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:15Z","lastTransitionTime":"2025-10-10T16:33:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.402347 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.402567 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:33:15 crc kubenswrapper[4799]: E1010 16:33:15.402828 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:33:15 crc kubenswrapper[4799]: E1010 16:33:15.403053 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.437398 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.437461 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.437478 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.437500 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.437519 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:15Z","lastTransitionTime":"2025-10-10T16:33:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.540234 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.540341 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.540360 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.540422 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.540446 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:15Z","lastTransitionTime":"2025-10-10T16:33:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.643568 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.643628 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.643644 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.643671 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.643690 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:15Z","lastTransitionTime":"2025-10-10T16:33:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.747515 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.747572 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.747589 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.747615 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.747631 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:15Z","lastTransitionTime":"2025-10-10T16:33:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.851383 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.851604 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.851676 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.851711 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.851751 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:15Z","lastTransitionTime":"2025-10-10T16:33:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.954546 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.954620 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.954642 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.954667 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:15 crc kubenswrapper[4799]: I1010 16:33:15.954686 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:15Z","lastTransitionTime":"2025-10-10T16:33:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.058506 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.058573 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.058591 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.058617 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.058634 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:16Z","lastTransitionTime":"2025-10-10T16:33:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.162490 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.162579 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.162614 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.162646 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.162667 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:16Z","lastTransitionTime":"2025-10-10T16:33:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.265558 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.265620 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.265637 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.265664 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.265683 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:16Z","lastTransitionTime":"2025-10-10T16:33:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.368599 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.368644 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.368655 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.368673 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.368684 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:16Z","lastTransitionTime":"2025-10-10T16:33:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.402174 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.402272 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:33:16 crc kubenswrapper[4799]: E1010 16:33:16.402337 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:33:16 crc kubenswrapper[4799]: E1010 16:33:16.402536 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.471852 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.471911 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.471932 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.471964 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.471984 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:16Z","lastTransitionTime":"2025-10-10T16:33:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.575678 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.575733 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.575750 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.575822 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.575840 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:16Z","lastTransitionTime":"2025-10-10T16:33:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.679396 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.679455 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.679474 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.679500 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.679520 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:16Z","lastTransitionTime":"2025-10-10T16:33:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.782451 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.782508 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.782524 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.782548 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.782569 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:16Z","lastTransitionTime":"2025-10-10T16:33:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.886008 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.886064 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.886081 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.886106 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.886123 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:16Z","lastTransitionTime":"2025-10-10T16:33:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.989560 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.989651 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.989672 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.989711 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:16 crc kubenswrapper[4799]: I1010 16:33:16.989733 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:16Z","lastTransitionTime":"2025-10-10T16:33:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.092988 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.093052 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.093074 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.093104 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.093125 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:17Z","lastTransitionTime":"2025-10-10T16:33:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.195684 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.195769 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.195784 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.195820 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.195834 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:17Z","lastTransitionTime":"2025-10-10T16:33:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.298306 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.298354 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.298373 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.298392 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.298405 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:17Z","lastTransitionTime":"2025-10-10T16:33:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.400806 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.400857 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.400872 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.400889 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.400901 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:17Z","lastTransitionTime":"2025-10-10T16:33:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.401807 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.401826 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:33:17 crc kubenswrapper[4799]: E1010 16:33:17.402003 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:33:17 crc kubenswrapper[4799]: E1010 16:33:17.402138 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.430419 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bf1784a-776b-49c7-b64b-7ce52860df45\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://298a1a9571fbe118fe81ff3e7403e298bcde9b683cffab574fbb03d5adc1fb67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f767e89684b9b515da850360aaf9d7a02173395faf0654e9f0b3a4752a3d608b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92de340d859cff018a661f0a7f7fe209ffae161bf6f39deb005c7148591fc60b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17
ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a649a65ab118025ea70d1d7cf71236cb96992671c3bc7659d591640b53f941\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.446030 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.465085 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.489807 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://995be5ba088a3758758ce5aaf735f0371692c52e49e3992c6478311411c8db42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.505300 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.505382 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:17 crc 
kubenswrapper[4799]: I1010 16:33:17.505400 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.505426 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.505446 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:17Z","lastTransitionTime":"2025-10-10T16:33:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.508624 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17b2b63923e40e58b4a3d352781758ecf7c0e63eb913813e0f738d19dfb05676\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9722a694091d19d16b7c08ac22e23532deca8f4bde306a0d651d5524484fd1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:3
2:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z97c7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.532316 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"star
ted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7eb0f742793fbd0bee8e88732ec832748e77d9226a926def177968f24a9cf06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.555470 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.571861 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.587895 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.608810 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.608872 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.608885 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.608913 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.608928 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:17Z","lastTransitionTime":"2025-10-10T16:33:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.622096 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cbc87c392646ebf9c016f8c7b40bcec30e33a0a05ea4a896d1143c5f1086990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd261112ca7db4d0d76f6ab29a0347d64dccfff4db42ac9f55d6d7df1443ab23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://c7d0e536ad5143941dd18418b1ac7972a1136a841542b950f6891a386d43ca9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfe05183ad0b03415525e6aa2a8d52a5d63b8c273113c46326396df5e0c2bb12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6562d440ce1f1477fd09c15c34ab88e17e1fb2c2cae4b32a7bf8cbdd29f4d5a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4fcf53aeed6c07f775152de0faa9fa0671848df06d37cbca6ec7097d0024d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22025d59e852d7ca86c7739f0dd141f6b388604bcf9ffaabfa48433290db84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df22025d59e852d7ca86c7739f0dd141f6b388604bcf9ffaabfa48433290db84\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:33:06Z\\\",\\\"message\\\":\\\"300553 6825 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nF1010 16:33:06.300559 6825 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:06Z is after 2025-08-24T17:21:41Z]\\\\nI1010 16:33:06.300565 6825 obj_retry.go:303] Retry object setup: *v1.Pod openshift-etcd/etcd-crc\\\\nI1010 16:33:06.300572 6825 obj_retry.go:365] Adding new object: *v1.Pod openshift-etcd/etcd-crc\\\\nI1010 16:33:06.300560 6825 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:33:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-mcwfc_openshift-ovn-kubernetes(abe7f2d9-ec99-4724-a01f-cc7096377e07)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08406e220de50ba85f882a05117b5df8c9445a38c026bb85c95fc9f98f2d2cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.638916 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fa2af5-68d0-4ab4-8579-38876a3ce10c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd57f69503813185900ddde784de4d3582b141416c6310598d416eec17c0beac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c7561ae8f1ea6cb96c659f004106dfdc36f0a3ad76e66
f9dd5b55ad905742df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c7561ae8f1ea6cb96c659f004106dfdc36f0a3ad76e66f9dd5b55ad905742df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.654263 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.675213 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.695058 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fa19f17c5ed052d9c266f2da2d4e8338037b397bc2fb5e859f733c6b8c1b69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:32:58Z\\\",\\\"message\\\":\\\"2025-10-10T16:32:13+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1650ba65-1f10-40d9-a47c-1a8dc4b79e86\\\\n2025-10-10T16:32:13+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1650ba65-1f10-40d9-a47c-1a8dc4b79e86 to /host/opt/cni/bin/\\\\n2025-10-10T16:32:13Z [verbose] multus-daemon started\\\\n2025-10-10T16:32:13Z [verbose] Readiness Indicator file check\\\\n2025-10-10T16:32:58Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.710902 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b030264f18288aa7687a91f7918f1ed2c2ad474637e32a054ea8c25b97aef45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.712232 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.712414 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.712430 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.712455 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.712470 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:17Z","lastTransitionTime":"2025-10-10T16:33:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.737443 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.754355 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7878cf5-3c6d-4a4a-9ccd-7de395f9ac84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38c4fe49eff3373937abdebfb7d58fe9d5c73809375a3dca4f165aab84d6cbd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48434cdadac2409d0e3baf595e00260b1e3f94b8b9dab62e3f87503a6e888be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88216eac74e0df9deb1ca1bef893deb2e23a79ffffdbd8a851a67df407eaa470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91106a41672b01d9f5c61cfc3001b84f024f3b96649bbc9174f3a635fc8034a9\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91106a41672b01d9f5c61cfc3001b84f024f3b96649bbc9174f3a635fc8034a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.767209 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:17Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.783102 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k6hch" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7903c578-d05e-4ad7-8fd9-f438abf4a085\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:23Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k6hch\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-10-10T16:33:17Z is after 2025-08-24T17:21:41Z"
Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.815751 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.815870 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.815887 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.815907 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.815922 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:17Z","lastTransitionTime":"2025-10-10T16:33:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.919336 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.919390 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.919408 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.919436 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:33:17 crc kubenswrapper[4799]: I1010 16:33:17.919454 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:17Z","lastTransitionTime":"2025-10-10T16:33:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.022391 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.022431 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.022444 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.022466 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.022478 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:18Z","lastTransitionTime":"2025-10-10T16:33:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.124793 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.124851 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.124871 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.124899 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.124919 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:18Z","lastTransitionTime":"2025-10-10T16:33:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.234304 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.234413 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.234445 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.234489 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.234529 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:18Z","lastTransitionTime":"2025-10-10T16:33:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.338483 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.338531 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.338544 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.338563 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.338576 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:18Z","lastTransitionTime":"2025-10-10T16:33:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.401629 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.401691 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 10 16:33:18 crc kubenswrapper[4799]: E1010 16:33:18.401792 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 10 16:33:18 crc kubenswrapper[4799]: E1010 16:33:18.401968 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.442534 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.442592 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.442605 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.442622 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.442638 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:18Z","lastTransitionTime":"2025-10-10T16:33:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.546643 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.546725 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.546793 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.546831 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.546854 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:18Z","lastTransitionTime":"2025-10-10T16:33:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.649714 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.649815 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.649835 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.649871 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.649895 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:18Z","lastTransitionTime":"2025-10-10T16:33:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.753859 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.753924 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.753940 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.753966 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.753985 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:18Z","lastTransitionTime":"2025-10-10T16:33:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.858865 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.858975 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.858995 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.859034 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.859055 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:18Z","lastTransitionTime":"2025-10-10T16:33:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.963310 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.963430 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.963451 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.963489 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:18 crc kubenswrapper[4799]: I1010 16:33:18.963513 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:18Z","lastTransitionTime":"2025-10-10T16:33:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.067946 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.068035 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.068052 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.068084 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.068103 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:19Z","lastTransitionTime":"2025-10-10T16:33:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.172225 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.172307 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.172330 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.172366 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.172394 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:19Z","lastTransitionTime":"2025-10-10T16:33:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.276345 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.276402 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.276419 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.276446 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.276467 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:19Z","lastTransitionTime":"2025-10-10T16:33:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.380031 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.380104 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.380123 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.380149 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.380167 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:19Z","lastTransitionTime":"2025-10-10T16:33:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.402181 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.402215 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:33:19 crc kubenswrapper[4799]: E1010 16:33:19.402386 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:33:19 crc kubenswrapper[4799]: E1010 16:33:19.402490 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.483532 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.483607 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.483627 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.483658 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.483679 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:19Z","lastTransitionTime":"2025-10-10T16:33:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.587521 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.587587 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.587605 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.587629 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.587646 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:19Z","lastTransitionTime":"2025-10-10T16:33:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.690721 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.690828 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.690846 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.690870 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.690888 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:19Z","lastTransitionTime":"2025-10-10T16:33:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.793872 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.793973 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.793993 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.794018 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.794039 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:19Z","lastTransitionTime":"2025-10-10T16:33:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.896368 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.896437 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.896454 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.896484 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:19 crc kubenswrapper[4799]: I1010 16:33:19.896503 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:19Z","lastTransitionTime":"2025-10-10T16:33:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:19.999978 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.000054 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.000073 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.000104 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.000124 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:20Z","lastTransitionTime":"2025-10-10T16:33:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.103117 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.103181 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.103205 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.103230 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.103247 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:20Z","lastTransitionTime":"2025-10-10T16:33:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.206656 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.206716 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.206735 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.206789 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.206811 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:20Z","lastTransitionTime":"2025-10-10T16:33:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.310474 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.310547 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.310564 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.310593 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.310612 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:20Z","lastTransitionTime":"2025-10-10T16:33:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.402384 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.402432 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:33:20 crc kubenswrapper[4799]: E1010 16:33:20.402596 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:33:20 crc kubenswrapper[4799]: E1010 16:33:20.402827 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.414040 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.414107 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.414125 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.414155 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.414173 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:20Z","lastTransitionTime":"2025-10-10T16:33:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.518060 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.518161 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.518186 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.518220 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.518243 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:20Z","lastTransitionTime":"2025-10-10T16:33:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.622388 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.622442 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.622454 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.622475 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.622489 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:20Z","lastTransitionTime":"2025-10-10T16:33:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.725414 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.725492 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.725511 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.725537 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.725556 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:20Z","lastTransitionTime":"2025-10-10T16:33:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.829445 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.829515 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.829535 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.829559 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.829572 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:20Z","lastTransitionTime":"2025-10-10T16:33:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.932794 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.932858 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.932877 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.932908 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:33:20 crc kubenswrapper[4799]: I1010 16:33:20.932927 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:20Z","lastTransitionTime":"2025-10-10T16:33:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:33:21 crc kubenswrapper[4799]: I1010 16:33:21.401941 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch"
Oct 10 16:33:21 crc kubenswrapper[4799]: I1010 16:33:21.402041 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 10 16:33:21 crc kubenswrapper[4799]: E1010 16:33:21.402153 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085"
Oct 10 16:33:21 crc kubenswrapper[4799]: E1010 16:33:21.402267 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.019444 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.019501 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.019518 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.019542 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.019560 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:22Z","lastTransitionTime":"2025-10-10T16:33:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:22 crc kubenswrapper[4799]: E1010 16:33:22.038222 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:22Z is after 
2025-08-24T17:21:41Z" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.043602 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.043666 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.043685 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.043711 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.043729 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:22Z","lastTransitionTime":"2025-10-10T16:33:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:22 crc kubenswrapper[4799]: E1010 16:33:22.065104 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:22Z is after 
2025-08-24T17:21:41Z" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.070567 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.070621 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.070639 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.070664 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.070684 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:22Z","lastTransitionTime":"2025-10-10T16:33:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:22 crc kubenswrapper[4799]: E1010 16:33:22.090904 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:22Z is after 
2025-08-24T17:21:41Z" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.095984 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.096200 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.096337 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.096480 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.096610 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:22Z","lastTransitionTime":"2025-10-10T16:33:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:22 crc kubenswrapper[4799]: E1010 16:33:22.116946 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:22Z is after 
2025-08-24T17:21:41Z" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.122203 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.122270 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.122293 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.122325 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.122347 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:22Z","lastTransitionTime":"2025-10-10T16:33:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:22 crc kubenswrapper[4799]: E1010 16:33:22.143537 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:22Z is after 
2025-08-24T17:21:41Z" Oct 10 16:33:22 crc kubenswrapper[4799]: E1010 16:33:22.143805 4799 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.145872 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.145948 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.145992 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.146021 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.146041 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:22Z","lastTransitionTime":"2025-10-10T16:33:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.250158 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.250244 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.250263 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.250289 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.250308 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:22Z","lastTransitionTime":"2025-10-10T16:33:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.353173 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.353251 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.353268 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.353296 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.353316 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:22Z","lastTransitionTime":"2025-10-10T16:33:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.401995 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.402042 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:33:22 crc kubenswrapper[4799]: E1010 16:33:22.402619 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:33:22 crc kubenswrapper[4799]: E1010 16:33:22.402787 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.403217 4799 scope.go:117] "RemoveContainer" containerID="df22025d59e852d7ca86c7739f0dd141f6b388604bcf9ffaabfa48433290db84" Oct 10 16:33:22 crc kubenswrapper[4799]: E1010 16:33:22.403529 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-mcwfc_openshift-ovn-kubernetes(abe7f2d9-ec99-4724-a01f-cc7096377e07)\"" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.456746 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.456871 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.456894 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.456919 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.456936 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:22Z","lastTransitionTime":"2025-10-10T16:33:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.559777 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.559821 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.559836 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.559856 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.559872 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:22Z","lastTransitionTime":"2025-10-10T16:33:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.663145 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.663211 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.663230 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.663255 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.663273 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:22Z","lastTransitionTime":"2025-10-10T16:33:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.766313 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.766373 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.766389 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.766414 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.766436 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:22Z","lastTransitionTime":"2025-10-10T16:33:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.869971 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.870046 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.870072 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.870104 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.870125 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:22Z","lastTransitionTime":"2025-10-10T16:33:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.973109 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.973186 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.973204 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.973230 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:22 crc kubenswrapper[4799]: I1010 16:33:22.973249 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:22Z","lastTransitionTime":"2025-10-10T16:33:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.075595 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.075670 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.075693 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.075730 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.075798 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:23Z","lastTransitionTime":"2025-10-10T16:33:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.179459 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.179510 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.179524 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.179544 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.179557 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:23Z","lastTransitionTime":"2025-10-10T16:33:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.281747 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.281833 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.281851 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.281875 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.281893 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:23Z","lastTransitionTime":"2025-10-10T16:33:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.385228 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.385313 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.385325 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.385344 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.385359 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:23Z","lastTransitionTime":"2025-10-10T16:33:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.401992 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.401992 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:33:23 crc kubenswrapper[4799]: E1010 16:33:23.402162 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:33:23 crc kubenswrapper[4799]: E1010 16:33:23.402339 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.487702 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.487739 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.487747 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.487787 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.487805 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:23Z","lastTransitionTime":"2025-10-10T16:33:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.590264 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.590333 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.590355 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.590386 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.590408 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:23Z","lastTransitionTime":"2025-10-10T16:33:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.693597 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.693684 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.693709 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.693742 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.693788 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:23Z","lastTransitionTime":"2025-10-10T16:33:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.797074 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.797139 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.797162 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.797230 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.797249 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:23Z","lastTransitionTime":"2025-10-10T16:33:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.900866 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.900915 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.900930 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.900952 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:23 crc kubenswrapper[4799]: I1010 16:33:23.900969 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:23Z","lastTransitionTime":"2025-10-10T16:33:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.003975 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.004037 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.004060 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.004089 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.004107 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:24Z","lastTransitionTime":"2025-10-10T16:33:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.106923 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.106993 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.107017 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.107044 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.107061 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:24Z","lastTransitionTime":"2025-10-10T16:33:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.210158 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.210242 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.210277 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.210314 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.210337 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:24Z","lastTransitionTime":"2025-10-10T16:33:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.313355 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.313408 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.313426 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.313452 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.313470 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:24Z","lastTransitionTime":"2025-10-10T16:33:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.401823 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.401919 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:33:24 crc kubenswrapper[4799]: E1010 16:33:24.402101 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:33:24 crc kubenswrapper[4799]: E1010 16:33:24.402279 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.416215 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.416269 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.416292 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.416318 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.416341 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:24Z","lastTransitionTime":"2025-10-10T16:33:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.519596 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.519668 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.519687 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.519715 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.519734 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:24Z","lastTransitionTime":"2025-10-10T16:33:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.622782 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.622834 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.622849 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.622866 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.622877 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:24Z","lastTransitionTime":"2025-10-10T16:33:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.725866 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.725928 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.725945 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.725970 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.725987 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:24Z","lastTransitionTime":"2025-10-10T16:33:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.829308 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.829377 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.829400 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.829430 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.829453 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:24Z","lastTransitionTime":"2025-10-10T16:33:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.933111 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.933163 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.933180 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.933204 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:24 crc kubenswrapper[4799]: I1010 16:33:24.933226 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:24Z","lastTransitionTime":"2025-10-10T16:33:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.036209 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.036316 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.036457 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.036558 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.036587 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:25Z","lastTransitionTime":"2025-10-10T16:33:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.139912 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.139985 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.140013 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.140045 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.140066 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:25Z","lastTransitionTime":"2025-10-10T16:33:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.243585 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.243660 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.243685 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.243717 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.243738 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:25Z","lastTransitionTime":"2025-10-10T16:33:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.347492 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.347613 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.347639 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.347668 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.347690 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:25Z","lastTransitionTime":"2025-10-10T16:33:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.402522 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.402570 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:33:25 crc kubenswrapper[4799]: E1010 16:33:25.402735 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:33:25 crc kubenswrapper[4799]: E1010 16:33:25.402900 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.451128 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.451208 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.451233 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.451265 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.451288 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:25Z","lastTransitionTime":"2025-10-10T16:33:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.554740 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.554824 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.554842 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.554867 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.554885 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:25Z","lastTransitionTime":"2025-10-10T16:33:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.658542 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.658593 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.658610 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.658636 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.658653 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:25Z","lastTransitionTime":"2025-10-10T16:33:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.761580 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.761648 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.761673 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.761705 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.761728 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:25Z","lastTransitionTime":"2025-10-10T16:33:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.864659 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.864718 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.864739 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.864840 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.864864 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:25Z","lastTransitionTime":"2025-10-10T16:33:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.967799 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.967850 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.967867 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.967893 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:25 crc kubenswrapper[4799]: I1010 16:33:25.967909 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:25Z","lastTransitionTime":"2025-10-10T16:33:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.070595 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.070648 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.070665 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.070689 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.070705 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:26Z","lastTransitionTime":"2025-10-10T16:33:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.174027 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.174093 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.174111 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.174138 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.174160 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:26Z","lastTransitionTime":"2025-10-10T16:33:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.278351 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.278410 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.278427 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.278454 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.278471 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:26Z","lastTransitionTime":"2025-10-10T16:33:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.382576 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.382638 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.382659 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.382686 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.382705 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:26Z","lastTransitionTime":"2025-10-10T16:33:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.401413 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.401474 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:33:26 crc kubenswrapper[4799]: E1010 16:33:26.401813 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:33:26 crc kubenswrapper[4799]: E1010 16:33:26.402135 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.486550 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.486652 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.486679 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.486711 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.486735 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:26Z","lastTransitionTime":"2025-10-10T16:33:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.590013 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.590083 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.590103 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.590132 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.590157 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:26Z","lastTransitionTime":"2025-10-10T16:33:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.694118 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.694171 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.694185 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.694206 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.694222 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:26Z","lastTransitionTime":"2025-10-10T16:33:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.797083 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.797154 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.797171 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.797196 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.797217 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:26Z","lastTransitionTime":"2025-10-10T16:33:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.899786 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.899862 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.899879 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.899905 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:26 crc kubenswrapper[4799]: I1010 16:33:26.899921 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:26Z","lastTransitionTime":"2025-10-10T16:33:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.002157 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.002237 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.002259 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.002294 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.002315 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:27Z","lastTransitionTime":"2025-10-10T16:33:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.105373 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.105480 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.105500 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.106032 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.106307 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:27Z","lastTransitionTime":"2025-10-10T16:33:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.210109 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.210158 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.210174 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.210197 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.210212 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:27Z","lastTransitionTime":"2025-10-10T16:33:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.313031 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.313064 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.313074 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.313088 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.313097 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:27Z","lastTransitionTime":"2025-10-10T16:33:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.401895 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.401993 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:33:27 crc kubenswrapper[4799]: E1010 16:33:27.402265 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:33:27 crc kubenswrapper[4799]: E1010 16:33:27.402476 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.415383 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.415419 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.415430 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.415444 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.415457 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:27Z","lastTransitionTime":"2025-10-10T16:33:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.415937 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:27Z is 
after 2025-08-24T17:21:41Z" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.432681 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.457946 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://995be5ba088a3758758ce5aaf735f0371692c52e49e3992c6478311411c8db42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.478965 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bf1784a-776b-49c7-b64b-7ce52860df45\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://298a1a9571fbe118fe81ff3e7403e298bcde9b683cffab574fbb03d5adc1fb67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f767e89684b9b515da850360aaf9d7a02173395faf0654e9f0b3a4752a3d608b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92de340d859cff018a661f0a7f7fe209ffae161bf6f39deb005c7148591fc60b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a649a65ab118025ea70d1d7cf71236cb96992671c3bc7659d591640b53f941\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.500281 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.517923 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.517980 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.517998 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.518023 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.518040 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:27Z","lastTransitionTime":"2025-10-10T16:33:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.522977 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.542200 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.577121 4799 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cbc87c392646ebf9c016f8c7b40bcec30e33a0a05ea4a896d1143c5f1086990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd261112ca7db4d0d76f6ab29a0347d64dccfff4db42ac9f55d6d7df1443ab23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7d0e536ad5143941dd18418b1ac7972a1136a841542b950f6891a386d43ca9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfe05183ad0b03415525e6aa2a8d52a5d63b8c273113c46326396df5e0c2bb12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6562d440ce1f1477fd09c15c34ab88e17e1fb2c2cae4b32a7bf8cbdd29f4d5a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4fcf53aeed6c07f775152de0faa9fa0671848df06d37cbca6ec7097d0024d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22025d59e852d7ca86c7739f0dd141f6b388604bcf9ffaabfa48433290db84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df22025d59e852d7ca86c7739f0dd141f6b388604bcf9ffaabfa48433290db84\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:33:06Z\\\",\\\"message\\\":\\\"300553 6825 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nF1010 16:33:06.300559 6825 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:06Z is after 2025-08-24T17:21:41Z]\\\\nI1010 16:33:06.300565 6825 obj_retry.go:303] Retry object setup: *v1.Pod openshift-etcd/etcd-crc\\\\nI1010 16:33:06.300572 6825 obj_retry.go:365] Adding new object: *v1.Pod openshift-etcd/etcd-crc\\\\nI1010 16:33:06.300560 6825 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:33:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-mcwfc_openshift-ovn-kubernetes(abe7f2d9-ec99-4724-a01f-cc7096377e07)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08406e220de50ba85f882a05117b5df8c9445a38c026bb85c95fc9f98f2d2cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.598083 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17b2b63923e40e58b4a3d352781758ecf7c0e63eb913813e0f738d19dfb05676\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9722a694091d19d16b7c08ac22e23532deca8f4bde306a0d651d5524484fd1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z97c7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.622492 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.622552 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.622570 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.622602 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.622622 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:27Z","lastTransitionTime":"2025-10-10T16:33:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.624828 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7eb0f742793fbd0bee8e88732ec832748e77d9226a926def177968f24a9cf06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.647010 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.669185 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.691226 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fa19f17c5ed052d9c266f2da2d4e8338037b397bc2fb5e859f733c6b8c1b69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:32:58Z\\\",\\\"message\\\":\\\"2025-10-10T16:32:13+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1650ba65-1f10-40d9-a47c-1a8dc4b79e86\\\\n2025-10-10T16:32:13+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1650ba65-1f10-40d9-a47c-1a8dc4b79e86 to /host/opt/cni/bin/\\\\n2025-10-10T16:32:13Z [verbose] multus-daemon started\\\\n2025-10-10T16:32:13Z [verbose] Readiness Indicator file check\\\\n2025-10-10T16:32:58Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.707358 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7903c578-d05e-4ad7-8fd9-f438abf4a085-metrics-certs\") pod \"network-metrics-daemon-k6hch\" (UID: \"7903c578-d05e-4ad7-8fd9-f438abf4a085\") " pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:33:27 crc kubenswrapper[4799]: E1010 16:33:27.707542 4799 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 10 16:33:27 crc kubenswrapper[4799]: E1010 16:33:27.707623 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7903c578-d05e-4ad7-8fd9-f438abf4a085-metrics-certs podName:7903c578-d05e-4ad7-8fd9-f438abf4a085 nodeName:}" failed. No retries permitted until 2025-10-10 16:34:31.707600706 +0000 UTC m=+165.215924861 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7903c578-d05e-4ad7-8fd9-f438abf4a085-metrics-certs") pod "network-metrics-daemon-k6hch" (UID: "7903c578-d05e-4ad7-8fd9-f438abf4a085") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.708328 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b030264f18288aa7687a91f7918f1ed2c2ad474637e32a054ea8c25b97aef45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.722254 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fa2af5-68d0-4ab4-8579-38876a3ce10c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd57f69503813185900ddde784de4d3582b141416c6310598d416eec17c0beac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c7561ae8f1ea6cb96c659f004106dfdc36f0a3ad76e66f9dd5b55ad905742df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c7561ae8f1ea6cb96c659f004106dfdc36f0a3ad76e66f9dd5b55ad905742df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.728948 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.729039 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.729051 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.729072 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.729084 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:27Z","lastTransitionTime":"2025-10-10T16:33:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.740187 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7878cf5-3c6d-4a4a-9ccd-7de395f9ac84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38c4fe49eff3373937abdebfb7d58fe9d5c73809375a3dca4f165aab84d6cbd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48434cdadac2409d0e3baf595e00260b1e3f94b8b9dab62e3f87503a6e888be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"nam
e\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88216eac74e0df9deb1ca1bef893deb2e23a79ffffdbd8a851a67df407eaa470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91106a41672b01d9f5c61cfc3001b84f024f3b96649bbc9174f3a635fc8034a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91106a41672b01d9f5c61cfc3001b84f024f3b96649bbc9174f3a635fc8034a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.755058 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.770037 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k6hch" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7903c578-d05e-4ad7-8fd9-f438abf4a085\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:23Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k6hch\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.799512 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5
778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:27Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.832354 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.832441 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.832466 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.832498 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.832520 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:27Z","lastTransitionTime":"2025-10-10T16:33:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.935384 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.935500 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.935524 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.935556 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:27 crc kubenswrapper[4799]: I1010 16:33:27.935577 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:27Z","lastTransitionTime":"2025-10-10T16:33:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.038853 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.038920 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.038943 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.038974 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.038995 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:28Z","lastTransitionTime":"2025-10-10T16:33:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.141457 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.141501 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.141512 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.141530 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.141543 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:28Z","lastTransitionTime":"2025-10-10T16:33:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.244411 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.244456 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.244469 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.244490 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.244503 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:28Z","lastTransitionTime":"2025-10-10T16:33:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.347860 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.347921 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.347938 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.347965 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.347989 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:28Z","lastTransitionTime":"2025-10-10T16:33:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.402013 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.402078 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:33:28 crc kubenswrapper[4799]: E1010 16:33:28.402187 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:33:28 crc kubenswrapper[4799]: E1010 16:33:28.402324 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.450744 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.450805 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.450814 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.450831 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.450841 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:28Z","lastTransitionTime":"2025-10-10T16:33:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.553363 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.553413 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.553429 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.553453 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.553470 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:28Z","lastTransitionTime":"2025-10-10T16:33:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.657185 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.657243 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.657261 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.657286 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.657303 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:28Z","lastTransitionTime":"2025-10-10T16:33:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.760131 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.760207 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.760225 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.760248 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.760264 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:28Z","lastTransitionTime":"2025-10-10T16:33:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.863431 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.863488 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.863558 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.863586 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.863609 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:28Z","lastTransitionTime":"2025-10-10T16:33:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.966918 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.967212 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.967356 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.967486 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:28 crc kubenswrapper[4799]: I1010 16:33:28.967607 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:28Z","lastTransitionTime":"2025-10-10T16:33:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.070610 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.070661 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.070678 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.070709 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.070728 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:29Z","lastTransitionTime":"2025-10-10T16:33:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.173443 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.173924 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.174132 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.174331 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.174513 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:29Z","lastTransitionTime":"2025-10-10T16:33:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.278299 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.278360 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.278376 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.278402 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.278421 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:29Z","lastTransitionTime":"2025-10-10T16:33:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.382197 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.382259 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.382277 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.382305 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.382324 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:29Z","lastTransitionTime":"2025-10-10T16:33:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.401961 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.402014 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:33:29 crc kubenswrapper[4799]: E1010 16:33:29.402135 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:33:29 crc kubenswrapper[4799]: E1010 16:33:29.402288 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.485850 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.485913 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.485929 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.485952 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.485969 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:29Z","lastTransitionTime":"2025-10-10T16:33:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.589152 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.589253 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.589274 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.589313 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.589332 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:29Z","lastTransitionTime":"2025-10-10T16:33:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.692826 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.692964 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.692991 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.693025 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.693048 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:29Z","lastTransitionTime":"2025-10-10T16:33:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.795610 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.795685 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.795708 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.795739 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.795795 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:29Z","lastTransitionTime":"2025-10-10T16:33:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.898182 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.898239 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.898258 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.898285 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:29 crc kubenswrapper[4799]: I1010 16:33:29.898303 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:29Z","lastTransitionTime":"2025-10-10T16:33:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.001205 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.001277 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.001301 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.001331 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.001358 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:30Z","lastTransitionTime":"2025-10-10T16:33:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.104238 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.104304 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.104322 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.104352 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.104372 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:30Z","lastTransitionTime":"2025-10-10T16:33:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.207504 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.207565 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.207587 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.207616 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.207634 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:30Z","lastTransitionTime":"2025-10-10T16:33:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.310884 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.310996 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.311023 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.311054 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.311076 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:30Z","lastTransitionTime":"2025-10-10T16:33:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.401419 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.401431 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:33:30 crc kubenswrapper[4799]: E1010 16:33:30.401655 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:33:30 crc kubenswrapper[4799]: E1010 16:33:30.401797 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.413966 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.414086 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.414182 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.414220 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.414249 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:30Z","lastTransitionTime":"2025-10-10T16:33:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.517544 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.517605 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.517625 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.517657 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.517677 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:30Z","lastTransitionTime":"2025-10-10T16:33:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.625081 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.625153 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.625182 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.625211 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.625231 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:30Z","lastTransitionTime":"2025-10-10T16:33:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.728536 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.728620 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.728646 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.728676 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.728698 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:30Z","lastTransitionTime":"2025-10-10T16:33:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.831251 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.831301 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.831313 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.831333 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.831345 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:30Z","lastTransitionTime":"2025-10-10T16:33:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.934685 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.934784 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.934810 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.934839 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:30 crc kubenswrapper[4799]: I1010 16:33:30.934861 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:30Z","lastTransitionTime":"2025-10-10T16:33:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.037595 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.037650 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.037668 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.037690 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.037707 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:31Z","lastTransitionTime":"2025-10-10T16:33:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.140927 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.141002 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.141027 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.141052 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.141071 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:31Z","lastTransitionTime":"2025-10-10T16:33:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.246054 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.246119 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.246137 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.246163 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.246181 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:31Z","lastTransitionTime":"2025-10-10T16:33:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.348839 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.348909 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.348927 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.348953 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.348971 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:31Z","lastTransitionTime":"2025-10-10T16:33:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.403527 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.404208 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:33:31 crc kubenswrapper[4799]: E1010 16:33:31.404499 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:33:31 crc kubenswrapper[4799]: E1010 16:33:31.404632 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.451937 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.452037 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.452065 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.452097 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.452121 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:31Z","lastTransitionTime":"2025-10-10T16:33:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.555072 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.555121 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.555138 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.555162 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.555181 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:31Z","lastTransitionTime":"2025-10-10T16:33:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.657833 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.658176 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.658304 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.658626 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.658791 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:31Z","lastTransitionTime":"2025-10-10T16:33:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.761500 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.761579 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.761598 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.761625 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.761644 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:31Z","lastTransitionTime":"2025-10-10T16:33:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.864821 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.865162 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.865332 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.865537 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:31 crc kubenswrapper[4799]: I1010 16:33:31.865751 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:31Z","lastTransitionTime":"2025-10-10T16:33:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
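The condition being logged here is easy to reproduce outside the kubelet. The sketch below is plain Go, not kubelet or CRI-O source; only the directory path comes from the log message. It applies the same test the container runtime does: NetworkReady stays false until at least one *.conf, *.conflist or *.json file appears in the CNI config directory, which is normally written by the cluster network provider once it starts.

// cnicheck.go - a minimal sketch (not part of the kubelet) that reproduces the
// condition behind "no CNI configuration file in /etc/kubernetes/cni/net.d/.":
// the container runtime reports NetworkReady=false until at least one CNI
// network config appears in its configured directory.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	dir := "/etc/kubernetes/cni/net.d" // directory taken from the log message
	var configs []string
	// The CNI library accepts .conf, .conflist and .json files; anything else
	// in the directory is ignored for readiness purposes.
	for _, pattern := range []string{"*.conf", "*.conflist", "*.json"} {
		matches, err := filepath.Glob(filepath.Join(dir, pattern))
		if err != nil {
			fmt.Fprintln(os.Stderr, "bad pattern:", err)
			os.Exit(1)
		}
		configs = append(configs, matches...)
	}
	if len(configs) == 0 {
		// This is the state the kubelet keeps logging above: the network
		// plugin has not written its config yet, so the node stays NotReady.
		fmt.Println("NetworkReady=false: no CNI configuration file in", dir)
		os.Exit(1)
	}
	fmt.Println("NetworkReady candidate configs:", configs)
}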
[... and again at 16:33:31.969, 16:33:32.073, 16:33:32.177 and 16:33:32.281; the repetition at 16:33:32.328 ends with ...]
Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.328505 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:32Z","lastTransitionTime":"2025-10-10T16:33:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Oct 10 16:33:32 crc kubenswrapper[4799]: E1010 16:33:32.351335 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:32Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.357074 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.357188 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.357257 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.357293 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.357318 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:32Z","lastTransitionTime":"2025-10-10T16:33:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:32 crc kubenswrapper[4799]: E1010 16:33:32.377235 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:32Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.382906 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.382967 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
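Every one of these status-patch retries fails for the same reason reported at the end of the payload: the serving certificate of the node.network-node-identity.openshift.io webhook at 127.0.0.1:9743 expired on 2025-08-24T17:21:41Z, roughly 47 days before these entries. A probe along the following lines (a diagnostic sketch, not an OpenShift tool; the endpoint is taken from the log) shows the validity window of whatever certificate the webhook is currently serving.

// certprobe.go - dials the webhook endpoint from the log, skips verification
// so the handshake completes, and compares the served certificate's validity
// window against the current time, mirroring the x509 failure above.
package main

import (
	"crypto/tls"
	"fmt"
	"os"
	"time"
)

func main() {
	addr := "127.0.0.1:9743" // webhook endpoint taken from the log message
	conn, err := tls.Dial("tcp", addr, &tls.Config{
		InsecureSkipVerify: true, // we only want to inspect the cert, not trust it
	})
	if err != nil {
		fmt.Fprintln(os.Stderr, "dial failed:", err)
		os.Exit(1)
	}
	defer conn.Close()

	cert := conn.ConnectionState().PeerCertificates[0]
	now := time.Now().UTC()
	fmt.Printf("subject:    %s\nnot before: %s\nnot after:  %s\n",
		cert.Subject, cert.NotBefore.UTC(), cert.NotAfter.UTC())
	if now.After(cert.NotAfter) {
		// Matches the failure above: current time 2025-10-10T16:33:32Z is
		// after the certificate's NotAfter of 2025-08-24T17:21:41Z.
		fmt.Println("certificate has expired; node status patches will keep failing")
		os.Exit(1)
	}
}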
event="NodeHasNoDiskPressure" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.382986 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.383011 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.383030 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:32Z","lastTransitionTime":"2025-10-10T16:33:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:32 crc kubenswrapper[4799]: E1010 16:33:32.400298 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:32Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.402305 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:33:32 crc kubenswrapper[4799]: E1010 16:33:32.402820 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.402357 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:33:32 crc kubenswrapper[4799]: E1010 16:33:32.403206 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.405880 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.405932 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.405955 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.405991 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.406076 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:32Z","lastTransitionTime":"2025-10-10T16:33:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
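Since each retry re-logs an identical multi-kilobyte payload, the useful signal in this part of the log is the repeat counts rather than the payloads themselves. A helper along these lines (illustrative only; the input file name and the marker strings are assumptions) reduces a saved copy of the log to those counts.

// logtriage.go - an illustrative helper (not part of any OpenShift tooling)
// that summarizes a kubelet log like this one: how often the node was marked
// NotReady and how often the status patch was rejected by the expired cert.
package main

import (
	"bufio"
	"fmt"
	"os"
	"strings"
)

func main() {
	f, err := os.Open("kubelet.log") // assumed path to a saved copy of this log
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	defer f.Close()

	counts := map[string]int{}
	markers := []string{
		`"Node became not ready"`,
		`"Error updating node status, will retry"`,
		`certificate has expired`,
	}
	sc := bufio.NewScanner(f)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // entries here run very long
	for sc.Scan() {
		for _, m := range markers {
			counts[m] += strings.Count(sc.Text(), m)
		}
	}
	if err := sc.Err(); err != nil {
		fmt.Fprintln(os.Stderr, "scan:", err)
		os.Exit(1)
	}
	for _, m := range markers {
		fmt.Printf("%6d  %s\n", counts[m], m)
	}
}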
Oct 10 16:33:32 crc kubenswrapper[4799]: E1010 16:33:32.426658 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{ [... payload identical to the 16:33:32.351335 entry above, resuming mid-image-list ...] {\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:32Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.431752 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.431876 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.431900 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.431927 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.431947 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:32Z","lastTransitionTime":"2025-10-10T16:33:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:32 crc kubenswrapper[4799]: E1010 16:33:32.449041 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-10T16:33:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-10T16:33:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d99534f1-66d4-4990-b867-b559b1013899\\\",\\\"systemUUID\\\":\\\"19c7da3e-bb2d-454e-9c2c-9c9464638bfe\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:32Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:32 crc kubenswrapper[4799]: E1010 16:33:32.449840 4799 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.455906 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.455953 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.455971 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.455994 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.456012 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:32Z","lastTransitionTime":"2025-10-10T16:33:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.559104 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.559496 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.559670 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.559881 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.560089 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:32Z","lastTransitionTime":"2025-10-10T16:33:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.663375 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.663733 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.663930 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.664086 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.664208 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:32Z","lastTransitionTime":"2025-10-10T16:33:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.767148 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.767198 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.767220 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.767277 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.767300 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:32Z","lastTransitionTime":"2025-10-10T16:33:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.870079 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.870493 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.870908 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.871124 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.871356 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:32Z","lastTransitionTime":"2025-10-10T16:33:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.974662 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.974712 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.974729 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.974782 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:32 crc kubenswrapper[4799]: I1010 16:33:32.974800 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:32Z","lastTransitionTime":"2025-10-10T16:33:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.078232 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.078350 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.078374 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.078405 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.078428 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:33Z","lastTransitionTime":"2025-10-10T16:33:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.181324 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.181402 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.181426 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.181453 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.181473 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:33Z","lastTransitionTime":"2025-10-10T16:33:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.284292 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.284381 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.284405 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.284437 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.284462 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:33Z","lastTransitionTime":"2025-10-10T16:33:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.387525 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.387587 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.387611 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.387642 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.387668 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:33Z","lastTransitionTime":"2025-10-10T16:33:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.401675 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.402216 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:33:33 crc kubenswrapper[4799]: E1010 16:33:33.402326 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:33:33 crc kubenswrapper[4799]: E1010 16:33:33.402435 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.402920 4799 scope.go:117] "RemoveContainer" containerID="df22025d59e852d7ca86c7739f0dd141f6b388604bcf9ffaabfa48433290db84" Oct 10 16:33:33 crc kubenswrapper[4799]: E1010 16:33:33.403222 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-mcwfc_openshift-ovn-kubernetes(abe7f2d9-ec99-4724-a01f-cc7096377e07)\"" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.490465 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.490528 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.490544 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.490568 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.490588 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:33Z","lastTransitionTime":"2025-10-10T16:33:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.593052 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.593108 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.593125 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.593148 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.593165 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:33Z","lastTransitionTime":"2025-10-10T16:33:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.696433 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.696480 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.696497 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.696521 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.696537 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:33Z","lastTransitionTime":"2025-10-10T16:33:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.799137 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.799195 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.799216 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.799244 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.799265 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:33Z","lastTransitionTime":"2025-10-10T16:33:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.904696 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.904749 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.904840 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.904872 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:33 crc kubenswrapper[4799]: I1010 16:33:33.904895 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:33Z","lastTransitionTime":"2025-10-10T16:33:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.008089 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.008415 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.008629 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.009013 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.009232 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:34Z","lastTransitionTime":"2025-10-10T16:33:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.112131 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.112200 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.112223 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.112253 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.112274 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:34Z","lastTransitionTime":"2025-10-10T16:33:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.215680 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.215814 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.215846 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.215877 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.215898 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:34Z","lastTransitionTime":"2025-10-10T16:33:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.318281 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.318335 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.318360 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.318388 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.318410 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:34Z","lastTransitionTime":"2025-10-10T16:33:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.401793 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:33:34 crc kubenswrapper[4799]: E1010 16:33:34.401984 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.403114 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:33:34 crc kubenswrapper[4799]: E1010 16:33:34.403395 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.422493 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.422581 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.422608 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.422645 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.422668 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:34Z","lastTransitionTime":"2025-10-10T16:33:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.526739 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.526885 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.526905 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.526935 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.526953 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:34Z","lastTransitionTime":"2025-10-10T16:33:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.630385 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.630463 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.630484 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.630517 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.630544 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:34Z","lastTransitionTime":"2025-10-10T16:33:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.734392 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.734481 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.734499 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.734530 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.734549 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:34Z","lastTransitionTime":"2025-10-10T16:33:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.838835 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.838925 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.838948 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.838985 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.839011 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:34Z","lastTransitionTime":"2025-10-10T16:33:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.942836 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.942903 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.942927 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.942960 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:34 crc kubenswrapper[4799]: I1010 16:33:34.942984 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:34Z","lastTransitionTime":"2025-10-10T16:33:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.047683 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.047815 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.047837 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.047869 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.047889 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:35Z","lastTransitionTime":"2025-10-10T16:33:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.151549 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.151626 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.151652 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.152101 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.152138 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:35Z","lastTransitionTime":"2025-10-10T16:33:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.258276 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.259007 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.259069 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.259108 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.259133 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:35Z","lastTransitionTime":"2025-10-10T16:33:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.362409 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.362550 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.362572 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.362600 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.362646 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:35Z","lastTransitionTime":"2025-10-10T16:33:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.401819 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:33:35 crc kubenswrapper[4799]: E1010 16:33:35.401983 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.402042 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:33:35 crc kubenswrapper[4799]: E1010 16:33:35.402203 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.467242 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.467333 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.467356 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.467392 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.467417 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:35Z","lastTransitionTime":"2025-10-10T16:33:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.571500 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.571581 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.571605 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.571641 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.571664 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:35Z","lastTransitionTime":"2025-10-10T16:33:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.675306 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.675415 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.675439 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.675469 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.675488 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:35Z","lastTransitionTime":"2025-10-10T16:33:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.778580 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.778668 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.778694 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.778731 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.778790 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:35Z","lastTransitionTime":"2025-10-10T16:33:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.882888 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.882951 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.883021 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.883051 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.883071 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:35Z","lastTransitionTime":"2025-10-10T16:33:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.986820 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.986911 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.986932 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.986964 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:35 crc kubenswrapper[4799]: I1010 16:33:35.986988 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:35Z","lastTransitionTime":"2025-10-10T16:33:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.090555 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.090661 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.090682 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.090712 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.090734 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:36Z","lastTransitionTime":"2025-10-10T16:33:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.194678 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.194738 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.194782 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.194813 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.194834 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:36Z","lastTransitionTime":"2025-10-10T16:33:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.298888 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.298968 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.298987 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.299014 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.299029 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:36Z","lastTransitionTime":"2025-10-10T16:33:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.401529 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.401583 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:33:36 crc kubenswrapper[4799]: E1010 16:33:36.401715 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:33:36 crc kubenswrapper[4799]: E1010 16:33:36.401975 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.403394 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.403484 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.403509 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.403546 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.403572 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:36Z","lastTransitionTime":"2025-10-10T16:33:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.507611 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.507673 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.507691 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.507717 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.507738 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:36Z","lastTransitionTime":"2025-10-10T16:33:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.611838 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.611892 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.611909 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.611934 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.611953 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:36Z","lastTransitionTime":"2025-10-10T16:33:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.715005 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.715091 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.715117 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.715184 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.715209 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:36Z","lastTransitionTime":"2025-10-10T16:33:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.818476 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.818624 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.818654 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.818718 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.818741 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:36Z","lastTransitionTime":"2025-10-10T16:33:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.922936 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.923005 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.923023 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.923057 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:36 crc kubenswrapper[4799]: I1010 16:33:36.923077 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:36Z","lastTransitionTime":"2025-10-10T16:33:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.026408 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.026457 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.026469 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.026488 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.026501 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:37Z","lastTransitionTime":"2025-10-10T16:33:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.129883 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.129961 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.129982 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.130009 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.130028 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:37Z","lastTransitionTime":"2025-10-10T16:33:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.232717 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.232827 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.232853 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.232888 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.232912 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:37Z","lastTransitionTime":"2025-10-10T16:33:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.336491 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.336565 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.336583 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.336609 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.336627 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:37Z","lastTransitionTime":"2025-10-10T16:33:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.401399 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.401433 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:33:37 crc kubenswrapper[4799]: E1010 16:33:37.401608 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:33:37 crc kubenswrapper[4799]: E1010 16:33:37.401708 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.433330 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abe7f2d9-ec99-4724-a01f-cc7096377e07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cbc87c392646ebf9c016f8c7b40bcec30e33a0a05ea4a896d1143c5f1086990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd261112ca7db4d0d76f6ab29a0347d64dccfff4db42ac9f55d6d7df1443ab23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7d0e536ad5143941dd18418b1ac7972a1136a841542b950f6891a386d43ca9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfe05183ad0b03415525e6aa2a8d52a5d63b8c273113c46326396df5e0c2bb12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6562d440ce1f1477fd09c15c34ab88e17e1fb2c2cae4b32a7bf8cbdd29f4d5a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff4fcf53aeed6c07f775152de0faa9fa0671848df06d37cbca6ec7097d0024d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df22025d59e852d7ca86c7739f0dd141f6b38860
4bcf9ffaabfa48433290db84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df22025d59e852d7ca86c7739f0dd141f6b388604bcf9ffaabfa48433290db84\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:33:06Z\\\",\\\"message\\\":\\\"300553 6825 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nF1010 16:33:06.300559 6825 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:06Z is after 2025-08-24T17:21:41Z]\\\\nI1010 16:33:06.300565 6825 obj_retry.go:303] Retry object setup: *v1.Pod openshift-etcd/etcd-crc\\\\nI1010 16:33:06.300572 6825 obj_retry.go:365] Adding new object: *v1.Pod openshift-etcd/etcd-crc\\\\nI1010 16:33:06.300560 6825 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:33:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-mcwfc_openshift-ovn-kubernetes(abe7f2d9-ec99-4724-a01f-cc7096377e07)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08406e220de50ba85f882a05117b5df8c9445a38c026bb85c95fc9f98f2d2cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7pr44\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mcwfc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.439060 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.439122 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.439145 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.439174 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.439194 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:37Z","lastTransitionTime":"2025-10-10T16:33:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.454086 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f19a8ba-b77f-41ce-a4c6-e970b040dd8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17b2b63923e40e58b4a3d352781758ecf7c0e63eb913813e0f738d19dfb05676\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9722a694091d19d16b7c08ac22e23532deca8f4bde306a0d651d5524484fd1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z97c7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.479497 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9c46c5f-a6db-4cef-b179-b669484bbc75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df6b51b97a9e3dcf9102409dc19f67e69e6e28ebec82dd46083922d5606cc4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ba57bc720123daa414f51bf5d3173c6fa0b519947a34816bebc532948fd74ab\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d98759de1f79d9aeb68eb0b3eb21d78d0116f054b5d846c85bd63774b565e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7eb0f742793fbd0bee8e88732ec832748e77d9226a926def177968f24a9cf06\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d76c5ba8c54896d2fde57e2806c48857363c495a9f2d9b3f6904334cf2f9be\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"file observer\\\\nW1010 16:32:08.895315 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1010 16:32:08.895450 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1010 16:32:08.898309 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-712278876/tls.crt::/tmp/serving-cert-712278876/tls.key\\\\\\\"\\\\nI1010 16:32:09.168043 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1010 16:32:09.171891 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1010 16:32:09.171914 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1010 16:32:09.171936 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1010 16:32:09.171942 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1010 16:32:09.176341 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1010 16:32:09.176406 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176435 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1010 16:32:09.176460 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1010 16:32:09.176486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1010 16:32:09.176510 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1010 16:32:09.176533 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1010 16:32:09.176376 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1010 16:32:09.178269 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75fb276a1b4f555aa58d4a862a6f3841984f75958b7ada362d717eca726c41fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78cbeb4c6d2770cabbc752b11e5a62f64ec7820bc3a637a944fa252d779e242b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.505203 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.525852 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.539814 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6536b37f839c0b3f6b55d82b3a1674eeccb07ec93e2cb0a3739705b82df4782c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfkr4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rh8zc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.544063 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.544101 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.544143 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.544209 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.544232 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:37Z","lastTransitionTime":"2025-10-10T16:33:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.552719 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6wjsp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76fdb169-eee9-4170-b948-95e26254208b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b030264f18288aa7687a91f7918f1ed2c2ad474637e32a054ea8c25b97aef45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2ww66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6wjsp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.569525 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fa2af5-68d0-4ab4-8579-38876a3ce10c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd57f69503813185900ddde784de4d3582b141416c6310598d416eec17c0beac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c7561ae8f1ea6cb96c659f004106dfdc36f0a3ad76e66f9dd5b55ad905742df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c7561ae8f1ea6cb96c659f004106dfdc36f0a3ad76e66f9dd5b55ad905742df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.588606 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a953803d264b43ecd9f8b8c871b034d8146e73a4974bb8f503d0ca626370616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.607141 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.625533 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gg5hb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f000ac73-b5de-47c8-a0a7-84bd06475f62\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fa19f17c5ed052d9c266f2da2d4e8338037b397bc2fb5e859f733c6b8c1b69e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-10T16:32:58Z\\\",\\\"message\\\":\\\"2025-10-10T16:32:13+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1650ba65-1f10-40d9-a47c-1a8dc4b79e86\\\\n2025-10-10T16:32:13+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1650ba65-1f10-40d9-a47c-1a8dc4b79e86 to /host/opt/cni/bin/\\\\n2025-10-10T16:32:13Z [verbose] multus-daemon started\\\\n2025-10-10T16:32:13Z [verbose] Readiness Indicator file check\\\\n2025-10-10T16:32:58Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w9g7t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gg5hb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.648339 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.648397 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.648409 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.648430 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.648445 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:37Z","lastTransitionTime":"2025-10-10T16:33:37Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.664906 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ae49f7-6d6a-4a62-909f-7aea2b3953f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0a72be5ffe48f726e63ca3854fcabf6ad7c26f2c3fe432328142da2dc2ceeb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b42b1b86cbd6dacb03b9afc740a33f67674996a9c5a5b291b71708ae53ccfea8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de5b84380f9fb8448cebe90775342fd17260ffb8c591bbd5156f8a216b80f1da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\"
:true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1dad40a84c7f22ffb5d52c708c7e2e03a181c5778793050495c8333ae005731\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79f6778c5b703b2b4fc4e59fffc00824fcab6c8f5e2789661665e635a3539195\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a48bce1f3530d2a78258c6fa2af4f1530890f7967a26c9e91ca2f20f56cdbe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":fal
se,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65fb2cd5fa9b5ff0cad85267e4a036c37593a749da171dc2e5e30ba5159ed96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51ea61becc8c45e5bcb2a2374d503cef3fb940b1618e7501cd05d61fc2a9458f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.683309 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7878cf5-3c6d-4a4a-9ccd-7de395f9ac84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38c4fe49eff3373937abdebfb7d58fe9d5c73809375a3dca4f165aab84d6cbd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48434cdadac2409d0e3baf595e00260b1e3f94b8b9dab62e3f87503a6e888be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88216eac74e0df9deb1ca1bef893deb2e23a79ffffdbd8a851a67df407eaa470\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91106a41672b01d9f5c61cfc3001b84f024f3b96649bbc9174f3a635fc8034a9\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91106a41672b01d9f5c61cfc3001b84f024f3b96649bbc9174f3a635fc8034a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:31:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.696600 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bsdk2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823e91d3-003d-4cbb-bc72-004e1708c19d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec832bfc0c81b98afb4117033b94d2951b042b248148a5f957f3507174b8dbb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-chgmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-bsdk2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.713187 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k6hch" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7903c578-d05e-4ad7-8fd9-f438abf4a085\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hjhjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:23Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k6hch\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-10-10T16:33:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.733634 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1bf1784a-776b-49c7-b64b-7ce52860df45\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://298a1a9571fbe118fe81ff3e7403e298bcde9b683cffab574fbb03d5adc1fb67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f767e89684b9b515da850360aaf9d7a02173395faf0654e9f0b3a4752a3d608b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92de340d859cff018a661f0a7f7fe209ffae161bf6f39deb005c7148591fc60b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a649a65ab118025ea70d1d7cf71236cb96992671c3bc7659d591640b53f941\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:31:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:31:47Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.750694 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2128a751508cba96a374652d8d80c66c81351fe0d7f800743a1612196fe8ac55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.751938 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.752130 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.752162 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.752934 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.753006 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:37Z","lastTransitionTime":"2025-10-10T16:33:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.767736 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e024486dad9853cf7debbd2264eca725e50e74ebd215e1e55595d5f8b7c0403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3396ed6bea22d063192c09283426aa98e84d5cab5852e305d61f3d583801187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:37Z is after 2025-08-24T17:21:41Z" Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.790880 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nptcz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96840de9-4451-4499-81fa-a19c62239007\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-10T16:32:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://995be5ba088a3758758ce5aaf735f0371692c52e49e3992c6478311411c8db42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8b7b4526cfbe5d29a5b00c5d82089820b93e5aedbdaace85c4a252fed1b9f53\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0477eb514aef21fcec151973d9b6cf683ced19e9029787b97906438cb94b9f66\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8df7ffc260acc047e334af09b76e6ee2c6dadd8c1fd1ed8860769601c89c6db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c89c6973a557239b60077f2b91a5f088955a973ebf8a9776677daa83f18c274\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f647c1c576be194232c6bcaf882fc8f3c67c78a84edd77222d04f1602434d014\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af9cccbb5d66115ca2db31b1e6738e1aa5f9c948eb65d3db9b5d5f8d9c223a64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-10T16:32:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-10T16:32:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qfxtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-10T16:32:09Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nptcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-10T16:33:37Z is after 2025-08-24T17:21:41Z"
Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.856196 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.856293 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.856320 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.856363 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.856394 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:37Z","lastTransitionTime":"2025-10-10T16:33:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.959102 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.959154 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.959171 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.959199 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:33:37 crc kubenswrapper[4799]: I1010 16:33:37.959217 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:37Z","lastTransitionTime":"2025-10-10T16:33:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.061846 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.061916 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.061928 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.061973 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.061983 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:38Z","lastTransitionTime":"2025-10-10T16:33:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.165242 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.165309 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.165328 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.165356 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.165373 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:38Z","lastTransitionTime":"2025-10-10T16:33:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.269396 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.269472 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.269494 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.269526 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.269545 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:38Z","lastTransitionTime":"2025-10-10T16:33:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.373145 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.373515 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.373672 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.373860 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.374063 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:38Z","lastTransitionTime":"2025-10-10T16:33:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.401846 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.401914 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 10 16:33:38 crc kubenswrapper[4799]: E1010 16:33:38.402689 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 10 16:33:38 crc kubenswrapper[4799]: E1010 16:33:38.403501 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.477561 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.477629 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.477650 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.477680 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.477698 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:38Z","lastTransitionTime":"2025-10-10T16:33:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.582322 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.582466 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.582543 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.582620 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.582647 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:38Z","lastTransitionTime":"2025-10-10T16:33:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.686058 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.686531 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.686814 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.687096 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.687326 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:38Z","lastTransitionTime":"2025-10-10T16:33:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.790689 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.790815 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.790846 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.790873 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.790892 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:38Z","lastTransitionTime":"2025-10-10T16:33:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.893987 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.894055 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.894069 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.894094 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.894108 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:38Z","lastTransitionTime":"2025-10-10T16:33:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.998088 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.998145 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.998162 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.998188 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:33:38 crc kubenswrapper[4799]: I1010 16:33:38.998206 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:38Z","lastTransitionTime":"2025-10-10T16:33:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.101939 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.102096 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.102126 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.102622 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.102964 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:39Z","lastTransitionTime":"2025-10-10T16:33:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.206820 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.206909 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.206929 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.206972 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.206992 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:39Z","lastTransitionTime":"2025-10-10T16:33:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.311369 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.311443 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.311467 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.311505 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.311532 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:39Z","lastTransitionTime":"2025-10-10T16:33:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.401675 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch"
Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.401710 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 10 16:33:39 crc kubenswrapper[4799]: E1010 16:33:39.401953 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085"
pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:33:39 crc kubenswrapper[4799]: E1010 16:33:39.402115 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.414971 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.415030 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.415052 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.415083 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.415106 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:39Z","lastTransitionTime":"2025-10-10T16:33:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.519417 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.519498 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.519517 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.519549 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.519567 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:39Z","lastTransitionTime":"2025-10-10T16:33:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.624062 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.624147 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.624173 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.624211 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.624234 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:39Z","lastTransitionTime":"2025-10-10T16:33:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.728656 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.728739 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.728797 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.728829 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.728852 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:39Z","lastTransitionTime":"2025-10-10T16:33:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.832288 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.832372 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.832402 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.832437 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.832462 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:39Z","lastTransitionTime":"2025-10-10T16:33:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.935389 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.935460 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.935482 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.935511 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:39 crc kubenswrapper[4799]: I1010 16:33:39.935530 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:39Z","lastTransitionTime":"2025-10-10T16:33:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.039016 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.039079 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.039097 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.039120 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.039139 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:40Z","lastTransitionTime":"2025-10-10T16:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.143460 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.143538 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.143562 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.143594 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.143614 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:40Z","lastTransitionTime":"2025-10-10T16:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.249140 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.249272 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.249293 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.249359 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.249382 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:40Z","lastTransitionTime":"2025-10-10T16:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.353429 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.353504 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.353528 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.353561 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.353584 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:40Z","lastTransitionTime":"2025-10-10T16:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.402501 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.402518 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:33:40 crc kubenswrapper[4799]: E1010 16:33:40.402657 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:33:40 crc kubenswrapper[4799]: E1010 16:33:40.402859 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.456858 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.456917 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.456940 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.456966 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.456984 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:40Z","lastTransitionTime":"2025-10-10T16:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.559787 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.559831 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.559841 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.559854 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.559864 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:40Z","lastTransitionTime":"2025-10-10T16:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.663395 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.663465 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.663486 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.663516 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.663536 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:40Z","lastTransitionTime":"2025-10-10T16:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.767120 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.767178 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.767201 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.767234 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.767257 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:40Z","lastTransitionTime":"2025-10-10T16:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.870123 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.870186 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.870204 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.870228 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.870243 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:40Z","lastTransitionTime":"2025-10-10T16:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.973948 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.974017 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.974042 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.974078 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:40 crc kubenswrapper[4799]: I1010 16:33:40.974103 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:40Z","lastTransitionTime":"2025-10-10T16:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.077380 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.077433 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.077445 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.077476 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.077489 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:41Z","lastTransitionTime":"2025-10-10T16:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.181487 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.181547 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.181566 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.181598 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.181620 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:41Z","lastTransitionTime":"2025-10-10T16:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.286908 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.287018 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.287052 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.287094 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.287138 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:41Z","lastTransitionTime":"2025-10-10T16:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.391832 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.391901 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.391929 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.391961 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.391983 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:41Z","lastTransitionTime":"2025-10-10T16:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.406849 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:33:41 crc kubenswrapper[4799]: E1010 16:33:41.407078 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.407408 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:33:41 crc kubenswrapper[4799]: E1010 16:33:41.407527 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.495646 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.495822 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.495844 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.495951 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.495975 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:41Z","lastTransitionTime":"2025-10-10T16:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.600095 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.600162 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.600183 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.600217 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.600239 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:41Z","lastTransitionTime":"2025-10-10T16:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.703175 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.703240 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.703268 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.703305 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.703328 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:41Z","lastTransitionTime":"2025-10-10T16:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.806499 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.806580 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.806606 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.806637 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.806660 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:41Z","lastTransitionTime":"2025-10-10T16:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.910299 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.910370 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.910393 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.910438 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:41 crc kubenswrapper[4799]: I1010 16:33:41.910460 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:41Z","lastTransitionTime":"2025-10-10T16:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.013921 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.013982 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.014006 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.014037 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.014074 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:42Z","lastTransitionTime":"2025-10-10T16:33:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.116874 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.117017 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.117030 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.117047 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.117061 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:42Z","lastTransitionTime":"2025-10-10T16:33:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.219917 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.219976 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.219996 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.220021 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.220041 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:42Z","lastTransitionTime":"2025-10-10T16:33:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.323260 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.323377 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.323410 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.323446 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.323469 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:42Z","lastTransitionTime":"2025-10-10T16:33:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.402351 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.402472 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:33:42 crc kubenswrapper[4799]: E1010 16:33:42.402592 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:33:42 crc kubenswrapper[4799]: E1010 16:33:42.402739 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.426581 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.426654 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.426673 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.426706 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.426723 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:42Z","lastTransitionTime":"2025-10-10T16:33:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.529910 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.529972 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.529991 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.530018 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.530039 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:42Z","lastTransitionTime":"2025-10-10T16:33:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.538470 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.538521 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.538539 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.538560 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.538576 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-10T16:33:42Z","lastTransitionTime":"2025-10-10T16:33:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.605654 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-hqwvt"] Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.606276 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hqwvt" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.609615 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.609652 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.611460 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.613289 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.639496 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=67.637670433 podStartE2EDuration="1m7.637670433s" podCreationTimestamp="2025-10-10 16:32:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:33:42.637178071 +0000 UTC m=+116.145502226" watchObservedRunningTime="2025-10-10 16:33:42.637670433 +0000 UTC m=+116.145994608" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.667119 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-bsdk2" podStartSLOduration=94.667085525 podStartE2EDuration="1m34.667085525s" podCreationTimestamp="2025-10-10 16:32:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:33:42.655537348 +0000 UTC m=+116.163861553" watchObservedRunningTime="2025-10-10 16:33:42.667085525 +0000 UTC m=+116.175409690" Oct 10 16:33:42 crc 
kubenswrapper[4799]: I1010 16:33:42.694923 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=91.694896567 podStartE2EDuration="1m31.694896567s" podCreationTimestamp="2025-10-10 16:32:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:33:42.69419985 +0000 UTC m=+116.202523975" watchObservedRunningTime="2025-10-10 16:33:42.694896567 +0000 UTC m=+116.203220702" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.767791 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-nptcz" podStartSLOduration=93.767734599 podStartE2EDuration="1m33.767734599s" podCreationTimestamp="2025-10-10 16:32:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:33:42.7541222 +0000 UTC m=+116.262446325" watchObservedRunningTime="2025-10-10 16:33:42.767734599 +0000 UTC m=+116.276058754" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.786030 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c55e4d6a-3286-4456-831d-dc67345a0d4f-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-hqwvt\" (UID: \"c55e4d6a-3286-4456-831d-dc67345a0d4f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hqwvt" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.786102 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c55e4d6a-3286-4456-831d-dc67345a0d4f-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-hqwvt\" (UID: \"c55e4d6a-3286-4456-831d-dc67345a0d4f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hqwvt" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.786167 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c55e4d6a-3286-4456-831d-dc67345a0d4f-service-ca\") pod \"cluster-version-operator-5c965bbfc6-hqwvt\" (UID: \"c55e4d6a-3286-4456-831d-dc67345a0d4f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hqwvt" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.786205 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/c55e4d6a-3286-4456-831d-dc67345a0d4f-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-hqwvt\" (UID: \"c55e4d6a-3286-4456-831d-dc67345a0d4f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hqwvt" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.786345 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/c55e4d6a-3286-4456-831d-dc67345a0d4f-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-hqwvt\" (UID: \"c55e4d6a-3286-4456-831d-dc67345a0d4f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hqwvt" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.795706 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=86.795685054 podStartE2EDuration="1m26.795685054s" podCreationTimestamp="2025-10-10 16:32:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:33:42.788518226 +0000 UTC m=+116.296842361" watchObservedRunningTime="2025-10-10 16:33:42.795685054 +0000 UTC m=+116.304009169" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.838833 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podStartSLOduration=94.838811147 podStartE2EDuration="1m34.838811147s" podCreationTimestamp="2025-10-10 16:32:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:33:42.838222522 +0000 UTC m=+116.346546687" watchObservedRunningTime="2025-10-10 16:33:42.838811147 +0000 UTC m=+116.347135292" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.882796 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z97c7" podStartSLOduration=92.882772001 podStartE2EDuration="1m32.882772001s" podCreationTimestamp="2025-10-10 16:32:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:33:42.88232706 +0000 UTC m=+116.390651215" watchObservedRunningTime="2025-10-10 16:33:42.882772001 +0000 UTC m=+116.391096126" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.887827 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c55e4d6a-3286-4456-831d-dc67345a0d4f-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-hqwvt\" (UID: \"c55e4d6a-3286-4456-831d-dc67345a0d4f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hqwvt" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.887902 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c55e4d6a-3286-4456-831d-dc67345a0d4f-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-hqwvt\" (UID: \"c55e4d6a-3286-4456-831d-dc67345a0d4f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hqwvt" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.887967 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c55e4d6a-3286-4456-831d-dc67345a0d4f-service-ca\") pod \"cluster-version-operator-5c965bbfc6-hqwvt\" (UID: \"c55e4d6a-3286-4456-831d-dc67345a0d4f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hqwvt" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.888007 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/c55e4d6a-3286-4456-831d-dc67345a0d4f-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-hqwvt\" (UID: \"c55e4d6a-3286-4456-831d-dc67345a0d4f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hqwvt" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.888064 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: 
\"kubernetes.io/host-path/c55e4d6a-3286-4456-831d-dc67345a0d4f-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-hqwvt\" (UID: \"c55e4d6a-3286-4456-831d-dc67345a0d4f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hqwvt" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.888117 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/c55e4d6a-3286-4456-831d-dc67345a0d4f-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-hqwvt\" (UID: \"c55e4d6a-3286-4456-831d-dc67345a0d4f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hqwvt" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.888181 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/c55e4d6a-3286-4456-831d-dc67345a0d4f-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-hqwvt\" (UID: \"c55e4d6a-3286-4456-831d-dc67345a0d4f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hqwvt" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.888890 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c55e4d6a-3286-4456-831d-dc67345a0d4f-service-ca\") pod \"cluster-version-operator-5c965bbfc6-hqwvt\" (UID: \"c55e4d6a-3286-4456-831d-dc67345a0d4f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hqwvt" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.896297 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c55e4d6a-3286-4456-831d-dc67345a0d4f-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-hqwvt\" (UID: \"c55e4d6a-3286-4456-831d-dc67345a0d4f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hqwvt" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.909625 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=93.909600798 podStartE2EDuration="1m33.909600798s" podCreationTimestamp="2025-10-10 16:32:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:33:42.909161717 +0000 UTC m=+116.417485832" watchObservedRunningTime="2025-10-10 16:33:42.909600798 +0000 UTC m=+116.417924923" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.914094 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c55e4d6a-3286-4456-831d-dc67345a0d4f-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-hqwvt\" (UID: \"c55e4d6a-3286-4456-831d-dc67345a0d4f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hqwvt" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.926366 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hqwvt" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.973006 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-gg5hb" podStartSLOduration=93.972984535 podStartE2EDuration="1m33.972984535s" podCreationTimestamp="2025-10-10 16:32:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:33:42.972262417 +0000 UTC m=+116.480586582" watchObservedRunningTime="2025-10-10 16:33:42.972984535 +0000 UTC m=+116.481308660" Oct 10 16:33:42 crc kubenswrapper[4799]: I1010 16:33:42.986225 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-6wjsp" podStartSLOduration=93.986198183 podStartE2EDuration="1m33.986198183s" podCreationTimestamp="2025-10-10 16:32:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:33:42.984978053 +0000 UTC m=+116.493302168" watchObservedRunningTime="2025-10-10 16:33:42.986198183 +0000 UTC m=+116.494522308" Oct 10 16:33:43 crc kubenswrapper[4799]: I1010 16:33:43.010139 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=32.010110528 podStartE2EDuration="32.010110528s" podCreationTimestamp="2025-10-10 16:33:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:33:43.009563165 +0000 UTC m=+116.517887280" watchObservedRunningTime="2025-10-10 16:33:43.010110528 +0000 UTC m=+116.518434663" Oct 10 16:33:43 crc kubenswrapper[4799]: I1010 16:33:43.097084 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hqwvt" event={"ID":"c55e4d6a-3286-4456-831d-dc67345a0d4f","Type":"ContainerStarted","Data":"4b93c121002f69a52977f6bb18342d7d2d6a1c55c8b20b1c40be24406ed7e28e"} Oct 10 16:33:43 crc kubenswrapper[4799]: I1010 16:33:43.097150 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hqwvt" event={"ID":"c55e4d6a-3286-4456-831d-dc67345a0d4f","Type":"ContainerStarted","Data":"5fea8893aedf4b2d342394c6d3e37b53fd9f41df075dc2e2abf81f7f0206119c"} Oct 10 16:33:43 crc kubenswrapper[4799]: I1010 16:33:43.112198 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hqwvt" podStartSLOduration=94.112177367 podStartE2EDuration="1m34.112177367s" podCreationTimestamp="2025-10-10 16:32:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:33:43.110902786 +0000 UTC m=+116.619226921" watchObservedRunningTime="2025-10-10 16:33:43.112177367 +0000 UTC m=+116.620501492" Oct 10 16:33:43 crc kubenswrapper[4799]: I1010 16:33:43.402266 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:33:43 crc kubenswrapper[4799]: I1010 16:33:43.402308 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:33:43 crc kubenswrapper[4799]: E1010 16:33:43.402385 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:33:43 crc kubenswrapper[4799]: E1010 16:33:43.402500 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:33:44 crc kubenswrapper[4799]: I1010 16:33:44.401912 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:33:44 crc kubenswrapper[4799]: E1010 16:33:44.402081 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:33:44 crc kubenswrapper[4799]: I1010 16:33:44.402889 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:33:44 crc kubenswrapper[4799]: E1010 16:33:44.403228 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:33:45 crc kubenswrapper[4799]: I1010 16:33:45.107673 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gg5hb_f000ac73-b5de-47c8-a0a7-84bd06475f62/kube-multus/1.log" Oct 10 16:33:45 crc kubenswrapper[4799]: I1010 16:33:45.108678 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gg5hb_f000ac73-b5de-47c8-a0a7-84bd06475f62/kube-multus/0.log" Oct 10 16:33:45 crc kubenswrapper[4799]: I1010 16:33:45.108799 4799 generic.go:334] "Generic (PLEG): container finished" podID="f000ac73-b5de-47c8-a0a7-84bd06475f62" containerID="9fa19f17c5ed052d9c266f2da2d4e8338037b397bc2fb5e859f733c6b8c1b69e" exitCode=1 Oct 10 16:33:45 crc kubenswrapper[4799]: I1010 16:33:45.108847 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gg5hb" event={"ID":"f000ac73-b5de-47c8-a0a7-84bd06475f62","Type":"ContainerDied","Data":"9fa19f17c5ed052d9c266f2da2d4e8338037b397bc2fb5e859f733c6b8c1b69e"} Oct 10 16:33:45 crc kubenswrapper[4799]: I1010 16:33:45.109035 4799 scope.go:117] "RemoveContainer" containerID="b64f89fec4fec12dd0dab3f95ca2c8a01e43d4ef7cc69a4d012195756f6922ca" Oct 10 16:33:45 crc kubenswrapper[4799]: I1010 16:33:45.109690 4799 scope.go:117] "RemoveContainer" containerID="9fa19f17c5ed052d9c266f2da2d4e8338037b397bc2fb5e859f733c6b8c1b69e" Oct 10 16:33:45 crc kubenswrapper[4799]: E1010 16:33:45.110215 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-gg5hb_openshift-multus(f000ac73-b5de-47c8-a0a7-84bd06475f62)\"" pod="openshift-multus/multus-gg5hb" podUID="f000ac73-b5de-47c8-a0a7-84bd06475f62" Oct 10 16:33:45 crc kubenswrapper[4799]: I1010 16:33:45.402442 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:33:45 crc kubenswrapper[4799]: E1010 16:33:45.402635 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:33:45 crc kubenswrapper[4799]: I1010 16:33:45.402965 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:33:45 crc kubenswrapper[4799]: E1010 16:33:45.403067 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:33:45 crc kubenswrapper[4799]: I1010 16:33:45.404352 4799 scope.go:117] "RemoveContainer" containerID="df22025d59e852d7ca86c7739f0dd141f6b388604bcf9ffaabfa48433290db84" Oct 10 16:33:45 crc kubenswrapper[4799]: E1010 16:33:45.404590 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-mcwfc_openshift-ovn-kubernetes(abe7f2d9-ec99-4724-a01f-cc7096377e07)\"" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" Oct 10 16:33:46 crc kubenswrapper[4799]: I1010 16:33:46.114428 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gg5hb_f000ac73-b5de-47c8-a0a7-84bd06475f62/kube-multus/1.log" Oct 10 16:33:46 crc kubenswrapper[4799]: I1010 16:33:46.402159 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:33:46 crc kubenswrapper[4799]: I1010 16:33:46.402168 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:33:46 crc kubenswrapper[4799]: E1010 16:33:46.402354 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:33:46 crc kubenswrapper[4799]: E1010 16:33:46.402500 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:33:47 crc kubenswrapper[4799]: E1010 16:33:47.381384 4799 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Oct 10 16:33:47 crc kubenswrapper[4799]: I1010 16:33:47.401595 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:33:47 crc kubenswrapper[4799]: E1010 16:33:47.402537 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:33:47 crc kubenswrapper[4799]: I1010 16:33:47.402735 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:33:47 crc kubenswrapper[4799]: E1010 16:33:47.403302 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:33:47 crc kubenswrapper[4799]: E1010 16:33:47.521656 4799 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 10 16:33:48 crc kubenswrapper[4799]: I1010 16:33:48.402454 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:33:48 crc kubenswrapper[4799]: I1010 16:33:48.402531 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:33:48 crc kubenswrapper[4799]: E1010 16:33:48.402611 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:33:48 crc kubenswrapper[4799]: E1010 16:33:48.402743 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:33:49 crc kubenswrapper[4799]: I1010 16:33:49.402253 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:33:49 crc kubenswrapper[4799]: E1010 16:33:49.402443 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:33:49 crc kubenswrapper[4799]: I1010 16:33:49.402791 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:33:49 crc kubenswrapper[4799]: E1010 16:33:49.404014 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:33:50 crc kubenswrapper[4799]: I1010 16:33:50.402111 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:33:50 crc kubenswrapper[4799]: I1010 16:33:50.402135 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:33:50 crc kubenswrapper[4799]: E1010 16:33:50.402284 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:33:50 crc kubenswrapper[4799]: E1010 16:33:50.402444 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:33:51 crc kubenswrapper[4799]: I1010 16:33:51.401932 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:33:51 crc kubenswrapper[4799]: I1010 16:33:51.402089 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:33:51 crc kubenswrapper[4799]: E1010 16:33:51.402320 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:33:51 crc kubenswrapper[4799]: E1010 16:33:51.402466 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:33:52 crc kubenswrapper[4799]: I1010 16:33:52.401723 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:33:52 crc kubenswrapper[4799]: E1010 16:33:52.402073 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:33:52 crc kubenswrapper[4799]: I1010 16:33:52.402909 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:33:52 crc kubenswrapper[4799]: E1010 16:33:52.403069 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:33:52 crc kubenswrapper[4799]: E1010 16:33:52.522803 4799 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 10 16:33:53 crc kubenswrapper[4799]: I1010 16:33:53.402531 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:33:53 crc kubenswrapper[4799]: I1010 16:33:53.402611 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:33:53 crc kubenswrapper[4799]: E1010 16:33:53.402815 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:33:53 crc kubenswrapper[4799]: E1010 16:33:53.402935 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:33:54 crc kubenswrapper[4799]: I1010 16:33:54.401697 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:33:54 crc kubenswrapper[4799]: I1010 16:33:54.401870 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:33:54 crc kubenswrapper[4799]: E1010 16:33:54.402255 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:33:54 crc kubenswrapper[4799]: E1010 16:33:54.402376 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:33:55 crc kubenswrapper[4799]: I1010 16:33:55.401644 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:33:55 crc kubenswrapper[4799]: I1010 16:33:55.401829 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:33:55 crc kubenswrapper[4799]: E1010 16:33:55.401904 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:33:55 crc kubenswrapper[4799]: E1010 16:33:55.401984 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:33:56 crc kubenswrapper[4799]: I1010 16:33:56.402299 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:33:56 crc kubenswrapper[4799]: E1010 16:33:56.402455 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:33:56 crc kubenswrapper[4799]: I1010 16:33:56.402478 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:33:56 crc kubenswrapper[4799]: E1010 16:33:56.403509 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:33:56 crc kubenswrapper[4799]: I1010 16:33:56.403584 4799 scope.go:117] "RemoveContainer" containerID="df22025d59e852d7ca86c7739f0dd141f6b388604bcf9ffaabfa48433290db84" Oct 10 16:33:57 crc kubenswrapper[4799]: I1010 16:33:57.154036 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mcwfc_abe7f2d9-ec99-4724-a01f-cc7096377e07/ovnkube-controller/3.log" Oct 10 16:33:57 crc kubenswrapper[4799]: I1010 16:33:57.156377 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" event={"ID":"abe7f2d9-ec99-4724-a01f-cc7096377e07","Type":"ContainerStarted","Data":"7dd9264bc068801f661655f2e77814e97ed7f61382f1e64dc3f9c3ffd546bdb3"} Oct 10 16:33:57 crc kubenswrapper[4799]: I1010 16:33:57.156803 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:33:57 crc kubenswrapper[4799]: I1010 16:33:57.178710 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" podStartSLOduration=108.178691094 podStartE2EDuration="1m48.178691094s" podCreationTimestamp="2025-10-10 16:32:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:33:57.178673263 +0000 UTC m=+130.686997398" watchObservedRunningTime="2025-10-10 16:33:57.178691094 +0000 UTC m=+130.687015209" Oct 10 16:33:57 crc kubenswrapper[4799]: I1010 16:33:57.353004 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-k6hch"] Oct 10 16:33:57 crc kubenswrapper[4799]: I1010 16:33:57.353138 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:33:57 crc kubenswrapper[4799]: E1010 16:33:57.353282 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:33:57 crc kubenswrapper[4799]: I1010 16:33:57.402441 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:33:57 crc kubenswrapper[4799]: E1010 16:33:57.405183 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:33:57 crc kubenswrapper[4799]: E1010 16:33:57.523969 4799 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 10 16:33:58 crc kubenswrapper[4799]: I1010 16:33:58.402288 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:33:58 crc kubenswrapper[4799]: I1010 16:33:58.402329 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:33:58 crc kubenswrapper[4799]: E1010 16:33:58.403074 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:33:58 crc kubenswrapper[4799]: E1010 16:33:58.403167 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:33:59 crc kubenswrapper[4799]: I1010 16:33:59.402281 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:33:59 crc kubenswrapper[4799]: I1010 16:33:59.402504 4799 scope.go:117] "RemoveContainer" containerID="9fa19f17c5ed052d9c266f2da2d4e8338037b397bc2fb5e859f733c6b8c1b69e" Oct 10 16:33:59 crc kubenswrapper[4799]: E1010 16:33:59.402689 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:33:59 crc kubenswrapper[4799]: I1010 16:33:59.403028 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:33:59 crc kubenswrapper[4799]: E1010 16:33:59.403267 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:34:00 crc kubenswrapper[4799]: I1010 16:34:00.170482 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gg5hb_f000ac73-b5de-47c8-a0a7-84bd06475f62/kube-multus/1.log" Oct 10 16:34:00 crc kubenswrapper[4799]: I1010 16:34:00.170562 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gg5hb" event={"ID":"f000ac73-b5de-47c8-a0a7-84bd06475f62","Type":"ContainerStarted","Data":"8c9c3aaa13091c5803bad24fd1f3a1d3fe4da491900046f26bf4316a55987309"} Oct 10 16:34:00 crc kubenswrapper[4799]: I1010 16:34:00.401959 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:34:00 crc kubenswrapper[4799]: I1010 16:34:00.401975 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:34:00 crc kubenswrapper[4799]: E1010 16:34:00.402166 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:34:00 crc kubenswrapper[4799]: E1010 16:34:00.402331 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:34:01 crc kubenswrapper[4799]: I1010 16:34:01.402133 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:34:01 crc kubenswrapper[4799]: I1010 16:34:01.402161 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:34:01 crc kubenswrapper[4799]: E1010 16:34:01.402310 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 10 16:34:01 crc kubenswrapper[4799]: E1010 16:34:01.402390 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k6hch" podUID="7903c578-d05e-4ad7-8fd9-f438abf4a085" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.402414 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.402441 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:34:02 crc kubenswrapper[4799]: E1010 16:34:02.402597 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 10 16:34:02 crc kubenswrapper[4799]: E1010 16:34:02.402725 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.876332 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.942245 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-w26tc"] Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.944368 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-w26tc" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.949950 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.950657 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.951108 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.951442 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.951719 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.956971 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-2ksq6"] Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.960061 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.964498 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.965051 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.965597 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.968218 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.969255 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.969813 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.970079 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.970242 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.970615 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.970784 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.979077 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.983153 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc"] Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.983640 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-fvpc8"] Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.983963 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-fvpc8" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.984005 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.990222 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.990619 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-w2jsl"] Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.991151 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w2jsl" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.991845 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-plk2p"] Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.992197 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-plk2p" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.993291 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-8lvfs"] Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.993553 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.993735 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.993827 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.994005 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.994208 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.994254 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.994271 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.994399 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.994416 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.994454 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.994473 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.994420 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.993826 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-8lvfs" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.994568 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.995009 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.995354 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hqz8m"] Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.995878 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hqz8m" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.996616 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-n8sc6"] Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.997557 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-n8sc6" Oct 10 16:34:02 crc kubenswrapper[4799]: I1010 16:34:02.998252 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-5sgz5"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:02.999514 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:02.999797 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-5sgz5" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:02.999877 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.000142 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-nsrr7"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.000816 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-nsrr7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.001258 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-56xl2"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.001596 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.010162 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4xnj\" (UniqueName: \"kubernetes.io/projected/cd6b9ce8-4fe8-405e-9399-354b7d8ee20b-kube-api-access-q4xnj\") pod \"apiserver-76f77b778f-2ksq6\" (UID: \"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b\") " pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.010189 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/cd6b9ce8-4fe8-405e-9399-354b7d8ee20b-audit\") pod \"apiserver-76f77b778f-2ksq6\" (UID: \"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b\") " pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.010207 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/cd6b9ce8-4fe8-405e-9399-354b7d8ee20b-node-pullsecrets\") pod \"apiserver-76f77b778f-2ksq6\" (UID: \"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b\") " pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.010224 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/cd6b9ce8-4fe8-405e-9399-354b7d8ee20b-etcd-client\") pod \"apiserver-76f77b778f-2ksq6\" (UID: \"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b\") " pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.010243 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/159d9178-1402-4232-a9df-ad4389bed9b0-images\") pod \"machine-api-operator-5694c8668f-w26tc\" (UID: \"159d9178-1402-4232-a9df-ad4389bed9b0\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-w26tc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.010263 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd6b9ce8-4fe8-405e-9399-354b7d8ee20b-config\") pod \"apiserver-76f77b778f-2ksq6\" (UID: \"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b\") " pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.010286 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/cd6b9ce8-4fe8-405e-9399-354b7d8ee20b-audit-dir\") pod \"apiserver-76f77b778f-2ksq6\" (UID: \"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b\") " pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.010300 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/cd6b9ce8-4fe8-405e-9399-354b7d8ee20b-image-import-ca\") pod \"apiserver-76f77b778f-2ksq6\" (UID: \"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b\") " pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.010317 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/cd6b9ce8-4fe8-405e-9399-354b7d8ee20b-serving-cert\") pod \"apiserver-76f77b778f-2ksq6\" (UID: \"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b\") " pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.010331 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/cd6b9ce8-4fe8-405e-9399-354b7d8ee20b-etcd-serving-ca\") pod \"apiserver-76f77b778f-2ksq6\" (UID: \"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b\") " pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.010359 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sfc2x\" (UniqueName: \"kubernetes.io/projected/159d9178-1402-4232-a9df-ad4389bed9b0-kube-api-access-sfc2x\") pod \"machine-api-operator-5694c8668f-w26tc\" (UID: \"159d9178-1402-4232-a9df-ad4389bed9b0\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-w26tc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.010377 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/159d9178-1402-4232-a9df-ad4389bed9b0-config\") pod \"machine-api-operator-5694c8668f-w26tc\" (UID: \"159d9178-1402-4232-a9df-ad4389bed9b0\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-w26tc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.010391 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cd6b9ce8-4fe8-405e-9399-354b7d8ee20b-trusted-ca-bundle\") pod \"apiserver-76f77b778f-2ksq6\" (UID: \"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b\") " pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.010416 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/159d9178-1402-4232-a9df-ad4389bed9b0-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-w26tc\" (UID: \"159d9178-1402-4232-a9df-ad4389bed9b0\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-w26tc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.010434 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/cd6b9ce8-4fe8-405e-9399-354b7d8ee20b-encryption-config\") pod \"apiserver-76f77b778f-2ksq6\" (UID: \"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b\") " pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.011624 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.011730 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.011745 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.011842 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Oct 10 16:34:03 
crc kubenswrapper[4799]: I1010 16:34:03.011904 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.011916 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.011932 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.011994 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.012039 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.012109 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.012113 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.011631 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.012188 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.012281 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.012316 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.013121 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-6h8q7"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.013513 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-mvftm"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.013807 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-mvftm" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.013901 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-6h8q7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.014610 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hsg4j"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.015357 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hsg4j" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.027748 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-nlsfc"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.028689 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-nlsfc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.029388 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.029710 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.029957 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.030075 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.030390 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.030417 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.030810 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.031043 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.031245 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.031340 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.031392 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.031927 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.031956 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-w9v24"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.032069 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.032213 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.032504 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.032638 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.033024 4799 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.033447 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.034102 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9v24" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.035358 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.035609 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.035952 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.036057 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.036122 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-cqz89"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.037128 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqz89" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.039822 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-qc5c7"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.041623 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-89gcz"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.041727 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-qc5c7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.042299 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.052087 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.052245 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.055118 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.055295 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.055311 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.055434 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.055548 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.055848 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.056452 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.056453 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.056776 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.056910 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.056936 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.057214 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.057377 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.057466 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.058387 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.058675 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.059346 4799 reflector.go:368] 
Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.068798 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.069348 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.069694 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.069847 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.070052 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.070682 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x7vr5"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.071434 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-qpw4g"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.071877 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6mnhh"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.072247 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x7vr5" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.072278 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6mnhh" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.072280 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-prqrg"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.072432 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-qpw4g" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.073143 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.073367 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-prqrg" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.076493 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.078392 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.080424 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.080723 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cvkkx"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.082566 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.082657 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hkvxt"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.082935 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cvkkx" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.083695 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zphkz"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.084159 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zphkz" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.084214 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hkvxt" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.084247 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.087231 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.088822 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-98snb"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.089938 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-98snb" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.091894 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.094250 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.102521 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5r8jg"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.103228 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5r8jg" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.106883 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-9kfj6"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.107187 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.107683 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9kfj6" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.108232 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-mgdrk"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.108791 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-mgdrk" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.110437 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-2cggl"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.110811 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/cd6b9ce8-4fe8-405e-9399-354b7d8ee20b-etcd-client\") pod \"apiserver-76f77b778f-2ksq6\" (UID: \"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b\") " pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.110865 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a63337d7-2cc9-49b7-af06-550a24f39ff0-trusted-ca\") pod \"console-operator-58897d9998-6h8q7\" (UID: \"a63337d7-2cc9-49b7-af06-550a24f39ff0\") " pod="openshift-console-operator/console-operator-58897d9998-6h8q7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.110882 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd0fbd79-76a9-4a87-a67b-20e782993376-config\") pod \"controller-manager-879f6c89f-fvpc8\" (UID: \"dd0fbd79-76a9-4a87-a67b-20e782993376\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fvpc8" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.110900 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2dff39f3-58aa-4d27-a85f-c1a09bb3d83e-config\") pod \"etcd-operator-b45778765-mvftm\" (UID: \"2dff39f3-58aa-4d27-a85f-c1a09bb3d83e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mvftm" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.110914 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eec5360d-85e3-4785-98b2-10ba224ffdfe-config\") pod \"machine-approver-56656f9798-w9v24\" (UID: \"eec5360d-85e3-4785-98b2-10ba224ffdfe\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9v24" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.110930 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/159d9178-1402-4232-a9df-ad4389bed9b0-images\") pod \"machine-api-operator-5694c8668f-w26tc\" (UID: \"159d9178-1402-4232-a9df-ad4389bed9b0\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-w26tc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.110947 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c37fc2e5-d729-4258-8506-22f328f5927a-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-x7vr5\" (UID: \"c37fc2e5-d729-4258-8506-22f328f5927a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x7vr5" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.110961 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5mmzh\" (UniqueName: \"kubernetes.io/projected/eb807790-1011-4dfc-842e-fd4106f7a6c3-kube-api-access-5mmzh\") pod \"ingress-operator-5b745b69d9-cqz89\" (UID: 
\"eb807790-1011-4dfc-842e-fd4106f7a6c3\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqz89" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.110978 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd6b9ce8-4fe8-405e-9399-354b7d8ee20b-config\") pod \"apiserver-76f77b778f-2ksq6\" (UID: \"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b\") " pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.110995 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/cd6b9ce8-4fe8-405e-9399-354b7d8ee20b-audit-dir\") pod \"apiserver-76f77b778f-2ksq6\" (UID: \"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b\") " pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111012 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p54j8\" (UniqueName: \"kubernetes.io/projected/6a574afe-31ee-4706-90c0-a9c477f5bce7-kube-api-access-p54j8\") pod \"router-default-5444994796-qpw4g\" (UID: \"6a574afe-31ee-4706-90c0-a9c477f5bce7\") " pod="openshift-ingress/router-default-5444994796-qpw4g" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111027 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111051 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0691553d-e534-4c08-b56e-d99bd02e53fa-audit-policies\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111066 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111080 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/eb807790-1011-4dfc-842e-fd4106f7a6c3-trusted-ca\") pod \"ingress-operator-5b745b69d9-cqz89\" (UID: \"eb807790-1011-4dfc-842e-fd4106f7a6c3\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqz89" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111122 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/eec5360d-85e3-4785-98b2-10ba224ffdfe-auth-proxy-config\") pod \"machine-approver-56656f9798-w9v24\" (UID: \"eec5360d-85e3-4785-98b2-10ba224ffdfe\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9v24" 
Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111141 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/cd6b9ce8-4fe8-405e-9399-354b7d8ee20b-image-import-ca\") pod \"apiserver-76f77b778f-2ksq6\" (UID: \"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b\") " pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111175 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-2cggl" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111190 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111461 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f91a26e-3654-415e-9ebc-91034a85cbc1-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-nsrr7\" (UID: \"6f91a26e-3654-415e-9ebc-91034a85cbc1\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-nsrr7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111483 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f8bab52f-8a27-495e-80d2-9794b984939e-client-ca\") pod \"route-controller-manager-6576b87f9c-w2jsl\" (UID: \"f8bab52f-8a27-495e-80d2-9794b984939e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w2jsl" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111503 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/6a574afe-31ee-4706-90c0-a9c477f5bce7-stats-auth\") pod \"router-default-5444994796-qpw4g\" (UID: \"6a574afe-31ee-4706-90c0-a9c477f5bce7\") " pod="openshift-ingress/router-default-5444994796-qpw4g" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111519 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3f2b6b90-47a2-4e3c-8394-13a961d92c03-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-hsg4j\" (UID: \"3f2b6b90-47a2-4e3c-8394-13a961d92c03\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hsg4j" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111536 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxgnl\" (UniqueName: \"kubernetes.io/projected/6f91a26e-3654-415e-9ebc-91034a85cbc1-kube-api-access-hxgnl\") pod \"openshift-controller-manager-operator-756b6f6bc6-nsrr7\" (UID: \"6f91a26e-3654-415e-9ebc-91034a85cbc1\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-nsrr7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111555 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/6a574afe-31ee-4706-90c0-a9c477f5bce7-metrics-certs\") pod \"router-default-5444994796-qpw4g\" (UID: \"6a574afe-31ee-4706-90c0-a9c477f5bce7\") " pod="openshift-ingress/router-default-5444994796-qpw4g" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111570 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dd0fbd79-76a9-4a87-a67b-20e782993376-serving-cert\") pod \"controller-manager-879f6c89f-fvpc8\" (UID: \"dd0fbd79-76a9-4a87-a67b-20e782993376\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fvpc8" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111586 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/dd0fbd79-76a9-4a87-a67b-20e782993376-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-fvpc8\" (UID: \"dd0fbd79-76a9-4a87-a67b-20e782993376\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fvpc8" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111602 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n879q\" (UniqueName: \"kubernetes.io/projected/98460888-57af-4ef6-a390-c0a592164ddb-kube-api-access-n879q\") pod \"dns-operator-744455d44c-nlsfc\" (UID: \"98460888-57af-4ef6-a390-c0a592164ddb\") " pod="openshift-dns-operator/dns-operator-744455d44c-nlsfc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111625 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/cd6b9ce8-4fe8-405e-9399-354b7d8ee20b-etcd-serving-ca\") pod \"apiserver-76f77b778f-2ksq6\" (UID: \"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b\") " pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111641 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd6b9ce8-4fe8-405e-9399-354b7d8ee20b-serving-cert\") pod \"apiserver-76f77b778f-2ksq6\" (UID: \"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b\") " pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111663 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/6a574afe-31ee-4706-90c0-a9c477f5bce7-default-certificate\") pod \"router-default-5444994796-qpw4g\" (UID: \"6a574afe-31ee-4706-90c0-a9c477f5bce7\") " pod="openshift-ingress/router-default-5444994796-qpw4g" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111679 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/089d7979-5043-482c-a464-b62a385a3f16-auth-proxy-config\") pod \"machine-config-operator-74547568cd-prqrg\" (UID: \"089d7979-5043-482c-a464-b62a385a3f16\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-prqrg" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111695 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jpxlh\" (UniqueName: \"kubernetes.io/projected/089d7979-5043-482c-a464-b62a385a3f16-kube-api-access-jpxlh\") pod \"machine-config-operator-74547568cd-prqrg\" 
(UID: \"089d7979-5043-482c-a464-b62a385a3f16\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-prqrg" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111712 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111733 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sfc2x\" (UniqueName: \"kubernetes.io/projected/159d9178-1402-4232-a9df-ad4389bed9b0-kube-api-access-sfc2x\") pod \"machine-api-operator-5694c8668f-w26tc\" (UID: \"159d9178-1402-4232-a9df-ad4389bed9b0\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-w26tc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111748 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0691553d-e534-4c08-b56e-d99bd02e53fa-audit-dir\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111782 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111802 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f237a066-51a9-475f-80b5-1627fb073e16-serving-cert\") pod \"authentication-operator-69f744f599-qc5c7\" (UID: \"f237a066-51a9-475f-80b5-1627fb073e16\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qc5c7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111817 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/159d9178-1402-4232-a9df-ad4389bed9b0-config\") pod \"machine-api-operator-5694c8668f-w26tc\" (UID: \"159d9178-1402-4232-a9df-ad4389bed9b0\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-w26tc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111833 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/713c24fb-e821-419a-b996-9661a6cbf57b-audit-policies\") pod \"apiserver-7bbb656c7d-mjjtc\" (UID: \"713c24fb-e821-419a-b996-9661a6cbf57b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111850 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/713c24fb-e821-419a-b996-9661a6cbf57b-etcd-client\") pod \"apiserver-7bbb656c7d-mjjtc\" (UID: \"713c24fb-e821-419a-b996-9661a6cbf57b\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111866 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2dff39f3-58aa-4d27-a85f-c1a09bb3d83e-serving-cert\") pod \"etcd-operator-b45778765-mvftm\" (UID: \"2dff39f3-58aa-4d27-a85f-c1a09bb3d83e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mvftm" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111881 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vw6fc\" (UniqueName: \"kubernetes.io/projected/3f2b6b90-47a2-4e3c-8394-13a961d92c03-kube-api-access-vw6fc\") pod \"openshift-apiserver-operator-796bbdcf4f-hsg4j\" (UID: \"3f2b6b90-47a2-4e3c-8394-13a961d92c03\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hsg4j" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111900 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cd6b9ce8-4fe8-405e-9399-354b7d8ee20b-trusted-ca-bundle\") pod \"apiserver-76f77b778f-2ksq6\" (UID: \"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b\") " pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111917 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87lxb\" (UniqueName: \"kubernetes.io/projected/a63337d7-2cc9-49b7-af06-550a24f39ff0-kube-api-access-87lxb\") pod \"console-operator-58897d9998-6h8q7\" (UID: \"a63337d7-2cc9-49b7-af06-550a24f39ff0\") " pod="openshift-console-operator/console-operator-58897d9998-6h8q7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111933 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/2dff39f3-58aa-4d27-a85f-c1a09bb3d83e-etcd-service-ca\") pod \"etcd-operator-b45778765-mvftm\" (UID: \"2dff39f3-58aa-4d27-a85f-c1a09bb3d83e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mvftm" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111950 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/713c24fb-e821-419a-b996-9661a6cbf57b-encryption-config\") pod \"apiserver-7bbb656c7d-mjjtc\" (UID: \"713c24fb-e821-419a-b996-9661a6cbf57b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111965 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/98460888-57af-4ef6-a390-c0a592164ddb-metrics-tls\") pod \"dns-operator-744455d44c-nlsfc\" (UID: \"98460888-57af-4ef6-a390-c0a592164ddb\") " pod="openshift-dns-operator/dns-operator-744455d44c-nlsfc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.111982 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f237a066-51a9-475f-80b5-1627fb073e16-config\") pod \"authentication-operator-69f744f599-qc5c7\" (UID: \"f237a066-51a9-475f-80b5-1627fb073e16\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qc5c7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 
16:34:03.112000 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a63337d7-2cc9-49b7-af06-550a24f39ff0-config\") pod \"console-operator-58897d9998-6h8q7\" (UID: \"a63337d7-2cc9-49b7-af06-550a24f39ff0\") " pod="openshift-console-operator/console-operator-58897d9998-6h8q7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.112015 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.112043 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6a574afe-31ee-4706-90c0-a9c477f5bce7-service-ca-bundle\") pod \"router-default-5444994796-qpw4g\" (UID: \"6a574afe-31ee-4706-90c0-a9c477f5bce7\") " pod="openshift-ingress/router-default-5444994796-qpw4g" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.112112 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bsdq\" (UniqueName: \"kubernetes.io/projected/dd0fbd79-76a9-4a87-a67b-20e782993376-kube-api-access-2bsdq\") pod \"controller-manager-879f6c89f-fvpc8\" (UID: \"dd0fbd79-76a9-4a87-a67b-20e782993376\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fvpc8" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.112131 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvt5n\" (UniqueName: \"kubernetes.io/projected/2dff39f3-58aa-4d27-a85f-c1a09bb3d83e-kube-api-access-cvt5n\") pod \"etcd-operator-b45778765-mvftm\" (UID: \"2dff39f3-58aa-4d27-a85f-c1a09bb3d83e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mvftm" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.112145 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c37fc2e5-d729-4258-8506-22f328f5927a-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-x7vr5\" (UID: \"c37fc2e5-d729-4258-8506-22f328f5927a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x7vr5" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.112160 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f237a066-51a9-475f-80b5-1627fb073e16-service-ca-bundle\") pod \"authentication-operator-69f744f599-qc5c7\" (UID: \"f237a066-51a9-475f-80b5-1627fb073e16\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qc5c7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.112177 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/089d7979-5043-482c-a464-b62a385a3f16-proxy-tls\") pod \"machine-config-operator-74547568cd-prqrg\" (UID: \"089d7979-5043-482c-a464-b62a385a3f16\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-prqrg" Oct 10 16:34:03 crc kubenswrapper[4799]: 
I1010 16:34:03.112193 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.112207 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b89bb\" (UniqueName: \"kubernetes.io/projected/a8700150-e5e3-4b1b-a3c8-f7e781ce8f1f-kube-api-access-b89bb\") pod \"cluster-image-registry-operator-dc59b4c8b-hqz8m\" (UID: \"a8700150-e5e3-4b1b-a3c8-f7e781ce8f1f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hqz8m" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.112229 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/159d9178-1402-4232-a9df-ad4389bed9b0-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-w26tc\" (UID: \"159d9178-1402-4232-a9df-ad4389bed9b0\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-w26tc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.112245 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a63337d7-2cc9-49b7-af06-550a24f39ff0-serving-cert\") pod \"console-operator-58897d9998-6h8q7\" (UID: \"a63337d7-2cc9-49b7-af06-550a24f39ff0\") " pod="openshift-console-operator/console-operator-58897d9998-6h8q7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.112262 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vssf\" (UniqueName: \"kubernetes.io/projected/a958b529-c3f0-4131-be7b-4d81a3c25499-kube-api-access-7vssf\") pod \"control-plane-machine-set-operator-78cbb6b69f-6mnhh\" (UID: \"a958b529-c3f0-4131-be7b-4d81a3c25499\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6mnhh" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.112279 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tsxnv\" (UniqueName: \"kubernetes.io/projected/e56235b4-8348-4fae-af0a-639fcacfc997-kube-api-access-tsxnv\") pod \"downloads-7954f5f757-plk2p\" (UID: \"e56235b4-8348-4fae-af0a-639fcacfc997\") " pod="openshift-console/downloads-7954f5f757-plk2p" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.112296 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.112311 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/a8700150-e5e3-4b1b-a3c8-f7e781ce8f1f-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-hqz8m\" (UID: \"a8700150-e5e3-4b1b-a3c8-f7e781ce8f1f\") " 
pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hqz8m" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.112327 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/713c24fb-e821-419a-b996-9661a6cbf57b-audit-dir\") pod \"apiserver-7bbb656c7d-mjjtc\" (UID: \"713c24fb-e821-419a-b996-9661a6cbf57b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.112343 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/eb807790-1011-4dfc-842e-fd4106f7a6c3-bound-sa-token\") pod \"ingress-operator-5b745b69d9-cqz89\" (UID: \"eb807790-1011-4dfc-842e-fd4106f7a6c3\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqz89" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.112361 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/cd6b9ce8-4fe8-405e-9399-354b7d8ee20b-encryption-config\") pod \"apiserver-76f77b778f-2ksq6\" (UID: \"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b\") " pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.112378 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/089d7979-5043-482c-a464-b62a385a3f16-images\") pod \"machine-config-operator-74547568cd-prqrg\" (UID: \"089d7979-5043-482c-a464-b62a385a3f16\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-prqrg" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.112393 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dd0fbd79-76a9-4a87-a67b-20e782993376-client-ca\") pod \"controller-manager-879f6c89f-fvpc8\" (UID: \"dd0fbd79-76a9-4a87-a67b-20e782993376\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fvpc8" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.112410 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f237a066-51a9-475f-80b5-1627fb073e16-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-qc5c7\" (UID: \"f237a066-51a9-475f-80b5-1627fb073e16\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qc5c7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.112431 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6f91a26e-3654-415e-9ebc-91034a85cbc1-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-nsrr7\" (UID: \"6f91a26e-3654-415e-9ebc-91034a85cbc1\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-nsrr7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.112454 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvq5g\" (UniqueName: \"kubernetes.io/projected/713c24fb-e821-419a-b996-9661a6cbf57b-kube-api-access-pvq5g\") pod \"apiserver-7bbb656c7d-mjjtc\" (UID: \"713c24fb-e821-419a-b996-9661a6cbf57b\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.112469 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/18a6e757-37be-47ce-98ea-bda0221cab2c-serving-cert\") pod \"openshift-config-operator-7777fb866f-5sgz5\" (UID: \"18a6e757-37be-47ce-98ea-bda0221cab2c\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5sgz5" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.112485 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.112503 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/eec5360d-85e3-4785-98b2-10ba224ffdfe-machine-approver-tls\") pod \"machine-approver-56656f9798-w9v24\" (UID: \"eec5360d-85e3-4785-98b2-10ba224ffdfe\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9v24" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.112526 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55fpk\" (UniqueName: \"kubernetes.io/projected/f8bab52f-8a27-495e-80d2-9794b984939e-kube-api-access-55fpk\") pod \"route-controller-manager-6576b87f9c-w2jsl\" (UID: \"f8bab52f-8a27-495e-80d2-9794b984939e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w2jsl" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.112543 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f2b6b90-47a2-4e3c-8394-13a961d92c03-config\") pod \"openshift-apiserver-operator-796bbdcf4f-hsg4j\" (UID: \"3f2b6b90-47a2-4e3c-8394-13a961d92c03\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hsg4j" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.112556 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/eb807790-1011-4dfc-842e-fd4106f7a6c3-metrics-tls\") pod \"ingress-operator-5b745b69d9-cqz89\" (UID: \"eb807790-1011-4dfc-842e-fd4106f7a6c3\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqz89" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.112578 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/713c24fb-e821-419a-b996-9661a6cbf57b-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-mjjtc\" (UID: \"713c24fb-e821-419a-b996-9661a6cbf57b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.112597 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4xnj\" (UniqueName: \"kubernetes.io/projected/cd6b9ce8-4fe8-405e-9399-354b7d8ee20b-kube-api-access-q4xnj\") pod \"apiserver-76f77b778f-2ksq6\" (UID: \"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b\") " 
pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.112613 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/cd6b9ce8-4fe8-405e-9399-354b7d8ee20b-audit\") pod \"apiserver-76f77b778f-2ksq6\" (UID: \"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b\") " pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.112632 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/713c24fb-e821-419a-b996-9661a6cbf57b-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-mjjtc\" (UID: \"713c24fb-e821-419a-b996-9661a6cbf57b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.112977 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/cd6b9ce8-4fe8-405e-9399-354b7d8ee20b-node-pullsecrets\") pod \"apiserver-76f77b778f-2ksq6\" (UID: \"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b\") " pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.113269 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/18a6e757-37be-47ce-98ea-bda0221cab2c-available-featuregates\") pod \"openshift-config-operator-7777fb866f-5sgz5\" (UID: \"18a6e757-37be-47ce-98ea-bda0221cab2c\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5sgz5" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.113295 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h27xd\" (UniqueName: \"kubernetes.io/projected/18a6e757-37be-47ce-98ea-bda0221cab2c-kube-api-access-h27xd\") pod \"openshift-config-operator-7777fb866f-5sgz5\" (UID: \"18a6e757-37be-47ce-98ea-bda0221cab2c\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5sgz5" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.113312 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gmcp9\" (UniqueName: \"kubernetes.io/projected/0691553d-e534-4c08-b56e-d99bd02e53fa-kube-api-access-gmcp9\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.113326 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4jgn\" (UniqueName: \"kubernetes.io/projected/eec5360d-85e3-4785-98b2-10ba224ffdfe-kube-api-access-r4jgn\") pod \"machine-approver-56656f9798-w9v24\" (UID: \"eec5360d-85e3-4785-98b2-10ba224ffdfe\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9v24" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.113342 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gcfj\" (UniqueName: \"kubernetes.io/projected/f237a066-51a9-475f-80b5-1627fb073e16-kube-api-access-5gcfj\") pod \"authentication-operator-69f744f599-qc5c7\" (UID: \"f237a066-51a9-475f-80b5-1627fb073e16\") " 
pod="openshift-authentication-operator/authentication-operator-69f744f599-qc5c7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.113360 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.113378 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c37fc2e5-d729-4258-8506-22f328f5927a-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-x7vr5\" (UID: \"c37fc2e5-d729-4258-8506-22f328f5927a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x7vr5" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.113394 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a8700150-e5e3-4b1b-a3c8-f7e781ce8f1f-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-hqz8m\" (UID: \"a8700150-e5e3-4b1b-a3c8-f7e781ce8f1f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hqz8m" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.113411 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8bab52f-8a27-495e-80d2-9794b984939e-config\") pod \"route-controller-manager-6576b87f9c-w2jsl\" (UID: \"f8bab52f-8a27-495e-80d2-9794b984939e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w2jsl" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.113430 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.113446 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/2dff39f3-58aa-4d27-a85f-c1a09bb3d83e-etcd-client\") pod \"etcd-operator-b45778765-mvftm\" (UID: \"2dff39f3-58aa-4d27-a85f-c1a09bb3d83e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mvftm" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.113464 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/713c24fb-e821-419a-b996-9661a6cbf57b-serving-cert\") pod \"apiserver-7bbb656c7d-mjjtc\" (UID: \"713c24fb-e821-419a-b996-9661a6cbf57b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.113486 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/a958b529-c3f0-4131-be7b-4d81a3c25499-control-plane-machine-set-operator-tls\") pod 
\"control-plane-machine-set-operator-78cbb6b69f-6mnhh\" (UID: \"a958b529-c3f0-4131-be7b-4d81a3c25499\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6mnhh" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.113569 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f8bab52f-8a27-495e-80d2-9794b984939e-serving-cert\") pod \"route-controller-manager-6576b87f9c-w2jsl\" (UID: \"f8bab52f-8a27-495e-80d2-9794b984939e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w2jsl" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.113589 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/2dff39f3-58aa-4d27-a85f-c1a09bb3d83e-etcd-ca\") pod \"etcd-operator-b45778765-mvftm\" (UID: \"2dff39f3-58aa-4d27-a85f-c1a09bb3d83e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mvftm" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.113606 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a8700150-e5e3-4b1b-a3c8-f7e781ce8f1f-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-hqz8m\" (UID: \"a8700150-e5e3-4b1b-a3c8-f7e781ce8f1f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hqz8m" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.113714 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd6b9ce8-4fe8-405e-9399-354b7d8ee20b-config\") pod \"apiserver-76f77b778f-2ksq6\" (UID: \"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b\") " pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.115998 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/159d9178-1402-4232-a9df-ad4389bed9b0-images\") pod \"machine-api-operator-5694c8668f-w26tc\" (UID: \"159d9178-1402-4232-a9df-ad4389bed9b0\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-w26tc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.116677 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vfgnk"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.116736 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/cd6b9ce8-4fe8-405e-9399-354b7d8ee20b-audit-dir\") pod \"apiserver-76f77b778f-2ksq6\" (UID: \"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b\") " pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.121657 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/cd6b9ce8-4fe8-405e-9399-354b7d8ee20b-etcd-serving-ca\") pod \"apiserver-76f77b778f-2ksq6\" (UID: \"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b\") " pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.122233 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/cd6b9ce8-4fe8-405e-9399-354b7d8ee20b-node-pullsecrets\") pod \"apiserver-76f77b778f-2ksq6\" (UID: 
\"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b\") " pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.122514 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/cd6b9ce8-4fe8-405e-9399-354b7d8ee20b-image-import-ca\") pod \"apiserver-76f77b778f-2ksq6\" (UID: \"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b\") " pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.123513 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/159d9178-1402-4232-a9df-ad4389bed9b0-config\") pod \"machine-api-operator-5694c8668f-w26tc\" (UID: \"159d9178-1402-4232-a9df-ad4389bed9b0\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-w26tc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.123847 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/cd6b9ce8-4fe8-405e-9399-354b7d8ee20b-audit\") pod \"apiserver-76f77b778f-2ksq6\" (UID: \"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b\") " pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.124090 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335230-q6qww"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.124381 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vfgnk" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.124884 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-wzwz7"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.125044 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335230-q6qww" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.127447 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.127904 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mn6dn"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.128084 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-wzwz7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.130013 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vwbn8"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.130917 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vwbn8" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.131160 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-mn6dn" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.131225 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/cd6b9ce8-4fe8-405e-9399-354b7d8ee20b-etcd-client\") pod \"apiserver-76f77b778f-2ksq6\" (UID: \"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b\") " pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.131622 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cd6b9ce8-4fe8-405e-9399-354b7d8ee20b-trusted-ca-bundle\") pod \"apiserver-76f77b778f-2ksq6\" (UID: \"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b\") " pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.132807 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8qhsm"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.134360 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd6b9ce8-4fe8-405e-9399-354b7d8ee20b-serving-cert\") pod \"apiserver-76f77b778f-2ksq6\" (UID: \"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b\") " pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.136507 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-fvpc8"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.136723 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8qhsm" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.142843 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-w2jsl"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.143469 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.146345 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/159d9178-1402-4232-a9df-ad4389bed9b0-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-w26tc\" (UID: \"159d9178-1402-4232-a9df-ad4389bed9b0\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-w26tc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.150002 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.151107 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-2ksq6"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.153325 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-plk2p"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.161317 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-jmpls"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.162160 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: 
\"kubernetes.io/secret/cd6b9ce8-4fe8-405e-9399-354b7d8ee20b-encryption-config\") pod \"apiserver-76f77b778f-2ksq6\" (UID: \"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b\") " pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.162737 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-prqrg"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.162879 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-jmpls" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.164492 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hsg4j"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.166780 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.166973 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hkvxt"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.168215 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-89gcz"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.170209 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x7vr5"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.171841 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5r8jg"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.172961 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-cqz89"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.173831 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6mnhh"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.175348 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-6h8q7"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.178733 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-w26tc"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.178980 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-mvftm"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.179854 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-98snb"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.181075 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-8lvfs"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.182788 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hqz8m"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.184988 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-nsrr7"] Oct 
10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.186724 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-n8sc6"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.186842 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.188849 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-wzwz7"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.189532 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-r7x5v"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.190502 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-r7x5v" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.190969 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-9g6sf"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.193058 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zphkz"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.193161 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-9g6sf" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.194202 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cvkkx"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.196275 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-nlsfc"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.197688 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-qc5c7"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.199036 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-5sgz5"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.200245 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vfgnk"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.201410 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-2cggl"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.202689 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-56xl2"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.204055 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-r7x5v"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.205173 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-9kfj6"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.206541 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mn6dn"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.206825 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Oct 10 
16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.208249 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vwbn8"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.209266 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-mgdrk"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.212105 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8qhsm"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.213264 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335230-q6qww"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.214207 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a63337d7-2cc9-49b7-af06-550a24f39ff0-trusted-ca\") pod \"console-operator-58897d9998-6h8q7\" (UID: \"a63337d7-2cc9-49b7-af06-550a24f39ff0\") " pod="openshift-console-operator/console-operator-58897d9998-6h8q7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.214235 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd0fbd79-76a9-4a87-a67b-20e782993376-config\") pod \"controller-manager-879f6c89f-fvpc8\" (UID: \"dd0fbd79-76a9-4a87-a67b-20e782993376\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fvpc8" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.214253 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2dff39f3-58aa-4d27-a85f-c1a09bb3d83e-config\") pod \"etcd-operator-b45778765-mvftm\" (UID: \"2dff39f3-58aa-4d27-a85f-c1a09bb3d83e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mvftm" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.214295 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eec5360d-85e3-4785-98b2-10ba224ffdfe-config\") pod \"machine-approver-56656f9798-w9v24\" (UID: \"eec5360d-85e3-4785-98b2-10ba224ffdfe\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9v24" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.214325 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c37fc2e5-d729-4258-8506-22f328f5927a-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-x7vr5\" (UID: \"c37fc2e5-d729-4258-8506-22f328f5927a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x7vr5" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.214342 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5mmzh\" (UniqueName: \"kubernetes.io/projected/eb807790-1011-4dfc-842e-fd4106f7a6c3-kube-api-access-5mmzh\") pod \"ingress-operator-5b745b69d9-cqz89\" (UID: \"eb807790-1011-4dfc-842e-fd4106f7a6c3\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqz89" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.214360 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p54j8\" (UniqueName: \"kubernetes.io/projected/6a574afe-31ee-4706-90c0-a9c477f5bce7-kube-api-access-p54j8\") pod 
\"router-default-5444994796-qpw4g\" (UID: \"6a574afe-31ee-4706-90c0-a9c477f5bce7\") " pod="openshift-ingress/router-default-5444994796-qpw4g" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.214389 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.214414 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0691553d-e534-4c08-b56e-d99bd02e53fa-audit-policies\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.214433 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.214449 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/eb807790-1011-4dfc-842e-fd4106f7a6c3-trusted-ca\") pod \"ingress-operator-5b745b69d9-cqz89\" (UID: \"eb807790-1011-4dfc-842e-fd4106f7a6c3\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqz89" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.214462 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/eec5360d-85e3-4785-98b2-10ba224ffdfe-auth-proxy-config\") pod \"machine-approver-56656f9798-w9v24\" (UID: \"eec5360d-85e3-4785-98b2-10ba224ffdfe\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9v24" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.214477 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.214494 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f91a26e-3654-415e-9ebc-91034a85cbc1-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-nsrr7\" (UID: \"6f91a26e-3654-415e-9ebc-91034a85cbc1\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-nsrr7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.214510 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f8bab52f-8a27-495e-80d2-9794b984939e-client-ca\") pod \"route-controller-manager-6576b87f9c-w2jsl\" (UID: \"f8bab52f-8a27-495e-80d2-9794b984939e\") " 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w2jsl" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.214524 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/6a574afe-31ee-4706-90c0-a9c477f5bce7-stats-auth\") pod \"router-default-5444994796-qpw4g\" (UID: \"6a574afe-31ee-4706-90c0-a9c477f5bce7\") " pod="openshift-ingress/router-default-5444994796-qpw4g" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.214539 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3f2b6b90-47a2-4e3c-8394-13a961d92c03-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-hsg4j\" (UID: \"3f2b6b90-47a2-4e3c-8394-13a961d92c03\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hsg4j" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.214553 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxgnl\" (UniqueName: \"kubernetes.io/projected/6f91a26e-3654-415e-9ebc-91034a85cbc1-kube-api-access-hxgnl\") pod \"openshift-controller-manager-operator-756b6f6bc6-nsrr7\" (UID: \"6f91a26e-3654-415e-9ebc-91034a85cbc1\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-nsrr7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.214568 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6a574afe-31ee-4706-90c0-a9c477f5bce7-metrics-certs\") pod \"router-default-5444994796-qpw4g\" (UID: \"6a574afe-31ee-4706-90c0-a9c477f5bce7\") " pod="openshift-ingress/router-default-5444994796-qpw4g" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.214581 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dd0fbd79-76a9-4a87-a67b-20e782993376-serving-cert\") pod \"controller-manager-879f6c89f-fvpc8\" (UID: \"dd0fbd79-76a9-4a87-a67b-20e782993376\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fvpc8" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.214596 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/dd0fbd79-76a9-4a87-a67b-20e782993376-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-fvpc8\" (UID: \"dd0fbd79-76a9-4a87-a67b-20e782993376\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fvpc8" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.214633 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-9g6sf"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.215010 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2dff39f3-58aa-4d27-a85f-c1a09bb3d83e-config\") pod \"etcd-operator-b45778765-mvftm\" (UID: \"2dff39f3-58aa-4d27-a85f-c1a09bb3d83e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mvftm" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.215493 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-bbvkz"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.215646 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/a63337d7-2cc9-49b7-af06-550a24f39ff0-trusted-ca\") pod \"console-operator-58897d9998-6h8q7\" (UID: \"a63337d7-2cc9-49b7-af06-550a24f39ff0\") " pod="openshift-console-operator/console-operator-58897d9998-6h8q7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.215684 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd0fbd79-76a9-4a87-a67b-20e782993376-config\") pod \"controller-manager-879f6c89f-fvpc8\" (UID: \"dd0fbd79-76a9-4a87-a67b-20e782993376\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fvpc8" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.216005 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/eec5360d-85e3-4785-98b2-10ba224ffdfe-auth-proxy-config\") pod \"machine-approver-56656f9798-w9v24\" (UID: \"eec5360d-85e3-4785-98b2-10ba224ffdfe\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9v24" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.216075 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-bbvkz" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.216135 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eec5360d-85e3-4785-98b2-10ba224ffdfe-config\") pod \"machine-approver-56656f9798-w9v24\" (UID: \"eec5360d-85e3-4785-98b2-10ba224ffdfe\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9v24" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.216219 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0691553d-e534-4c08-b56e-d99bd02e53fa-audit-policies\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.214612 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n879q\" (UniqueName: \"kubernetes.io/projected/98460888-57af-4ef6-a390-c0a592164ddb-kube-api-access-n879q\") pod \"dns-operator-744455d44c-nlsfc\" (UID: \"98460888-57af-4ef6-a390-c0a592164ddb\") " pod="openshift-dns-operator/dns-operator-744455d44c-nlsfc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.217627 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f91a26e-3654-415e-9ebc-91034a85cbc1-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-nsrr7\" (UID: \"6f91a26e-3654-415e-9ebc-91034a85cbc1\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-nsrr7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.217650 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/6a574afe-31ee-4706-90c0-a9c477f5bce7-default-certificate\") pod \"router-default-5444994796-qpw4g\" (UID: \"6a574afe-31ee-4706-90c0-a9c477f5bce7\") " pod="openshift-ingress/router-default-5444994796-qpw4g" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.217564 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/f8bab52f-8a27-495e-80d2-9794b984939e-client-ca\") pod \"route-controller-manager-6576b87f9c-w2jsl\" (UID: \"f8bab52f-8a27-495e-80d2-9794b984939e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w2jsl" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.217937 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/089d7979-5043-482c-a464-b62a385a3f16-auth-proxy-config\") pod \"machine-config-operator-74547568cd-prqrg\" (UID: \"089d7979-5043-482c-a464-b62a385a3f16\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-prqrg" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.218160 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jpxlh\" (UniqueName: \"kubernetes.io/projected/089d7979-5043-482c-a464-b62a385a3f16-kube-api-access-jpxlh\") pod \"machine-config-operator-74547568cd-prqrg\" (UID: \"089d7979-5043-482c-a464-b62a385a3f16\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-prqrg" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.218254 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.218359 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0691553d-e534-4c08-b56e-d99bd02e53fa-audit-dir\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.218491 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.218541 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0691553d-e534-4c08-b56e-d99bd02e53fa-audit-dir\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.218502 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.218743 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/089d7979-5043-482c-a464-b62a385a3f16-auth-proxy-config\") pod 
\"machine-config-operator-74547568cd-prqrg\" (UID: \"089d7979-5043-482c-a464-b62a385a3f16\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-prqrg" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.218857 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3f2b6b90-47a2-4e3c-8394-13a961d92c03-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-hsg4j\" (UID: \"3f2b6b90-47a2-4e3c-8394-13a961d92c03\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hsg4j" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.218901 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.218743 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f237a066-51a9-475f-80b5-1627fb073e16-serving-cert\") pod \"authentication-operator-69f744f599-qc5c7\" (UID: \"f237a066-51a9-475f-80b5-1627fb073e16\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qc5c7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219065 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/713c24fb-e821-419a-b996-9661a6cbf57b-audit-policies\") pod \"apiserver-7bbb656c7d-mjjtc\" (UID: \"713c24fb-e821-419a-b996-9661a6cbf57b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219085 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/713c24fb-e821-419a-b996-9661a6cbf57b-etcd-client\") pod \"apiserver-7bbb656c7d-mjjtc\" (UID: \"713c24fb-e821-419a-b996-9661a6cbf57b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219100 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2dff39f3-58aa-4d27-a85f-c1a09bb3d83e-serving-cert\") pod \"etcd-operator-b45778765-mvftm\" (UID: \"2dff39f3-58aa-4d27-a85f-c1a09bb3d83e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mvftm" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219118 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vw6fc\" (UniqueName: \"kubernetes.io/projected/3f2b6b90-47a2-4e3c-8394-13a961d92c03-kube-api-access-vw6fc\") pod \"openshift-apiserver-operator-796bbdcf4f-hsg4j\" (UID: \"3f2b6b90-47a2-4e3c-8394-13a961d92c03\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hsg4j" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219125 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-bbvkz"] Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219137 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-87lxb\" (UniqueName: \"kubernetes.io/projected/a63337d7-2cc9-49b7-af06-550a24f39ff0-kube-api-access-87lxb\") 
pod \"console-operator-58897d9998-6h8q7\" (UID: \"a63337d7-2cc9-49b7-af06-550a24f39ff0\") " pod="openshift-console-operator/console-operator-58897d9998-6h8q7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219197 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/2dff39f3-58aa-4d27-a85f-c1a09bb3d83e-etcd-service-ca\") pod \"etcd-operator-b45778765-mvftm\" (UID: \"2dff39f3-58aa-4d27-a85f-c1a09bb3d83e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mvftm" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219226 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/713c24fb-e821-419a-b996-9661a6cbf57b-encryption-config\") pod \"apiserver-7bbb656c7d-mjjtc\" (UID: \"713c24fb-e821-419a-b996-9661a6cbf57b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219249 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/98460888-57af-4ef6-a390-c0a592164ddb-metrics-tls\") pod \"dns-operator-744455d44c-nlsfc\" (UID: \"98460888-57af-4ef6-a390-c0a592164ddb\") " pod="openshift-dns-operator/dns-operator-744455d44c-nlsfc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219276 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f237a066-51a9-475f-80b5-1627fb073e16-config\") pod \"authentication-operator-69f744f599-qc5c7\" (UID: \"f237a066-51a9-475f-80b5-1627fb073e16\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qc5c7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219300 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a63337d7-2cc9-49b7-af06-550a24f39ff0-config\") pod \"console-operator-58897d9998-6h8q7\" (UID: \"a63337d7-2cc9-49b7-af06-550a24f39ff0\") " pod="openshift-console-operator/console-operator-58897d9998-6h8q7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219322 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219361 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6a574afe-31ee-4706-90c0-a9c477f5bce7-service-ca-bundle\") pod \"router-default-5444994796-qpw4g\" (UID: \"6a574afe-31ee-4706-90c0-a9c477f5bce7\") " pod="openshift-ingress/router-default-5444994796-qpw4g" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219385 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bsdq\" (UniqueName: \"kubernetes.io/projected/dd0fbd79-76a9-4a87-a67b-20e782993376-kube-api-access-2bsdq\") pod \"controller-manager-879f6c89f-fvpc8\" (UID: \"dd0fbd79-76a9-4a87-a67b-20e782993376\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fvpc8" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219408 4799 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvt5n\" (UniqueName: \"kubernetes.io/projected/2dff39f3-58aa-4d27-a85f-c1a09bb3d83e-kube-api-access-cvt5n\") pod \"etcd-operator-b45778765-mvftm\" (UID: \"2dff39f3-58aa-4d27-a85f-c1a09bb3d83e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mvftm" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219431 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c37fc2e5-d729-4258-8506-22f328f5927a-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-x7vr5\" (UID: \"c37fc2e5-d729-4258-8506-22f328f5927a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x7vr5" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219456 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f237a066-51a9-475f-80b5-1627fb073e16-service-ca-bundle\") pod \"authentication-operator-69f744f599-qc5c7\" (UID: \"f237a066-51a9-475f-80b5-1627fb073e16\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qc5c7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219483 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/089d7979-5043-482c-a464-b62a385a3f16-proxy-tls\") pod \"machine-config-operator-74547568cd-prqrg\" (UID: \"089d7979-5043-482c-a464-b62a385a3f16\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-prqrg" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219507 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219528 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b89bb\" (UniqueName: \"kubernetes.io/projected/a8700150-e5e3-4b1b-a3c8-f7e781ce8f1f-kube-api-access-b89bb\") pod \"cluster-image-registry-operator-dc59b4c8b-hqz8m\" (UID: \"a8700150-e5e3-4b1b-a3c8-f7e781ce8f1f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hqz8m" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219561 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a63337d7-2cc9-49b7-af06-550a24f39ff0-serving-cert\") pod \"console-operator-58897d9998-6h8q7\" (UID: \"a63337d7-2cc9-49b7-af06-550a24f39ff0\") " pod="openshift-console-operator/console-operator-58897d9998-6h8q7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219585 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vssf\" (UniqueName: \"kubernetes.io/projected/a958b529-c3f0-4131-be7b-4d81a3c25499-kube-api-access-7vssf\") pod \"control-plane-machine-set-operator-78cbb6b69f-6mnhh\" (UID: \"a958b529-c3f0-4131-be7b-4d81a3c25499\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6mnhh" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219606 4799 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-tsxnv\" (UniqueName: \"kubernetes.io/projected/e56235b4-8348-4fae-af0a-639fcacfc997-kube-api-access-tsxnv\") pod \"downloads-7954f5f757-plk2p\" (UID: \"e56235b4-8348-4fae-af0a-639fcacfc997\") " pod="openshift-console/downloads-7954f5f757-plk2p" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219631 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219652 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/a8700150-e5e3-4b1b-a3c8-f7e781ce8f1f-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-hqz8m\" (UID: \"a8700150-e5e3-4b1b-a3c8-f7e781ce8f1f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hqz8m" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219664 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/713c24fb-e821-419a-b996-9661a6cbf57b-audit-policies\") pod \"apiserver-7bbb656c7d-mjjtc\" (UID: \"713c24fb-e821-419a-b996-9661a6cbf57b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219677 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/713c24fb-e821-419a-b996-9661a6cbf57b-audit-dir\") pod \"apiserver-7bbb656c7d-mjjtc\" (UID: \"713c24fb-e821-419a-b996-9661a6cbf57b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219693 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219699 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/eb807790-1011-4dfc-842e-fd4106f7a6c3-bound-sa-token\") pod \"ingress-operator-5b745b69d9-cqz89\" (UID: \"eb807790-1011-4dfc-842e-fd4106f7a6c3\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqz89" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219830 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/089d7979-5043-482c-a464-b62a385a3f16-images\") pod \"machine-config-operator-74547568cd-prqrg\" (UID: \"089d7979-5043-482c-a464-b62a385a3f16\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-prqrg" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219856 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dd0fbd79-76a9-4a87-a67b-20e782993376-client-ca\") pod \"controller-manager-879f6c89f-fvpc8\" 
(UID: \"dd0fbd79-76a9-4a87-a67b-20e782993376\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fvpc8" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219876 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f237a066-51a9-475f-80b5-1627fb073e16-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-qc5c7\" (UID: \"f237a066-51a9-475f-80b5-1627fb073e16\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qc5c7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219901 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6f91a26e-3654-415e-9ebc-91034a85cbc1-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-nsrr7\" (UID: \"6f91a26e-3654-415e-9ebc-91034a85cbc1\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-nsrr7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219938 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvq5g\" (UniqueName: \"kubernetes.io/projected/713c24fb-e821-419a-b996-9661a6cbf57b-kube-api-access-pvq5g\") pod \"apiserver-7bbb656c7d-mjjtc\" (UID: \"713c24fb-e821-419a-b996-9661a6cbf57b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219960 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/18a6e757-37be-47ce-98ea-bda0221cab2c-serving-cert\") pod \"openshift-config-operator-7777fb866f-5sgz5\" (UID: \"18a6e757-37be-47ce-98ea-bda0221cab2c\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5sgz5" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.219981 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.220005 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/eec5360d-85e3-4785-98b2-10ba224ffdfe-machine-approver-tls\") pod \"machine-approver-56656f9798-w9v24\" (UID: \"eec5360d-85e3-4785-98b2-10ba224ffdfe\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9v24" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.220032 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55fpk\" (UniqueName: \"kubernetes.io/projected/f8bab52f-8a27-495e-80d2-9794b984939e-kube-api-access-55fpk\") pod \"route-controller-manager-6576b87f9c-w2jsl\" (UID: \"f8bab52f-8a27-495e-80d2-9794b984939e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w2jsl" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.220038 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/dd0fbd79-76a9-4a87-a67b-20e782993376-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-fvpc8\" (UID: \"dd0fbd79-76a9-4a87-a67b-20e782993376\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-fvpc8" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.220048 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f2b6b90-47a2-4e3c-8394-13a961d92c03-config\") pod \"openshift-apiserver-operator-796bbdcf4f-hsg4j\" (UID: \"3f2b6b90-47a2-4e3c-8394-13a961d92c03\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hsg4j" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.220066 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/eb807790-1011-4dfc-842e-fd4106f7a6c3-metrics-tls\") pod \"ingress-operator-5b745b69d9-cqz89\" (UID: \"eb807790-1011-4dfc-842e-fd4106f7a6c3\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqz89" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.220090 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/713c24fb-e821-419a-b996-9661a6cbf57b-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-mjjtc\" (UID: \"713c24fb-e821-419a-b996-9661a6cbf57b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.220117 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/713c24fb-e821-419a-b996-9661a6cbf57b-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-mjjtc\" (UID: \"713c24fb-e821-419a-b996-9661a6cbf57b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.220137 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/18a6e757-37be-47ce-98ea-bda0221cab2c-available-featuregates\") pod \"openshift-config-operator-7777fb866f-5sgz5\" (UID: \"18a6e757-37be-47ce-98ea-bda0221cab2c\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5sgz5" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.220160 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h27xd\" (UniqueName: \"kubernetes.io/projected/18a6e757-37be-47ce-98ea-bda0221cab2c-kube-api-access-h27xd\") pod \"openshift-config-operator-7777fb866f-5sgz5\" (UID: \"18a6e757-37be-47ce-98ea-bda0221cab2c\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5sgz5" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.220191 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gmcp9\" (UniqueName: \"kubernetes.io/projected/0691553d-e534-4c08-b56e-d99bd02e53fa-kube-api-access-gmcp9\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.220214 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4jgn\" (UniqueName: \"kubernetes.io/projected/eec5360d-85e3-4785-98b2-10ba224ffdfe-kube-api-access-r4jgn\") pod \"machine-approver-56656f9798-w9v24\" (UID: \"eec5360d-85e3-4785-98b2-10ba224ffdfe\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9v24" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.220243 4799 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gcfj\" (UniqueName: \"kubernetes.io/projected/f237a066-51a9-475f-80b5-1627fb073e16-kube-api-access-5gcfj\") pod \"authentication-operator-69f744f599-qc5c7\" (UID: \"f237a066-51a9-475f-80b5-1627fb073e16\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qc5c7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.220268 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.220293 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c37fc2e5-d729-4258-8506-22f328f5927a-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-x7vr5\" (UID: \"c37fc2e5-d729-4258-8506-22f328f5927a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x7vr5" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.220315 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a8700150-e5e3-4b1b-a3c8-f7e781ce8f1f-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-hqz8m\" (UID: \"a8700150-e5e3-4b1b-a3c8-f7e781ce8f1f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hqz8m" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.220343 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8bab52f-8a27-495e-80d2-9794b984939e-config\") pod \"route-controller-manager-6576b87f9c-w2jsl\" (UID: \"f8bab52f-8a27-495e-80d2-9794b984939e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w2jsl" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.220368 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.220391 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/2dff39f3-58aa-4d27-a85f-c1a09bb3d83e-etcd-client\") pod \"etcd-operator-b45778765-mvftm\" (UID: \"2dff39f3-58aa-4d27-a85f-c1a09bb3d83e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mvftm" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.220417 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/713c24fb-e821-419a-b996-9661a6cbf57b-serving-cert\") pod \"apiserver-7bbb656c7d-mjjtc\" (UID: \"713c24fb-e821-419a-b996-9661a6cbf57b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.220438 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/a958b529-c3f0-4131-be7b-4d81a3c25499-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-6mnhh\" (UID: \"a958b529-c3f0-4131-be7b-4d81a3c25499\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6mnhh" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.220461 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f8bab52f-8a27-495e-80d2-9794b984939e-serving-cert\") pod \"route-controller-manager-6576b87f9c-w2jsl\" (UID: \"f8bab52f-8a27-495e-80d2-9794b984939e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w2jsl" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.220485 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/2dff39f3-58aa-4d27-a85f-c1a09bb3d83e-etcd-ca\") pod \"etcd-operator-b45778765-mvftm\" (UID: \"2dff39f3-58aa-4d27-a85f-c1a09bb3d83e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mvftm" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.220505 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a8700150-e5e3-4b1b-a3c8-f7e781ce8f1f-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-hqz8m\" (UID: \"a8700150-e5e3-4b1b-a3c8-f7e781ce8f1f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hqz8m" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.220983 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.220992 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.220986 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/2dff39f3-58aa-4d27-a85f-c1a09bb3d83e-etcd-service-ca\") pod \"etcd-operator-b45778765-mvftm\" (UID: \"2dff39f3-58aa-4d27-a85f-c1a09bb3d83e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mvftm" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.221023 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/713c24fb-e821-419a-b996-9661a6cbf57b-audit-dir\") pod \"apiserver-7bbb656c7d-mjjtc\" (UID: \"713c24fb-e821-419a-b996-9661a6cbf57b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.221877 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a63337d7-2cc9-49b7-af06-550a24f39ff0-config\") pod 
\"console-operator-58897d9998-6h8q7\" (UID: \"a63337d7-2cc9-49b7-af06-550a24f39ff0\") " pod="openshift-console-operator/console-operator-58897d9998-6h8q7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.222261 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/713c24fb-e821-419a-b996-9661a6cbf57b-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-mjjtc\" (UID: \"713c24fb-e821-419a-b996-9661a6cbf57b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.222259 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.224027 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/713c24fb-e821-419a-b996-9661a6cbf57b-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-mjjtc\" (UID: \"713c24fb-e821-419a-b996-9661a6cbf57b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.224881 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.225011 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/713c24fb-e821-419a-b996-9661a6cbf57b-encryption-config\") pod \"apiserver-7bbb656c7d-mjjtc\" (UID: \"713c24fb-e821-419a-b996-9661a6cbf57b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.225118 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dd0fbd79-76a9-4a87-a67b-20e782993376-client-ca\") pod \"controller-manager-879f6c89f-fvpc8\" (UID: \"dd0fbd79-76a9-4a87-a67b-20e782993376\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fvpc8" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.225124 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/18a6e757-37be-47ce-98ea-bda0221cab2c-available-featuregates\") pod \"openshift-config-operator-7777fb866f-5sgz5\" (UID: \"18a6e757-37be-47ce-98ea-bda0221cab2c\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5sgz5" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.225376 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f2b6b90-47a2-4e3c-8394-13a961d92c03-config\") pod \"openshift-apiserver-operator-796bbdcf4f-hsg4j\" (UID: \"3f2b6b90-47a2-4e3c-8394-13a961d92c03\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hsg4j" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.225794 4799 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dd0fbd79-76a9-4a87-a67b-20e782993376-serving-cert\") pod \"controller-manager-879f6c89f-fvpc8\" (UID: \"dd0fbd79-76a9-4a87-a67b-20e782993376\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fvpc8" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.225968 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a63337d7-2cc9-49b7-af06-550a24f39ff0-serving-cert\") pod \"console-operator-58897d9998-6h8q7\" (UID: \"a63337d7-2cc9-49b7-af06-550a24f39ff0\") " pod="openshift-console-operator/console-operator-58897d9998-6h8q7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.226057 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8bab52f-8a27-495e-80d2-9794b984939e-config\") pod \"route-controller-manager-6576b87f9c-w2jsl\" (UID: \"f8bab52f-8a27-495e-80d2-9794b984939e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w2jsl" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.226066 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.226148 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/713c24fb-e821-419a-b996-9661a6cbf57b-serving-cert\") pod \"apiserver-7bbb656c7d-mjjtc\" (UID: \"713c24fb-e821-419a-b996-9661a6cbf57b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.226579 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/713c24fb-e821-419a-b996-9661a6cbf57b-etcd-client\") pod \"apiserver-7bbb656c7d-mjjtc\" (UID: \"713c24fb-e821-419a-b996-9661a6cbf57b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.226673 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/a8700150-e5e3-4b1b-a3c8-f7e781ce8f1f-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-hqz8m\" (UID: \"a8700150-e5e3-4b1b-a3c8-f7e781ce8f1f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hqz8m" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.227078 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/18a6e757-37be-47ce-98ea-bda0221cab2c-serving-cert\") pod \"openshift-config-operator-7777fb866f-5sgz5\" (UID: \"18a6e757-37be-47ce-98ea-bda0221cab2c\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5sgz5" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.227120 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6f91a26e-3654-415e-9ebc-91034a85cbc1-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-nsrr7\" (UID: 
\"6f91a26e-3654-415e-9ebc-91034a85cbc1\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-nsrr7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.227591 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.228490 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/2dff39f3-58aa-4d27-a85f-c1a09bb3d83e-etcd-ca\") pod \"etcd-operator-b45778765-mvftm\" (UID: \"2dff39f3-58aa-4d27-a85f-c1a09bb3d83e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mvftm" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.228850 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.229470 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2dff39f3-58aa-4d27-a85f-c1a09bb3d83e-serving-cert\") pod \"etcd-operator-b45778765-mvftm\" (UID: \"2dff39f3-58aa-4d27-a85f-c1a09bb3d83e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mvftm" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.229742 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a8700150-e5e3-4b1b-a3c8-f7e781ce8f1f-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-hqz8m\" (UID: \"a8700150-e5e3-4b1b-a3c8-f7e781ce8f1f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hqz8m" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.230201 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.230228 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/98460888-57af-4ef6-a390-c0a592164ddb-metrics-tls\") pod \"dns-operator-744455d44c-nlsfc\" (UID: \"98460888-57af-4ef6-a390-c0a592164ddb\") " pod="openshift-dns-operator/dns-operator-744455d44c-nlsfc" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.230601 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f8bab52f-8a27-495e-80d2-9794b984939e-serving-cert\") pod \"route-controller-manager-6576b87f9c-w2jsl\" (UID: \"f8bab52f-8a27-495e-80d2-9794b984939e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w2jsl" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.233453 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/2dff39f3-58aa-4d27-a85f-c1a09bb3d83e-etcd-client\") pod \"etcd-operator-b45778765-mvftm\" (UID: \"2dff39f3-58aa-4d27-a85f-c1a09bb3d83e\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-mvftm" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.236934 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.238436 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/eec5360d-85e3-4785-98b2-10ba224ffdfe-machine-approver-tls\") pod \"machine-approver-56656f9798-w9v24\" (UID: \"eec5360d-85e3-4785-98b2-10ba224ffdfe\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9v24" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.248304 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.271702 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.277826 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/eb807790-1011-4dfc-842e-fd4106f7a6c3-trusted-ca\") pod \"ingress-operator-5b745b69d9-cqz89\" (UID: \"eb807790-1011-4dfc-842e-fd4106f7a6c3\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqz89" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.286867 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.306850 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.316535 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/eb807790-1011-4dfc-842e-fd4106f7a6c3-metrics-tls\") pod \"ingress-operator-5b745b69d9-cqz89\" (UID: \"eb807790-1011-4dfc-842e-fd4106f7a6c3\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqz89" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.327109 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.346746 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.367044 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.374268 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f237a066-51a9-475f-80b5-1627fb073e16-serving-cert\") pod \"authentication-operator-69f744f599-qc5c7\" (UID: \"f237a066-51a9-475f-80b5-1627fb073e16\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qc5c7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.386931 4799 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.392680 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f237a066-51a9-475f-80b5-1627fb073e16-config\") pod \"authentication-operator-69f744f599-qc5c7\" (UID: \"f237a066-51a9-475f-80b5-1627fb073e16\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qc5c7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.401537 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.401547 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.414087 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.422647 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f237a066-51a9-475f-80b5-1627fb073e16-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-qc5c7\" (UID: \"f237a066-51a9-475f-80b5-1627fb073e16\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qc5c7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.427530 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.431285 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f237a066-51a9-475f-80b5-1627fb073e16-service-ca-bundle\") pod \"authentication-operator-69f744f599-qc5c7\" (UID: \"f237a066-51a9-475f-80b5-1627fb073e16\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qc5c7" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.447923 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.467776 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.488662 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.506874 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.527898 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.531697 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c37fc2e5-d729-4258-8506-22f328f5927a-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-x7vr5\" (UID: \"c37fc2e5-d729-4258-8506-22f328f5927a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x7vr5" Oct 10 16:34:03 crc 
kubenswrapper[4799]: I1010 16:34:03.549051 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.567259 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.579587 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c37fc2e5-d729-4258-8506-22f328f5927a-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-x7vr5\" (UID: \"c37fc2e5-d729-4258-8506-22f328f5927a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x7vr5" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.587588 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.607741 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.613048 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/6a574afe-31ee-4706-90c0-a9c477f5bce7-default-certificate\") pod \"router-default-5444994796-qpw4g\" (UID: \"6a574afe-31ee-4706-90c0-a9c477f5bce7\") " pod="openshift-ingress/router-default-5444994796-qpw4g" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.627196 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.647813 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.661910 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/6a574afe-31ee-4706-90c0-a9c477f5bce7-stats-auth\") pod \"router-default-5444994796-qpw4g\" (UID: \"6a574afe-31ee-4706-90c0-a9c477f5bce7\") " pod="openshift-ingress/router-default-5444994796-qpw4g" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.667855 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.683825 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/a958b529-c3f0-4131-be7b-4d81a3c25499-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-6mnhh\" (UID: \"a958b529-c3f0-4131-be7b-4d81a3c25499\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6mnhh" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.688024 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.708575 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.721538 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/6a574afe-31ee-4706-90c0-a9c477f5bce7-metrics-certs\") pod \"router-default-5444994796-qpw4g\" (UID: \"6a574afe-31ee-4706-90c0-a9c477f5bce7\") " pod="openshift-ingress/router-default-5444994796-qpw4g" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.728016 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.732087 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6a574afe-31ee-4706-90c0-a9c477f5bce7-service-ca-bundle\") pod \"router-default-5444994796-qpw4g\" (UID: \"6a574afe-31ee-4706-90c0-a9c477f5bce7\") " pod="openshift-ingress/router-default-5444994796-qpw4g" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.747979 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.767450 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.788078 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.792391 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/089d7979-5043-482c-a464-b62a385a3f16-images\") pod \"machine-config-operator-74547568cd-prqrg\" (UID: \"089d7979-5043-482c-a464-b62a385a3f16\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-prqrg" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.808228 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.827693 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.835809 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/089d7979-5043-482c-a464-b62a385a3f16-proxy-tls\") pod \"machine-config-operator-74547568cd-prqrg\" (UID: \"089d7979-5043-482c-a464-b62a385a3f16\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-prqrg" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.867906 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.888356 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.907828 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.928050 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.947235 4799 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.967951 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Oct 10 16:34:03 crc kubenswrapper[4799]: I1010 16:34:03.988732 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.009921 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.027936 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.048875 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.069540 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.085896 4799 request.go:700] Waited for 1.001367405s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver-operator/secrets?fieldSelector=metadata.name%3Dkube-apiserver-operator-serving-cert&limit=500&resourceVersion=0 Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.088539 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.108861 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.128485 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.147915 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.168281 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.187803 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.208971 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.228672 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.268336 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Oct 10 16:34:04 crc 
kubenswrapper[4799]: I1010 16:34:04.287846 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.308826 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.328595 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.348844 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.368217 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.388056 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.402537 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.402621 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.408291 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.428349 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.448450 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.467906 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.488240 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.508216 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.554842 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4xnj\" (UniqueName: \"kubernetes.io/projected/cd6b9ce8-4fe8-405e-9399-354b7d8ee20b-kube-api-access-q4xnj\") pod \"apiserver-76f77b778f-2ksq6\" (UID: \"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b\") " pod="openshift-apiserver/apiserver-76f77b778f-2ksq6"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.567494 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.572849 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sfc2x\" (UniqueName: \"kubernetes.io/projected/159d9178-1402-4232-a9df-ad4389bed9b0-kube-api-access-sfc2x\") pod \"machine-api-operator-5694c8668f-w26tc\" (UID: \"159d9178-1402-4232-a9df-ad4389bed9b0\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-w26tc"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.587993 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.608091 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.628602 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.652861 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.668269 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.688731 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.708291 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.728263 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.757351 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.767775 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.784197 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-w26tc"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.788402 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.800676 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-2ksq6"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.807911 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.827417 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.847738 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.868648 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.887492 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.908804 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.928374 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.948409 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.968370 4799 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k"
Oct 10 16:34:04 crc kubenswrapper[4799]: I1010 16:34:04.988026 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.015253 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-2ksq6"]
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.028464 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p54j8\" (UniqueName: \"kubernetes.io/projected/6a574afe-31ee-4706-90c0-a9c477f5bce7-kube-api-access-p54j8\") pod \"router-default-5444994796-qpw4g\" (UID: \"6a574afe-31ee-4706-90c0-a9c477f5bce7\") " pod="openshift-ingress/router-default-5444994796-qpw4g"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.041156 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-w26tc"]
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.043610 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5mmzh\" (UniqueName: \"kubernetes.io/projected/eb807790-1011-4dfc-842e-fd4106f7a6c3-kube-api-access-5mmzh\") pod \"ingress-operator-5b745b69d9-cqz89\" (UID: \"eb807790-1011-4dfc-842e-fd4106f7a6c3\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqz89"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.047101 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt"
Oct 10 16:34:05 crc kubenswrapper[4799]: W1010 16:34:05.050210 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod159d9178_1402_4232_a9df_ad4389bed9b0.slice/crio-88ad9b685371744965003c2a62fe17c4b0f4a9b647dccc64e1f37e38e66463d3 WatchSource:0}: Error finding container 88ad9b685371744965003c2a62fe17c4b0f4a9b647dccc64e1f37e38e66463d3: Status 404 returned error can't find the container with id 88ad9b685371744965003c2a62fe17c4b0f4a9b647dccc64e1f37e38e66463d3
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.066214 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-qpw4g"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.067452 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt"
Oct 10 16:34:05 crc kubenswrapper[4799]: W1010 16:34:05.077201 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6a574afe_31ee_4706_90c0_a9c477f5bce7.slice/crio-73b61580e13932d1edbf3732c09550781c867654e41abb1d72834467b6356253 WatchSource:0}: Error finding container 73b61580e13932d1edbf3732c09550781c867654e41abb1d72834467b6356253: Status 404 returned error can't find the container with id 73b61580e13932d1edbf3732c09550781c867654e41abb1d72834467b6356253
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.086483 4799 request.go:700] Waited for 1.870060449s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress-canary/secrets?fieldSelector=metadata.name%3Ddefault-dockercfg-2llfx&limit=500&resourceVersion=0
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.087909 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.124090 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxgnl\" (UniqueName: \"kubernetes.io/projected/6f91a26e-3654-415e-9ebc-91034a85cbc1-kube-api-access-hxgnl\") pod \"openshift-controller-manager-operator-756b6f6bc6-nsrr7\" (UID: \"6f91a26e-3654-415e-9ebc-91034a85cbc1\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-nsrr7"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.129304 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.161177 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n879q\" (UniqueName: \"kubernetes.io/projected/98460888-57af-4ef6-a390-c0a592164ddb-kube-api-access-n879q\") pod \"dns-operator-744455d44c-nlsfc\" (UID: \"98460888-57af-4ef6-a390-c0a592164ddb\") " pod="openshift-dns-operator/dns-operator-744455d44c-nlsfc"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.183613 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jpxlh\" (UniqueName: \"kubernetes.io/projected/089d7979-5043-482c-a464-b62a385a3f16-kube-api-access-jpxlh\") pod \"machine-config-operator-74547568cd-prqrg\" (UID: \"089d7979-5043-482c-a464-b62a385a3f16\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-prqrg"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.190201 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-qpw4g" event={"ID":"6a574afe-31ee-4706-90c0-a9c477f5bce7","Type":"ContainerStarted","Data":"73b61580e13932d1edbf3732c09550781c867654e41abb1d72834467b6356253"}
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.191648 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-w26tc" event={"ID":"159d9178-1402-4232-a9df-ad4389bed9b0","Type":"ContainerStarted","Data":"88ad9b685371744965003c2a62fe17c4b0f4a9b647dccc64e1f37e38e66463d3"}
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.192705 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" event={"ID":"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b","Type":"ContainerStarted","Data":"7cd056f90853898b6119a7145e79e61acced3c6d61b8563f638b45a201faff7e"}
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.202004 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87lxb\" (UniqueName: \"kubernetes.io/projected/a63337d7-2cc9-49b7-af06-550a24f39ff0-kube-api-access-87lxb\") pod \"console-operator-58897d9998-6h8q7\" (UID: \"a63337d7-2cc9-49b7-af06-550a24f39ff0\") " pod="openshift-console-operator/console-operator-58897d9998-6h8q7"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.228280 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/eb807790-1011-4dfc-842e-fd4106f7a6c3-bound-sa-token\") pod \"ingress-operator-5b745b69d9-cqz89\" (UID: \"eb807790-1011-4dfc-842e-fd4106f7a6c3\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqz89"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.244605 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vssf\" (UniqueName: \"kubernetes.io/projected/a958b529-c3f0-4131-be7b-4d81a3c25499-kube-api-access-7vssf\") pod \"control-plane-machine-set-operator-78cbb6b69f-6mnhh\" (UID: \"a958b529-c3f0-4131-be7b-4d81a3c25499\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6mnhh"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.267706 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a8700150-e5e3-4b1b-a3c8-f7e781ce8f1f-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-hqz8m\" (UID: \"a8700150-e5e3-4b1b-a3c8-f7e781ce8f1f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hqz8m"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.276079 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-nsrr7"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.283573 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vw6fc\" (UniqueName: \"kubernetes.io/projected/3f2b6b90-47a2-4e3c-8394-13a961d92c03-kube-api-access-vw6fc\") pod \"openshift-apiserver-operator-796bbdcf4f-hsg4j\" (UID: \"3f2b6b90-47a2-4e3c-8394-13a961d92c03\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hsg4j"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.296515 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-6h8q7"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.303618 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hsg4j"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.307834 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bsdq\" (UniqueName: \"kubernetes.io/projected/dd0fbd79-76a9-4a87-a67b-20e782993376-kube-api-access-2bsdq\") pod \"controller-manager-879f6c89f-fvpc8\" (UID: \"dd0fbd79-76a9-4a87-a67b-20e782993376\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fvpc8"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.310221 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-nlsfc"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.326315 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvt5n\" (UniqueName: \"kubernetes.io/projected/2dff39f3-58aa-4d27-a85f-c1a09bb3d83e-kube-api-access-cvt5n\") pod \"etcd-operator-b45778765-mvftm\" (UID: \"2dff39f3-58aa-4d27-a85f-c1a09bb3d83e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mvftm"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.338184 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqz89"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.344704 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h27xd\" (UniqueName: \"kubernetes.io/projected/18a6e757-37be-47ce-98ea-bda0221cab2c-kube-api-access-h27xd\") pod \"openshift-config-operator-7777fb866f-5sgz5\" (UID: \"18a6e757-37be-47ce-98ea-bda0221cab2c\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5sgz5"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.368000 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b89bb\" (UniqueName: \"kubernetes.io/projected/a8700150-e5e3-4b1b-a3c8-f7e781ce8f1f-kube-api-access-b89bb\") pod \"cluster-image-registry-operator-dc59b4c8b-hqz8m\" (UID: \"a8700150-e5e3-4b1b-a3c8-f7e781ce8f1f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hqz8m"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.374051 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6mnhh"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.379539 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-prqrg"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.387632 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55fpk\" (UniqueName: \"kubernetes.io/projected/f8bab52f-8a27-495e-80d2-9794b984939e-kube-api-access-55fpk\") pod \"route-controller-manager-6576b87f9c-w2jsl\" (UID: \"f8bab52f-8a27-495e-80d2-9794b984939e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w2jsl"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.412551 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvq5g\" (UniqueName: \"kubernetes.io/projected/713c24fb-e821-419a-b996-9661a6cbf57b-kube-api-access-pvq5g\") pod \"apiserver-7bbb656c7d-mjjtc\" (UID: \"713c24fb-e821-419a-b996-9661a6cbf57b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.430359 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-fvpc8"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.434282 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tsxnv\" (UniqueName: \"kubernetes.io/projected/e56235b4-8348-4fae-af0a-639fcacfc997-kube-api-access-tsxnv\") pod \"downloads-7954f5f757-plk2p\" (UID: \"e56235b4-8348-4fae-af0a-639fcacfc997\") " pod="openshift-console/downloads-7954f5f757-plk2p"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.440469 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.444841 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gcfj\" (UniqueName: \"kubernetes.io/projected/f237a066-51a9-475f-80b5-1627fb073e16-kube-api-access-5gcfj\") pod \"authentication-operator-69f744f599-qc5c7\" (UID: \"f237a066-51a9-475f-80b5-1627fb073e16\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qc5c7"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.459920 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w2jsl"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.466369 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gmcp9\" (UniqueName: \"kubernetes.io/projected/0691553d-e534-4c08-b56e-d99bd02e53fa-kube-api-access-gmcp9\") pod \"oauth-openshift-558db77b4-56xl2\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " pod="openshift-authentication/oauth-openshift-558db77b4-56xl2"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.477031 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-plk2p"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.481629 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r4jgn\" (UniqueName: \"kubernetes.io/projected/eec5360d-85e3-4785-98b2-10ba224ffdfe-kube-api-access-r4jgn\") pod \"machine-approver-56656f9798-w9v24\" (UID: \"eec5360d-85e3-4785-98b2-10ba224ffdfe\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9v24"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.498837 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-nsrr7"]
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.502737 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c37fc2e5-d729-4258-8506-22f328f5927a-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-x7vr5\" (UID: \"c37fc2e5-d729-4258-8506-22f328f5927a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x7vr5"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.508248 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hqz8m"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.508458 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.527246 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.557522 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.565307 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-5sgz5"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.567861 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.582858 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-56xl2"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.591555 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-mvftm"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.617013 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9v24"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.633231 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.642869 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hsg4j"]
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.644504 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-qc5c7"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.647270 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.658947 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x7vr5"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.660201 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-service-ca\") pod \"console-f9d7485db-8lvfs\" (UID: \"2866fbf1-3a49-4e4c-867b-86a40ae85ebe\") " pod="openshift-console/console-f9d7485db-8lvfs"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.660230 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/60ab14da-0f2e-48cc-873a-44eaf0662ed8-bound-sa-token\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.660251 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/5284905f-e20b-49be-8cef-df7f96e4460d-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-n8sc6\" (UID: \"5284905f-e20b-49be-8cef-df7f96e4460d\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-n8sc6"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.660271 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/60ab14da-0f2e-48cc-873a-44eaf0662ed8-registry-tls\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.660287 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-console-oauth-config\") pod \"console-f9d7485db-8lvfs\" (UID: \"2866fbf1-3a49-4e4c-867b-86a40ae85ebe\") " pod="openshift-console/console-f9d7485db-8lvfs"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.660321 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82xb2\" (UniqueName: \"kubernetes.io/projected/5284905f-e20b-49be-8cef-df7f96e4460d-kube-api-access-82xb2\") pod \"cluster-samples-operator-665b6dd947-n8sc6\" (UID: \"5284905f-e20b-49be-8cef-df7f96e4460d\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-n8sc6"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.660413 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/60ab14da-0f2e-48cc-873a-44eaf0662ed8-registry-certificates\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.660434 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bfnt\" (UniqueName: \"kubernetes.io/projected/60ab14da-0f2e-48cc-873a-44eaf0662ed8-kube-api-access-7bfnt\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.660460 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwssh\" (UniqueName: \"kubernetes.io/projected/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-kube-api-access-fwssh\") pod \"console-f9d7485db-8lvfs\" (UID: \"2866fbf1-3a49-4e4c-867b-86a40ae85ebe\") " pod="openshift-console/console-f9d7485db-8lvfs"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.660484 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.660501 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-oauth-serving-cert\") pod \"console-f9d7485db-8lvfs\" (UID: \"2866fbf1-3a49-4e4c-867b-86a40ae85ebe\") " pod="openshift-console/console-f9d7485db-8lvfs"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.660528 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/60ab14da-0f2e-48cc-873a-44eaf0662ed8-installation-pull-secrets\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.660543 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-console-config\") pod \"console-f9d7485db-8lvfs\" (UID: \"2866fbf1-3a49-4e4c-867b-86a40ae85ebe\") " pod="openshift-console/console-f9d7485db-8lvfs"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.660557 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-trusted-ca-bundle\") pod \"console-f9d7485db-8lvfs\" (UID: \"2866fbf1-3a49-4e4c-867b-86a40ae85ebe\") " pod="openshift-console/console-f9d7485db-8lvfs"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.660574 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/60ab14da-0f2e-48cc-873a-44eaf0662ed8-trusted-ca\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.660606 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-console-serving-cert\") pod \"console-f9d7485db-8lvfs\" (UID: \"2866fbf1-3a49-4e4c-867b-86a40ae85ebe\") " pod="openshift-console/console-f9d7485db-8lvfs"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.660622 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/60ab14da-0f2e-48cc-873a-44eaf0662ed8-ca-trust-extracted\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:05 crc kubenswrapper[4799]: E1010 16:34:05.661515 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:06.161501098 +0000 UTC m=+139.669825213 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.730692 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-prqrg"]
Oct 10 16:34:05 crc kubenswrapper[4799]: W1010 16:34:05.760119 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod089d7979_5043_482c_a464_b62a385a3f16.slice/crio-83be8b10b4ac5d9c841774366acbb1de689f8fb50cc6e3f24ca23ed48c6bc071 WatchSource:0}: Error finding container 83be8b10b4ac5d9c841774366acbb1de689f8fb50cc6e3f24ca23ed48c6bc071: Status 404 returned error can't find the container with id 83be8b10b4ac5d9c841774366acbb1de689f8fb50cc6e3f24ca23ed48c6bc071
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.761183 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.761363 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/18c902fe-0322-4ada-b042-7693b4a5f024-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-wzwz7\" (UID: \"18c902fe-0322-4ada-b042-7693b4a5f024\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-wzwz7"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.761390 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n769b\" (UniqueName: \"kubernetes.io/projected/281cf811-d3fe-4cd9-9292-74d81584ca10-kube-api-access-n769b\") pod \"dns-default-r7x5v\" (UID: \"281cf811-d3fe-4cd9-9292-74d81584ca10\") " pod="openshift-dns/dns-default-r7x5v"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.761419 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmg5b\" (UniqueName: \"kubernetes.io/projected/1b6f04fa-503f-4596-a740-e807679f686e-kube-api-access-lmg5b\") pod \"service-ca-9c57cc56f-2cggl\" (UID: \"1b6f04fa-503f-4596-a740-e807679f686e\") " pod="openshift-service-ca/service-ca-9c57cc56f-2cggl"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.761447 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/ae03eb1e-0698-4aef-9a6f-7708d92adf66-mountpoint-dir\") pod \"csi-hostpathplugin-9g6sf\" (UID: \"ae03eb1e-0698-4aef-9a6f-7708d92adf66\") " pod="hostpath-provisioner/csi-hostpathplugin-9g6sf"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.761483 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-console-serving-cert\") pod \"console-f9d7485db-8lvfs\" (UID: \"2866fbf1-3a49-4e4c-867b-86a40ae85ebe\") " pod="openshift-console/console-f9d7485db-8lvfs"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.761519 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/60ab14da-0f2e-48cc-873a-44eaf0662ed8-ca-trust-extracted\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.761534 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e1269123-5f8e-40e4-87cf-577d7b148684-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-cvkkx\" (UID: \"e1269123-5f8e-40e4-87cf-577d7b148684\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cvkkx"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.761561 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/25cd298f-ccde-4805-801d-2d486c7e45da-config-volume\") pod \"collect-profiles-29335230-q6qww\" (UID: \"25cd298f-ccde-4805-801d-2d486c7e45da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335230-q6qww"
Oct 10 16:34:05 crc kubenswrapper[4799]: E1010 16:34:05.761611 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:06.261581868 +0000 UTC m=+139.769905993 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.761661 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e1269123-5f8e-40e4-87cf-577d7b148684-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-cvkkx\" (UID: \"e1269123-5f8e-40e4-87cf-577d7b148684\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cvkkx"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.761722 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-service-ca\") pod \"console-f9d7485db-8lvfs\" (UID: \"2866fbf1-3a49-4e4c-867b-86a40ae85ebe\") " pod="openshift-console/console-f9d7485db-8lvfs"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.761748 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djh9d\" (UniqueName: \"kubernetes.io/projected/d4b5557a-bd3b-419f-bd0c-e3ed6d1f8def-kube-api-access-djh9d\") pod \"migrator-59844c95c7-9kfj6\" (UID: \"d4b5557a-bd3b-419f-bd0c-e3ed6d1f8def\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9kfj6"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.761804 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/60ab14da-0f2e-48cc-873a-44eaf0662ed8-bound-sa-token\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.761826 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f50c760d-39a2-4717-bf30-942dcd170900-serving-cert\") pod \"service-ca-operator-777779d784-mgdrk\" (UID: \"f50c760d-39a2-4717-bf30-942dcd170900\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mgdrk"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.761849 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1269123-5f8e-40e4-87cf-577d7b148684-config\") pod \"kube-controller-manager-operator-78b949d7b-cvkkx\" (UID: \"e1269123-5f8e-40e4-87cf-577d7b148684\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cvkkx"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.761907 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/ee5210db-86a0-4346-bfc4-e4a6460eaae1-tmpfs\") pod \"packageserver-d55dfcdfc-5r8jg\" (UID: \"ee5210db-86a0-4346-bfc4-e4a6460eaae1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5r8jg"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.761936 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f50c760d-39a2-4717-bf30-942dcd170900-config\") pod \"service-ca-operator-777779d784-mgdrk\" (UID: \"f50c760d-39a2-4717-bf30-942dcd170900\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mgdrk"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.761958 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lj5cr\" (UniqueName: \"kubernetes.io/projected/f50c760d-39a2-4717-bf30-942dcd170900-kube-api-access-lj5cr\") pod \"service-ca-operator-777779d784-mgdrk\" (UID: \"f50c760d-39a2-4717-bf30-942dcd170900\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mgdrk"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.762012 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/5284905f-e20b-49be-8cef-df7f96e4460d-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-n8sc6\" (UID: \"5284905f-e20b-49be-8cef-df7f96e4460d\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-n8sc6"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.762112 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/60ab14da-0f2e-48cc-873a-44eaf0662ed8-registry-tls\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.762136 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/ae03eb1e-0698-4aef-9a6f-7708d92adf66-csi-data-dir\") pod \"csi-hostpathplugin-9g6sf\" (UID: \"ae03eb1e-0698-4aef-9a6f-7708d92adf66\") " pod="hostpath-provisioner/csi-hostpathplugin-9g6sf"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.762157 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qpr82\" (UniqueName: \"kubernetes.io/projected/01118562-5441-420c-8d4c-2a983e584de3-kube-api-access-qpr82\") pod \"ingress-canary-bbvkz\" (UID: \"01118562-5441-420c-8d4c-2a983e584de3\") " pod="openshift-ingress-canary/ingress-canary-bbvkz"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.762210 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-console-oauth-config\") pod \"console-f9d7485db-8lvfs\" (UID: \"2866fbf1-3a49-4e4c-867b-86a40ae85ebe\") " pod="openshift-console/console-f9d7485db-8lvfs"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.762279 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/1b6f04fa-503f-4596-a740-e807679f686e-signing-key\") pod \"service-ca-9c57cc56f-2cggl\" (UID: \"1b6f04fa-503f-4596-a740-e807679f686e\") " pod="openshift-service-ca/service-ca-9c57cc56f-2cggl"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.762304 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82xb2\" (UniqueName: \"kubernetes.io/projected/5284905f-e20b-49be-8cef-df7f96e4460d-kube-api-access-82xb2\") pod \"cluster-samples-operator-665b6dd947-n8sc6\" (UID: \"5284905f-e20b-49be-8cef-df7f96e4460d\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-n8sc6"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.762366 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fv5rp\" (UniqueName: \"kubernetes.io/projected/bb7dd02d-b8d9-4954-aaee-df6a63ea0708-kube-api-access-fv5rp\") pod \"package-server-manager-789f6589d5-8qhsm\" (UID: \"bb7dd02d-b8d9-4954-aaee-df6a63ea0708\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8qhsm"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.762389 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e7393869-51bf-4974-97c7-33adfe1b44f5-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mn6dn\" (UID: \"e7393869-51bf-4974-97c7-33adfe1b44f5\") " pod="openshift-marketplace/marketplace-operator-79b997595-mn6dn"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.762428 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/281cf811-d3fe-4cd9-9292-74d81584ca10-config-volume\") pod \"dns-default-r7x5v\" (UID: \"281cf811-d3fe-4cd9-9292-74d81584ca10\") " pod="openshift-dns/dns-default-r7x5v"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.762451 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/ae03eb1e-0698-4aef-9a6f-7708d92adf66-registration-dir\") pod \"csi-hostpathplugin-9g6sf\" (UID: \"ae03eb1e-0698-4aef-9a6f-7708d92adf66\") " pod="hostpath-provisioner/csi-hostpathplugin-9g6sf"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.762484 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cb1d3fef-2917-4e7b-916d-3b6381e25c33-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-hkvxt\" (UID: \"cb1d3fef-2917-4e7b-916d-3b6381e25c33\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hkvxt"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.762521 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/bb7dd02d-b8d9-4954-aaee-df6a63ea0708-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-8qhsm\" (UID: \"bb7dd02d-b8d9-4954-aaee-df6a63ea0708\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8qhsm"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.762572 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/ae03eb1e-0698-4aef-9a6f-7708d92adf66-socket-dir\") pod \"csi-hostpathplugin-9g6sf\" (UID: \"ae03eb1e-0698-4aef-9a6f-7708d92adf66\") " pod="hostpath-provisioner/csi-hostpathplugin-9g6sf"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.762612 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/928b4fca-64a2-4c2c-a1b2-38bb069c13c8-config\") pod \"kube-apiserver-operator-766d6c64bb-zphkz\" (UID: \"928b4fca-64a2-4c2c-a1b2-38bb069c13c8\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zphkz"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.763132 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/60ab14da-0f2e-48cc-873a-44eaf0662ed8-ca-trust-extracted\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.763611 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gmb7b\" (UniqueName: \"kubernetes.io/projected/ae03eb1e-0698-4aef-9a6f-7708d92adf66-kube-api-access-gmb7b\") pod \"csi-hostpathplugin-9g6sf\" (UID: \"ae03eb1e-0698-4aef-9a6f-7708d92adf66\") " pod="hostpath-provisioner/csi-hostpathplugin-9g6sf"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.763637 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/1b6f04fa-503f-4596-a740-e807679f686e-signing-cabundle\") pod \"service-ca-9c57cc56f-2cggl\" (UID: \"1b6f04fa-503f-4596-a740-e807679f686e\") " pod="openshift-service-ca/service-ca-9c57cc56f-2cggl"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.763696 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gc7vg\" (UniqueName: \"kubernetes.io/projected/c1b734d0-de3c-4519-8fb7-8408961ea09b-kube-api-access-gc7vg\") pod \"machine-config-server-jmpls\" (UID: \"c1b734d0-de3c-4519-8fb7-8408961ea09b\") " pod="openshift-machine-config-operator/machine-config-server-jmpls"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.763944 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/977bd741-4d59-48b0-ab48-c22f6eecdb2e-proxy-tls\") pod \"machine-config-controller-84d6567774-98snb\" (UID: \"977bd741-4d59-48b0-ab48-c22f6eecdb2e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-98snb"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.763966 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/60ab14da-0f2e-48cc-873a-44eaf0662ed8-registry-certificates\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.763983 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ee5210db-86a0-4346-bfc4-e4a6460eaae1-webhook-cert\") pod \"packageserver-d55dfcdfc-5r8jg\" (UID: \"ee5210db-86a0-4346-bfc4-e4a6460eaae1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5r8jg"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.764011 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/928b4fca-64a2-4c2c-a1b2-38bb069c13c8-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-zphkz\" (UID: \"928b4fca-64a2-4c2c-a1b2-38bb069c13c8\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zphkz"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.764027 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p9rwx\" (UniqueName: \"kubernetes.io/projected/977bd741-4d59-48b0-ab48-c22f6eecdb2e-kube-api-access-p9rwx\") pod \"machine-config-controller-84d6567774-98snb\" (UID: \"977bd741-4d59-48b0-ab48-c22f6eecdb2e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-98snb"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.764046 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4knh\" (UniqueName: \"kubernetes.io/projected/c0095ab7-a332-4923-b1ac-90abd65087d5-kube-api-access-g4knh\") pod \"olm-operator-6b444d44fb-vfgnk\" (UID: \"c0095ab7-a332-4923-b1ac-90abd65087d5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vfgnk"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.764063 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/e7393869-51bf-4974-97c7-33adfe1b44f5-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-mn6dn\" (UID: \"e7393869-51bf-4974-97c7-33adfe1b44f5\") " pod="openshift-marketplace/marketplace-operator-79b997595-mn6dn"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.764082 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bfnt\" (UniqueName: \"kubernetes.io/projected/60ab14da-0f2e-48cc-873a-44eaf0662ed8-kube-api-access-7bfnt\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.764097 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/281cf811-d3fe-4cd9-9292-74d81584ca10-metrics-tls\") pod \"dns-default-r7x5v\" (UID: \"281cf811-d3fe-4cd9-9292-74d81584ca10\") " pod="openshift-dns/dns-default-r7x5v"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.764126 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwssh\" (UniqueName: \"kubernetes.io/projected/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-kube-api-access-fwssh\") pod \"console-f9d7485db-8lvfs\" (UID: \"2866fbf1-3a49-4e4c-867b-86a40ae85ebe\") " pod="openshift-console/console-f9d7485db-8lvfs"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.764141 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/c1b734d0-de3c-4519-8fb7-8408961ea09b-certs\") pod \"machine-config-server-jmpls\" (UID: \"c1b734d0-de3c-4519-8fb7-8408961ea09b\") " pod="openshift-machine-config-operator/machine-config-server-jmpls"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.764169 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mb4s6\" (UniqueName: \"kubernetes.io/projected/1786a2c0-54d3-44db-bd63-ac7a0cd09eb4-kube-api-access-mb4s6\") pod \"catalog-operator-68c6474976-vwbn8\" (UID: \"1786a2c0-54d3-44db-bd63-ac7a0cd09eb4\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vwbn8"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.764228 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.764247 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/25cd298f-ccde-4805-801d-2d486c7e45da-secret-volume\") pod \"collect-profiles-29335230-q6qww\" (UID: \"25cd298f-ccde-4805-801d-2d486c7e45da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335230-q6qww"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.764292 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-oauth-serving-cert\") pod \"console-f9d7485db-8lvfs\" (UID: \"2866fbf1-3a49-4e4c-867b-86a40ae85ebe\") " pod="openshift-console/console-f9d7485db-8lvfs"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.764308 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb1d3fef-2917-4e7b-916d-3b6381e25c33-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-hkvxt\" (UID: \"cb1d3fef-2917-4e7b-916d-3b6381e25c33\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hkvxt"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.764325 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/977bd741-4d59-48b0-ab48-c22f6eecdb2e-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-98snb\" (UID: \"977bd741-4d59-48b0-ab48-c22f6eecdb2e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-98snb"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.764339 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c0095ab7-a332-4923-b1ac-90abd65087d5-profile-collector-cert\") pod \"olm-operator-6b444d44fb-vfgnk\" (UID: \"c0095ab7-a332-4923-b1ac-90abd65087d5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vfgnk"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.764356 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c0095ab7-a332-4923-b1ac-90abd65087d5-srv-cert\") pod \"olm-operator-6b444d44fb-vfgnk\" (UID: \"c0095ab7-a332-4923-b1ac-90abd65087d5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vfgnk"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.764616 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/ae03eb1e-0698-4aef-9a6f-7708d92adf66-plugins-dir\") pod \"csi-hostpathplugin-9g6sf\" (UID: \"ae03eb1e-0698-4aef-9a6f-7708d92adf66\") " pod="hostpath-provisioner/csi-hostpathplugin-9g6sf"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.764686 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/c1b734d0-de3c-4519-8fb7-8408961ea09b-node-bootstrap-token\") pod \"machine-config-server-jmpls\" (UID: \"c1b734d0-de3c-4519-8fb7-8408961ea09b\") " pod="openshift-machine-config-operator/machine-config-server-jmpls"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.764728 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbsrn\" (UniqueName: \"kubernetes.io/projected/cb1d3fef-2917-4e7b-916d-3b6381e25c33-kube-api-access-rbsrn\") pod \"kube-storage-version-migrator-operator-b67b599dd-hkvxt\" (UID: \"cb1d3fef-2917-4e7b-916d-3b6381e25c33\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hkvxt"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.764777 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcnfx\" (UniqueName: \"kubernetes.io/projected/25cd298f-ccde-4805-801d-2d486c7e45da-kube-api-access-jcnfx\") pod \"collect-profiles-29335230-q6qww\" (UID: \"25cd298f-ccde-4805-801d-2d486c7e45da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335230-q6qww"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.764813 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/01118562-5441-420c-8d4c-2a983e584de3-cert\") pod \"ingress-canary-bbvkz\" (UID: \"01118562-5441-420c-8d4c-2a983e584de3\") " pod="openshift-ingress-canary/ingress-canary-bbvkz"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.764828 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ee5210db-86a0-4346-bfc4-e4a6460eaae1-apiservice-cert\") pod \"packageserver-d55dfcdfc-5r8jg\" (UID: \"ee5210db-86a0-4346-bfc4-e4a6460eaae1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5r8jg"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.764858 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/60ab14da-0f2e-48cc-873a-44eaf0662ed8-installation-pull-secrets\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.764875 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ld5fb\" (UniqueName: \"kubernetes.io/projected/ee5210db-86a0-4346-bfc4-e4a6460eaae1-kube-api-access-ld5fb\") pod \"packageserver-d55dfcdfc-5r8jg\" (UID: \"ee5210db-86a0-4346-bfc4-e4a6460eaae1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5r8jg"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.764919 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-console-config\") pod \"console-f9d7485db-8lvfs\" (UID: \"2866fbf1-3a49-4e4c-867b-86a40ae85ebe\") " pod="openshift-console/console-f9d7485db-8lvfs"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.764944 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-trusted-ca-bundle\") pod \"console-f9d7485db-8lvfs\" (UID: \"2866fbf1-3a49-4e4c-867b-86a40ae85ebe\") " pod="openshift-console/console-f9d7485db-8lvfs"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.764963 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/1786a2c0-54d3-44db-bd63-ac7a0cd09eb4-srv-cert\") pod \"catalog-operator-68c6474976-vwbn8\" (UID: \"1786a2c0-54d3-44db-bd63-ac7a0cd09eb4\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vwbn8"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.764986 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-669bd\" (UniqueName: \"kubernetes.io/projected/18c902fe-0322-4ada-b042-7693b4a5f024-kube-api-access-669bd\") pod \"multus-admission-controller-857f4d67dd-wzwz7\" (UID: \"18c902fe-0322-4ada-b042-7693b4a5f024\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-wzwz7"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.765001 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/928b4fca-64a2-4c2c-a1b2-38bb069c13c8-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-zphkz\" (UID: \"928b4fca-64a2-4c2c-a1b2-38bb069c13c8\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zphkz"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.765039 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/1786a2c0-54d3-44db-bd63-ac7a0cd09eb4-profile-collector-cert\") pod \"catalog-operator-68c6474976-vwbn8\" (UID: \"1786a2c0-54d3-44db-bd63-ac7a0cd09eb4\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vwbn8"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.765066 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/60ab14da-0f2e-48cc-873a-44eaf0662ed8-trusted-ca\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.765082 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2r7w2\" (UniqueName: \"kubernetes.io/projected/e7393869-51bf-4974-97c7-33adfe1b44f5-kube-api-access-2r7w2\") pod \"marketplace-operator-79b997595-mn6dn\" (UID: \"e7393869-51bf-4974-97c7-33adfe1b44f5\") " pod="openshift-marketplace/marketplace-operator-79b997595-mn6dn"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.769109 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-service-ca\") pod \"console-f9d7485db-8lvfs\" (UID: \"2866fbf1-3a49-4e4c-867b-86a40ae85ebe\") " pod="openshift-console/console-f9d7485db-8lvfs"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.769720 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/60ab14da-0f2e-48cc-873a-44eaf0662ed8-registry-tls\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.775550 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/5284905f-e20b-49be-8cef-df7f96e4460d-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-n8sc6\" (UID: \"5284905f-e20b-49be-8cef-df7f96e4460d\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-n8sc6"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.775685 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-console-serving-cert\") pod \"console-f9d7485db-8lvfs\" (UID: \"2866fbf1-3a49-4e4c-867b-86a40ae85ebe\") " pod="openshift-console/console-f9d7485db-8lvfs"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.779187 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-console-oauth-config\") pod \"console-f9d7485db-8lvfs\" (UID: \"2866fbf1-3a49-4e4c-867b-86a40ae85ebe\") " pod="openshift-console/console-f9d7485db-8lvfs"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.780582 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/60ab14da-0f2e-48cc-873a-44eaf0662ed8-registry-certificates\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.780851 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-oauth-serving-cert\") pod \"console-f9d7485db-8lvfs\" (UID: \"2866fbf1-3a49-4e4c-867b-86a40ae85ebe\") " pod="openshift-console/console-f9d7485db-8lvfs"
Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.782309 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-trusted-ca-bundle\") pod \"console-f9d7485db-8lvfs\" (UID: \"2866fbf1-3a49-4e4c-867b-86a40ae85ebe\") " pod="openshift-console/console-f9d7485db-8lvfs"
Oct 10 16:34:05 crc kubenswrapper[4799]: E1010 16:34:05.783884 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:06.283869289 +0000 UTC m=+139.792193404 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.786071 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-nlsfc"] Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.786110 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-6h8q7"] Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.786884 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-console-config\") pod \"console-f9d7485db-8lvfs\" (UID: \"2866fbf1-3a49-4e4c-867b-86a40ae85ebe\") " pod="openshift-console/console-f9d7485db-8lvfs" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.792625 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/60ab14da-0f2e-48cc-873a-44eaf0662ed8-trusted-ca\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.804992 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/60ab14da-0f2e-48cc-873a-44eaf0662ed8-installation-pull-secrets\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.805202 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/60ab14da-0f2e-48cc-873a-44eaf0662ed8-bound-sa-token\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.823504 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwssh\" (UniqueName: \"kubernetes.io/projected/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-kube-api-access-fwssh\") pod \"console-f9d7485db-8lvfs\" (UID: \"2866fbf1-3a49-4e4c-867b-86a40ae85ebe\") " pod="openshift-console/console-f9d7485db-8lvfs" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.848873 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bfnt\" (UniqueName: \"kubernetes.io/projected/60ab14da-0f2e-48cc-873a-44eaf0662ed8-kube-api-access-7bfnt\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.863969 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82xb2\" (UniqueName: \"kubernetes.io/projected/5284905f-e20b-49be-8cef-df7f96e4460d-kube-api-access-82xb2\") pod \"cluster-samples-operator-665b6dd947-n8sc6\" (UID: 
\"5284905f-e20b-49be-8cef-df7f96e4460d\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-n8sc6" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.866719 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:05 crc kubenswrapper[4799]: E1010 16:34:05.866864 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:06.366840598 +0000 UTC m=+139.875164713 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.867024 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gc7vg\" (UniqueName: \"kubernetes.io/projected/c1b734d0-de3c-4519-8fb7-8408961ea09b-kube-api-access-gc7vg\") pod \"machine-config-server-jmpls\" (UID: \"c1b734d0-de3c-4519-8fb7-8408961ea09b\") " pod="openshift-machine-config-operator/machine-config-server-jmpls" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.867068 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/977bd741-4d59-48b0-ab48-c22f6eecdb2e-proxy-tls\") pod \"machine-config-controller-84d6567774-98snb\" (UID: \"977bd741-4d59-48b0-ab48-c22f6eecdb2e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-98snb" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.867088 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ee5210db-86a0-4346-bfc4-e4a6460eaae1-webhook-cert\") pod \"packageserver-d55dfcdfc-5r8jg\" (UID: \"ee5210db-86a0-4346-bfc4-e4a6460eaae1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5r8jg" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.867588 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p9rwx\" (UniqueName: \"kubernetes.io/projected/977bd741-4d59-48b0-ab48-c22f6eecdb2e-kube-api-access-p9rwx\") pod \"machine-config-controller-84d6567774-98snb\" (UID: \"977bd741-4d59-48b0-ab48-c22f6eecdb2e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-98snb" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.868669 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/928b4fca-64a2-4c2c-a1b2-38bb069c13c8-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-zphkz\" (UID: \"928b4fca-64a2-4c2c-a1b2-38bb069c13c8\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zphkz" Oct 10 16:34:05 crc 
kubenswrapper[4799]: I1010 16:34:05.868723 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/e7393869-51bf-4974-97c7-33adfe1b44f5-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-mn6dn\" (UID: \"e7393869-51bf-4974-97c7-33adfe1b44f5\") " pod="openshift-marketplace/marketplace-operator-79b997595-mn6dn" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.868846 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4knh\" (UniqueName: \"kubernetes.io/projected/c0095ab7-a332-4923-b1ac-90abd65087d5-kube-api-access-g4knh\") pod \"olm-operator-6b444d44fb-vfgnk\" (UID: \"c0095ab7-a332-4923-b1ac-90abd65087d5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vfgnk" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.868876 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/281cf811-d3fe-4cd9-9292-74d81584ca10-metrics-tls\") pod \"dns-default-r7x5v\" (UID: \"281cf811-d3fe-4cd9-9292-74d81584ca10\") " pod="openshift-dns/dns-default-r7x5v" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.869316 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/c1b734d0-de3c-4519-8fb7-8408961ea09b-certs\") pod \"machine-config-server-jmpls\" (UID: \"c1b734d0-de3c-4519-8fb7-8408961ea09b\") " pod="openshift-machine-config-operator/machine-config-server-jmpls" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.869868 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mb4s6\" (UniqueName: \"kubernetes.io/projected/1786a2c0-54d3-44db-bd63-ac7a0cd09eb4-kube-api-access-mb4s6\") pod \"catalog-operator-68c6474976-vwbn8\" (UID: \"1786a2c0-54d3-44db-bd63-ac7a0cd09eb4\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vwbn8" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.869910 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.870354 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/25cd298f-ccde-4805-801d-2d486c7e45da-secret-volume\") pod \"collect-profiles-29335230-q6qww\" (UID: \"25cd298f-ccde-4805-801d-2d486c7e45da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335230-q6qww" Oct 10 16:34:05 crc kubenswrapper[4799]: E1010 16:34:05.870412 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:06.370394378 +0000 UTC m=+139.878718493 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.870649 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c0095ab7-a332-4923-b1ac-90abd65087d5-profile-collector-cert\") pod \"olm-operator-6b444d44fb-vfgnk\" (UID: \"c0095ab7-a332-4923-b1ac-90abd65087d5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vfgnk" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.870727 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb1d3fef-2917-4e7b-916d-3b6381e25c33-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-hkvxt\" (UID: \"cb1d3fef-2917-4e7b-916d-3b6381e25c33\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hkvxt" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.870778 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/977bd741-4d59-48b0-ab48-c22f6eecdb2e-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-98snb\" (UID: \"977bd741-4d59-48b0-ab48-c22f6eecdb2e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-98snb" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.870801 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c0095ab7-a332-4923-b1ac-90abd65087d5-srv-cert\") pod \"olm-operator-6b444d44fb-vfgnk\" (UID: \"c0095ab7-a332-4923-b1ac-90abd65087d5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vfgnk" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.870865 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/ae03eb1e-0698-4aef-9a6f-7708d92adf66-plugins-dir\") pod \"csi-hostpathplugin-9g6sf\" (UID: \"ae03eb1e-0698-4aef-9a6f-7708d92adf66\") " pod="hostpath-provisioner/csi-hostpathplugin-9g6sf" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.871978 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/ae03eb1e-0698-4aef-9a6f-7708d92adf66-plugins-dir\") pod \"csi-hostpathplugin-9g6sf\" (UID: \"ae03eb1e-0698-4aef-9a6f-7708d92adf66\") " pod="hostpath-provisioner/csi-hostpathplugin-9g6sf" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.873070 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb1d3fef-2917-4e7b-916d-3b6381e25c33-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-hkvxt\" (UID: \"cb1d3fef-2917-4e7b-916d-3b6381e25c33\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hkvxt" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.873816 4799 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/977bd741-4d59-48b0-ab48-c22f6eecdb2e-proxy-tls\") pod \"machine-config-controller-84d6567774-98snb\" (UID: \"977bd741-4d59-48b0-ab48-c22f6eecdb2e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-98snb" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.873990 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/977bd741-4d59-48b0-ab48-c22f6eecdb2e-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-98snb\" (UID: \"977bd741-4d59-48b0-ab48-c22f6eecdb2e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-98snb" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.870893 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/c1b734d0-de3c-4519-8fb7-8408961ea09b-node-bootstrap-token\") pod \"machine-config-server-jmpls\" (UID: \"c1b734d0-de3c-4519-8fb7-8408961ea09b\") " pod="openshift-machine-config-operator/machine-config-server-jmpls" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.874440 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbsrn\" (UniqueName: \"kubernetes.io/projected/cb1d3fef-2917-4e7b-916d-3b6381e25c33-kube-api-access-rbsrn\") pod \"kube-storage-version-migrator-operator-b67b599dd-hkvxt\" (UID: \"cb1d3fef-2917-4e7b-916d-3b6381e25c33\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hkvxt" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.874489 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jcnfx\" (UniqueName: \"kubernetes.io/projected/25cd298f-ccde-4805-801d-2d486c7e45da-kube-api-access-jcnfx\") pod \"collect-profiles-29335230-q6qww\" (UID: \"25cd298f-ccde-4805-801d-2d486c7e45da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335230-q6qww" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.874586 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/01118562-5441-420c-8d4c-2a983e584de3-cert\") pod \"ingress-canary-bbvkz\" (UID: \"01118562-5441-420c-8d4c-2a983e584de3\") " pod="openshift-ingress-canary/ingress-canary-bbvkz" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.875642 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ee5210db-86a0-4346-bfc4-e4a6460eaae1-apiservice-cert\") pod \"packageserver-d55dfcdfc-5r8jg\" (UID: \"ee5210db-86a0-4346-bfc4-e4a6460eaae1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5r8jg" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.875678 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ld5fb\" (UniqueName: \"kubernetes.io/projected/ee5210db-86a0-4346-bfc4-e4a6460eaae1-kube-api-access-ld5fb\") pod \"packageserver-d55dfcdfc-5r8jg\" (UID: \"ee5210db-86a0-4346-bfc4-e4a6460eaae1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5r8jg" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.876149 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/1786a2c0-54d3-44db-bd63-ac7a0cd09eb4-srv-cert\") pod 
\"catalog-operator-68c6474976-vwbn8\" (UID: \"1786a2c0-54d3-44db-bd63-ac7a0cd09eb4\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vwbn8" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.876206 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/928b4fca-64a2-4c2c-a1b2-38bb069c13c8-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-zphkz\" (UID: \"928b4fca-64a2-4c2c-a1b2-38bb069c13c8\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zphkz" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.876915 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-669bd\" (UniqueName: \"kubernetes.io/projected/18c902fe-0322-4ada-b042-7693b4a5f024-kube-api-access-669bd\") pod \"multus-admission-controller-857f4d67dd-wzwz7\" (UID: \"18c902fe-0322-4ada-b042-7693b4a5f024\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-wzwz7" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.876981 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/1786a2c0-54d3-44db-bd63-ac7a0cd09eb4-profile-collector-cert\") pod \"catalog-operator-68c6474976-vwbn8\" (UID: \"1786a2c0-54d3-44db-bd63-ac7a0cd09eb4\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vwbn8" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.877031 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2r7w2\" (UniqueName: \"kubernetes.io/projected/e7393869-51bf-4974-97c7-33adfe1b44f5-kube-api-access-2r7w2\") pod \"marketplace-operator-79b997595-mn6dn\" (UID: \"e7393869-51bf-4974-97c7-33adfe1b44f5\") " pod="openshift-marketplace/marketplace-operator-79b997595-mn6dn" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.877070 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/18c902fe-0322-4ada-b042-7693b4a5f024-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-wzwz7\" (UID: \"18c902fe-0322-4ada-b042-7693b4a5f024\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-wzwz7" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.877116 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n769b\" (UniqueName: \"kubernetes.io/projected/281cf811-d3fe-4cd9-9292-74d81584ca10-kube-api-access-n769b\") pod \"dns-default-r7x5v\" (UID: \"281cf811-d3fe-4cd9-9292-74d81584ca10\") " pod="openshift-dns/dns-default-r7x5v" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.877142 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/ae03eb1e-0698-4aef-9a6f-7708d92adf66-mountpoint-dir\") pod \"csi-hostpathplugin-9g6sf\" (UID: \"ae03eb1e-0698-4aef-9a6f-7708d92adf66\") " pod="hostpath-provisioner/csi-hostpathplugin-9g6sf" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.877202 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmg5b\" (UniqueName: \"kubernetes.io/projected/1b6f04fa-503f-4596-a740-e807679f686e-kube-api-access-lmg5b\") pod \"service-ca-9c57cc56f-2cggl\" (UID: \"1b6f04fa-503f-4596-a740-e807679f686e\") " pod="openshift-service-ca/service-ca-9c57cc56f-2cggl" Oct 10 16:34:05 crc 
kubenswrapper[4799]: I1010 16:34:05.877270 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e1269123-5f8e-40e4-87cf-577d7b148684-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-cvkkx\" (UID: \"e1269123-5f8e-40e4-87cf-577d7b148684\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cvkkx" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.877301 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/25cd298f-ccde-4805-801d-2d486c7e45da-config-volume\") pod \"collect-profiles-29335230-q6qww\" (UID: \"25cd298f-ccde-4805-801d-2d486c7e45da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335230-q6qww" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.877367 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e1269123-5f8e-40e4-87cf-577d7b148684-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-cvkkx\" (UID: \"e1269123-5f8e-40e4-87cf-577d7b148684\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cvkkx" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.877402 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djh9d\" (UniqueName: \"kubernetes.io/projected/d4b5557a-bd3b-419f-bd0c-e3ed6d1f8def-kube-api-access-djh9d\") pod \"migrator-59844c95c7-9kfj6\" (UID: \"d4b5557a-bd3b-419f-bd0c-e3ed6d1f8def\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9kfj6" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.877460 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f50c760d-39a2-4717-bf30-942dcd170900-serving-cert\") pod \"service-ca-operator-777779d784-mgdrk\" (UID: \"f50c760d-39a2-4717-bf30-942dcd170900\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mgdrk" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.877520 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1269123-5f8e-40e4-87cf-577d7b148684-config\") pod \"kube-controller-manager-operator-78b949d7b-cvkkx\" (UID: \"e1269123-5f8e-40e4-87cf-577d7b148684\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cvkkx" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.877552 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/ee5210db-86a0-4346-bfc4-e4a6460eaae1-tmpfs\") pod \"packageserver-d55dfcdfc-5r8jg\" (UID: \"ee5210db-86a0-4346-bfc4-e4a6460eaae1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5r8jg" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.877613 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f50c760d-39a2-4717-bf30-942dcd170900-config\") pod \"service-ca-operator-777779d784-mgdrk\" (UID: \"f50c760d-39a2-4717-bf30-942dcd170900\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mgdrk" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.877644 4799 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-lj5cr\" (UniqueName: \"kubernetes.io/projected/f50c760d-39a2-4717-bf30-942dcd170900-kube-api-access-lj5cr\") pod \"service-ca-operator-777779d784-mgdrk\" (UID: \"f50c760d-39a2-4717-bf30-942dcd170900\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mgdrk" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.877727 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c0095ab7-a332-4923-b1ac-90abd65087d5-srv-cert\") pod \"olm-operator-6b444d44fb-vfgnk\" (UID: \"c0095ab7-a332-4923-b1ac-90abd65087d5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vfgnk" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.877700 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/ae03eb1e-0698-4aef-9a6f-7708d92adf66-csi-data-dir\") pod \"csi-hostpathplugin-9g6sf\" (UID: \"ae03eb1e-0698-4aef-9a6f-7708d92adf66\") " pod="hostpath-provisioner/csi-hostpathplugin-9g6sf" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.878253 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/281cf811-d3fe-4cd9-9292-74d81584ca10-metrics-tls\") pod \"dns-default-r7x5v\" (UID: \"281cf811-d3fe-4cd9-9292-74d81584ca10\") " pod="openshift-dns/dns-default-r7x5v" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.878955 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/e7393869-51bf-4974-97c7-33adfe1b44f5-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-mn6dn\" (UID: \"e7393869-51bf-4974-97c7-33adfe1b44f5\") " pod="openshift-marketplace/marketplace-operator-79b997595-mn6dn" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.879180 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/1786a2c0-54d3-44db-bd63-ac7a0cd09eb4-srv-cert\") pod \"catalog-operator-68c6474976-vwbn8\" (UID: \"1786a2c0-54d3-44db-bd63-ac7a0cd09eb4\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vwbn8" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.879684 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1269123-5f8e-40e4-87cf-577d7b148684-config\") pod \"kube-controller-manager-operator-78b949d7b-cvkkx\" (UID: \"e1269123-5f8e-40e4-87cf-577d7b148684\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cvkkx" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.880414 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/25cd298f-ccde-4805-801d-2d486c7e45da-config-volume\") pod \"collect-profiles-29335230-q6qww\" (UID: \"25cd298f-ccde-4805-801d-2d486c7e45da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335230-q6qww" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.880905 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ee5210db-86a0-4346-bfc4-e4a6460eaae1-webhook-cert\") pod \"packageserver-d55dfcdfc-5r8jg\" (UID: \"ee5210db-86a0-4346-bfc4-e4a6460eaae1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5r8jg" Oct 10 16:34:05 crc 
kubenswrapper[4799]: I1010 16:34:05.881850 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/c1b734d0-de3c-4519-8fb7-8408961ea09b-node-bootstrap-token\") pod \"machine-config-server-jmpls\" (UID: \"c1b734d0-de3c-4519-8fb7-8408961ea09b\") " pod="openshift-machine-config-operator/machine-config-server-jmpls" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.882341 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/1786a2c0-54d3-44db-bd63-ac7a0cd09eb4-profile-collector-cert\") pod \"catalog-operator-68c6474976-vwbn8\" (UID: \"1786a2c0-54d3-44db-bd63-ac7a0cd09eb4\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vwbn8" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.882438 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/ae03eb1e-0698-4aef-9a6f-7708d92adf66-csi-data-dir\") pod \"csi-hostpathplugin-9g6sf\" (UID: \"ae03eb1e-0698-4aef-9a6f-7708d92adf66\") " pod="hostpath-provisioner/csi-hostpathplugin-9g6sf" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.877748 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/ae03eb1e-0698-4aef-9a6f-7708d92adf66-mountpoint-dir\") pod \"csi-hostpathplugin-9g6sf\" (UID: \"ae03eb1e-0698-4aef-9a6f-7708d92adf66\") " pod="hostpath-provisioner/csi-hostpathplugin-9g6sf" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.883354 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/25cd298f-ccde-4805-801d-2d486c7e45da-secret-volume\") pod \"collect-profiles-29335230-q6qww\" (UID: \"25cd298f-ccde-4805-801d-2d486c7e45da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335230-q6qww" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.883897 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f50c760d-39a2-4717-bf30-942dcd170900-config\") pod \"service-ca-operator-777779d784-mgdrk\" (UID: \"f50c760d-39a2-4717-bf30-942dcd170900\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mgdrk" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.884805 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e1269123-5f8e-40e4-87cf-577d7b148684-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-cvkkx\" (UID: \"e1269123-5f8e-40e4-87cf-577d7b148684\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cvkkx" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.885084 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/ee5210db-86a0-4346-bfc4-e4a6460eaae1-tmpfs\") pod \"packageserver-d55dfcdfc-5r8jg\" (UID: \"ee5210db-86a0-4346-bfc4-e4a6460eaae1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5r8jg" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.885387 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f50c760d-39a2-4717-bf30-942dcd170900-serving-cert\") pod \"service-ca-operator-777779d784-mgdrk\" (UID: 
\"f50c760d-39a2-4717-bf30-942dcd170900\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mgdrk" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.886602 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6mnhh"] Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.889838 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c0095ab7-a332-4923-b1ac-90abd65087d5-profile-collector-cert\") pod \"olm-operator-6b444d44fb-vfgnk\" (UID: \"c0095ab7-a332-4923-b1ac-90abd65087d5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vfgnk" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.890223 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/18c902fe-0322-4ada-b042-7693b4a5f024-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-wzwz7\" (UID: \"18c902fe-0322-4ada-b042-7693b4a5f024\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-wzwz7" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.877797 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qpr82\" (UniqueName: \"kubernetes.io/projected/01118562-5441-420c-8d4c-2a983e584de3-kube-api-access-qpr82\") pod \"ingress-canary-bbvkz\" (UID: \"01118562-5441-420c-8d4c-2a983e584de3\") " pod="openshift-ingress-canary/ingress-canary-bbvkz" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.891150 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/1b6f04fa-503f-4596-a740-e807679f686e-signing-key\") pod \"service-ca-9c57cc56f-2cggl\" (UID: \"1b6f04fa-503f-4596-a740-e807679f686e\") " pod="openshift-service-ca/service-ca-9c57cc56f-2cggl" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.891342 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fv5rp\" (UniqueName: \"kubernetes.io/projected/bb7dd02d-b8d9-4954-aaee-df6a63ea0708-kube-api-access-fv5rp\") pod \"package-server-manager-789f6589d5-8qhsm\" (UID: \"bb7dd02d-b8d9-4954-aaee-df6a63ea0708\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8qhsm" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.891524 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e7393869-51bf-4974-97c7-33adfe1b44f5-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mn6dn\" (UID: \"e7393869-51bf-4974-97c7-33adfe1b44f5\") " pod="openshift-marketplace/marketplace-operator-79b997595-mn6dn" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.891958 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/bb7dd02d-b8d9-4954-aaee-df6a63ea0708-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-8qhsm\" (UID: \"bb7dd02d-b8d9-4954-aaee-df6a63ea0708\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8qhsm" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.892087 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/281cf811-d3fe-4cd9-9292-74d81584ca10-config-volume\") pod \"dns-default-r7x5v\" (UID: \"281cf811-d3fe-4cd9-9292-74d81584ca10\") " pod="openshift-dns/dns-default-r7x5v" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.892217 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/ae03eb1e-0698-4aef-9a6f-7708d92adf66-registration-dir\") pod \"csi-hostpathplugin-9g6sf\" (UID: \"ae03eb1e-0698-4aef-9a6f-7708d92adf66\") " pod="hostpath-provisioner/csi-hostpathplugin-9g6sf" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.892354 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cb1d3fef-2917-4e7b-916d-3b6381e25c33-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-hkvxt\" (UID: \"cb1d3fef-2917-4e7b-916d-3b6381e25c33\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hkvxt" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.892572 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/ae03eb1e-0698-4aef-9a6f-7708d92adf66-socket-dir\") pod \"csi-hostpathplugin-9g6sf\" (UID: \"ae03eb1e-0698-4aef-9a6f-7708d92adf66\") " pod="hostpath-provisioner/csi-hostpathplugin-9g6sf" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.892695 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/928b4fca-64a2-4c2c-a1b2-38bb069c13c8-config\") pod \"kube-apiserver-operator-766d6c64bb-zphkz\" (UID: \"928b4fca-64a2-4c2c-a1b2-38bb069c13c8\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zphkz" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.892844 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gmb7b\" (UniqueName: \"kubernetes.io/projected/ae03eb1e-0698-4aef-9a6f-7708d92adf66-kube-api-access-gmb7b\") pod \"csi-hostpathplugin-9g6sf\" (UID: \"ae03eb1e-0698-4aef-9a6f-7708d92adf66\") " pod="hostpath-provisioner/csi-hostpathplugin-9g6sf" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.892971 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/1b6f04fa-503f-4596-a740-e807679f686e-signing-cabundle\") pod \"service-ca-9c57cc56f-2cggl\" (UID: \"1b6f04fa-503f-4596-a740-e807679f686e\") " pod="openshift-service-ca/service-ca-9c57cc56f-2cggl" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.893104 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e7393869-51bf-4974-97c7-33adfe1b44f5-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mn6dn\" (UID: \"e7393869-51bf-4974-97c7-33adfe1b44f5\") " pod="openshift-marketplace/marketplace-operator-79b997595-mn6dn" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.892800 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-cqz89"] Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.892866 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/ae03eb1e-0698-4aef-9a6f-7708d92adf66-registration-dir\") pod 
\"csi-hostpathplugin-9g6sf\" (UID: \"ae03eb1e-0698-4aef-9a6f-7708d92adf66\") " pod="hostpath-provisioner/csi-hostpathplugin-9g6sf" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.894381 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/1b6f04fa-503f-4596-a740-e807679f686e-signing-cabundle\") pod \"service-ca-9c57cc56f-2cggl\" (UID: \"1b6f04fa-503f-4596-a740-e807679f686e\") " pod="openshift-service-ca/service-ca-9c57cc56f-2cggl" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.894423 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/01118562-5441-420c-8d4c-2a983e584de3-cert\") pod \"ingress-canary-bbvkz\" (UID: \"01118562-5441-420c-8d4c-2a983e584de3\") " pod="openshift-ingress-canary/ingress-canary-bbvkz" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.894473 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/ae03eb1e-0698-4aef-9a6f-7708d92adf66-socket-dir\") pod \"csi-hostpathplugin-9g6sf\" (UID: \"ae03eb1e-0698-4aef-9a6f-7708d92adf66\") " pod="hostpath-provisioner/csi-hostpathplugin-9g6sf" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.894953 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/928b4fca-64a2-4c2c-a1b2-38bb069c13c8-config\") pod \"kube-apiserver-operator-766d6c64bb-zphkz\" (UID: \"928b4fca-64a2-4c2c-a1b2-38bb069c13c8\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zphkz" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.895821 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/281cf811-d3fe-4cd9-9292-74d81584ca10-config-volume\") pod \"dns-default-r7x5v\" (UID: \"281cf811-d3fe-4cd9-9292-74d81584ca10\") " pod="openshift-dns/dns-default-r7x5v" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.905885 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/bb7dd02d-b8d9-4954-aaee-df6a63ea0708-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-8qhsm\" (UID: \"bb7dd02d-b8d9-4954-aaee-df6a63ea0708\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8qhsm" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.906110 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hqz8m"] Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.906971 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/1b6f04fa-503f-4596-a740-e807679f686e-signing-key\") pod \"service-ca-9c57cc56f-2cggl\" (UID: \"1b6f04fa-503f-4596-a740-e807679f686e\") " pod="openshift-service-ca/service-ca-9c57cc56f-2cggl" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.906980 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ee5210db-86a0-4346-bfc4-e4a6460eaae1-apiservice-cert\") pod \"packageserver-d55dfcdfc-5r8jg\" (UID: \"ee5210db-86a0-4346-bfc4-e4a6460eaae1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5r8jg" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.911028 4799 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cb1d3fef-2917-4e7b-916d-3b6381e25c33-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-hkvxt\" (UID: \"cb1d3fef-2917-4e7b-916d-3b6381e25c33\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hkvxt" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.911212 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/c1b734d0-de3c-4519-8fb7-8408961ea09b-certs\") pod \"machine-config-server-jmpls\" (UID: \"c1b734d0-de3c-4519-8fb7-8408961ea09b\") " pod="openshift-machine-config-operator/machine-config-server-jmpls" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.911246 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/928b4fca-64a2-4c2c-a1b2-38bb069c13c8-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-zphkz\" (UID: \"928b4fca-64a2-4c2c-a1b2-38bb069c13c8\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zphkz" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.911521 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gc7vg\" (UniqueName: \"kubernetes.io/projected/c1b734d0-de3c-4519-8fb7-8408961ea09b-kube-api-access-gc7vg\") pod \"machine-config-server-jmpls\" (UID: \"c1b734d0-de3c-4519-8fb7-8408961ea09b\") " pod="openshift-machine-config-operator/machine-config-server-jmpls" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.934440 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/928b4fca-64a2-4c2c-a1b2-38bb069c13c8-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-zphkz\" (UID: \"928b4fca-64a2-4c2c-a1b2-38bb069c13c8\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zphkz" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.942892 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p9rwx\" (UniqueName: \"kubernetes.io/projected/977bd741-4d59-48b0-ab48-c22f6eecdb2e-kube-api-access-p9rwx\") pod \"machine-config-controller-84d6567774-98snb\" (UID: \"977bd741-4d59-48b0-ab48-c22f6eecdb2e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-98snb" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.964647 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g4knh\" (UniqueName: \"kubernetes.io/projected/c0095ab7-a332-4923-b1ac-90abd65087d5-kube-api-access-g4knh\") pod \"olm-operator-6b444d44fb-vfgnk\" (UID: \"c0095ab7-a332-4923-b1ac-90abd65087d5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vfgnk" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.990207 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mb4s6\" (UniqueName: \"kubernetes.io/projected/1786a2c0-54d3-44db-bd63-ac7a0cd09eb4-kube-api-access-mb4s6\") pod \"catalog-operator-68c6474976-vwbn8\" (UID: \"1786a2c0-54d3-44db-bd63-ac7a0cd09eb4\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vwbn8" Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.994634 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" 
(UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:05 crc kubenswrapper[4799]: E1010 16:34:05.995382 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:06.495365035 +0000 UTC m=+140.003689150 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.996506 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-plk2p"] Oct 10 16:34:05 crc kubenswrapper[4799]: I1010 16:34:05.997814 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc"] Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.001056 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zphkz" Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.008365 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-fvpc8"] Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.008589 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-98snb" Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.016966 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-w2jsl"] Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.024411 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbsrn\" (UniqueName: \"kubernetes.io/projected/cb1d3fef-2917-4e7b-916d-3b6381e25c33-kube-api-access-rbsrn\") pod \"kube-storage-version-migrator-operator-b67b599dd-hkvxt\" (UID: \"cb1d3fef-2917-4e7b-916d-3b6381e25c33\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hkvxt" Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.034031 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x7vr5"] Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.041896 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vfgnk" Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.042055 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jcnfx\" (UniqueName: \"kubernetes.io/projected/25cd298f-ccde-4805-801d-2d486c7e45da-kube-api-access-jcnfx\") pod \"collect-profiles-29335230-q6qww\" (UID: \"25cd298f-ccde-4805-801d-2d486c7e45da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335230-q6qww" Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.048304 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335230-q6qww" Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.060413 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ld5fb\" (UniqueName: \"kubernetes.io/projected/ee5210db-86a0-4346-bfc4-e4a6460eaae1-kube-api-access-ld5fb\") pod \"packageserver-d55dfcdfc-5r8jg\" (UID: \"ee5210db-86a0-4346-bfc4-e4a6460eaae1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5r8jg" Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.062326 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vwbn8" Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.085245 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-jmpls" Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.085383 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-669bd\" (UniqueName: \"kubernetes.io/projected/18c902fe-0322-4ada-b042-7693b4a5f024-kube-api-access-669bd\") pod \"multus-admission-controller-857f4d67dd-wzwz7\" (UID: \"18c902fe-0322-4ada-b042-7693b4a5f024\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-wzwz7" Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.095254 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-8lvfs" Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.096238 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:06 crc kubenswrapper[4799]: E1010 16:34:06.096603 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:06.596588574 +0000 UTC m=+140.104912699 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.105254 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n769b\" (UniqueName: \"kubernetes.io/projected/281cf811-d3fe-4cd9-9292-74d81584ca10-kube-api-access-n769b\") pod \"dns-default-r7x5v\" (UID: \"281cf811-d3fe-4cd9-9292-74d81584ca10\") " pod="openshift-dns/dns-default-r7x5v" Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.123257 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e1269123-5f8e-40e4-87cf-577d7b148684-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-cvkkx\" (UID: \"e1269123-5f8e-40e4-87cf-577d7b148684\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cvkkx" Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.148229 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-5sgz5"] Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.149661 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-n8sc6" Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.150949 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmg5b\" (UniqueName: \"kubernetes.io/projected/1b6f04fa-503f-4596-a740-e807679f686e-kube-api-access-lmg5b\") pod \"service-ca-9c57cc56f-2cggl\" (UID: \"1b6f04fa-503f-4596-a740-e807679f686e\") " pod="openshift-service-ca/service-ca-9c57cc56f-2cggl" Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.166618 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2r7w2\" (UniqueName: \"kubernetes.io/projected/e7393869-51bf-4974-97c7-33adfe1b44f5-kube-api-access-2r7w2\") pod \"marketplace-operator-79b997595-mn6dn\" (UID: \"e7393869-51bf-4974-97c7-33adfe1b44f5\") " pod="openshift-marketplace/marketplace-operator-79b997595-mn6dn" Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.174317 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-56xl2"] Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.196806 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:06 crc kubenswrapper[4799]: E1010 16:34:06.197186 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:06.697167776 +0000 UTC m=+140.205491891 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.200985 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-mvftm"] Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.202555 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djh9d\" (UniqueName: \"kubernetes.io/projected/d4b5557a-bd3b-419f-bd0c-e3ed6d1f8def-kube-api-access-djh9d\") pod \"migrator-59844c95c7-9kfj6\" (UID: \"d4b5557a-bd3b-419f-bd0c-e3ed6d1f8def\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9kfj6" Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.212608 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-qc5c7"] Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.214044 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qpr82\" (UniqueName: \"kubernetes.io/projected/01118562-5441-420c-8d4c-2a983e584de3-kube-api-access-qpr82\") pod \"ingress-canary-bbvkz\" (UID: \"01118562-5441-420c-8d4c-2a983e584de3\") " pod="openshift-ingress-canary/ingress-canary-bbvkz" Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.231303 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-prqrg" event={"ID":"089d7979-5043-482c-a464-b62a385a3f16","Type":"ContainerStarted","Data":"4cf548cd19a78a0a027049400453066b2f575778dea50d47fa5300353cee4128"} Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.233259 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-prqrg" event={"ID":"089d7979-5043-482c-a464-b62a385a3f16","Type":"ContainerStarted","Data":"440f9df4072624ff92c1b6a51f0f5788fe4a55997b3f27565d883f12e068f82e"} Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.233276 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-prqrg" event={"ID":"089d7979-5043-482c-a464-b62a385a3f16","Type":"ContainerStarted","Data":"83be8b10b4ac5d9c841774366acbb1de689f8fb50cc6e3f24ca23ed48c6bc071"} Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.236072 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x7vr5" event={"ID":"c37fc2e5-d729-4258-8506-22f328f5927a","Type":"ContainerStarted","Data":"cc849f9e59a67b5b5fe42301c5a54b851699223d8d186a528ad8f25d11244aa1"} Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.248446 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lj5cr\" (UniqueName: \"kubernetes.io/projected/f50c760d-39a2-4717-bf30-942dcd170900-kube-api-access-lj5cr\") pod \"service-ca-operator-777779d784-mgdrk\" (UID: \"f50c760d-39a2-4717-bf30-942dcd170900\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mgdrk" Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.248751 4799 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-plk2p" event={"ID":"e56235b4-8348-4fae-af0a-639fcacfc997","Type":"ContainerStarted","Data":"bb1b0d450642f77e542eee4101e3048e1338c20c3eecd9b84834da878c27525a"} Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.251246 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hsg4j" event={"ID":"3f2b6b90-47a2-4e3c-8394-13a961d92c03","Type":"ContainerStarted","Data":"264ef1267d5bf208f50c1ba74a89bb07b6a768f0ef7f30e52ac6d5d22c268021"} Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.251298 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hsg4j" event={"ID":"3f2b6b90-47a2-4e3c-8394-13a961d92c03","Type":"ContainerStarted","Data":"b4d2f5c032c312734bef13ca635b1af52fb47abf2010a639a290e9e6425735ed"} Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.262309 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hqz8m" event={"ID":"a8700150-e5e3-4b1b-a3c8-f7e781ce8f1f","Type":"ContainerStarted","Data":"fc898df0dfac54b5ef432e43f3267d688a9472a93b0d1d968b39721403b08088"} Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.262362 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hqz8m" event={"ID":"a8700150-e5e3-4b1b-a3c8-f7e781ce8f1f","Type":"ContainerStarted","Data":"9b391a98ffd9de774b8da5f051bee03e8a55688faba544ffa448121507929266"} Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.270641 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w2jsl" event={"ID":"f8bab52f-8a27-495e-80d2-9794b984939e","Type":"ContainerStarted","Data":"5bc503abd11d6908f63fe6a908a7c1e428f4244e55b441e9b569cee187281024"} Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.279746 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc" event={"ID":"713c24fb-e821-419a-b996-9661a6cbf57b","Type":"ContainerStarted","Data":"311bfedbb8a88104565e5e3af0f8511fc5b476c650ada94d6bd1c1599451a7e4"} Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.286492 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cvkkx" Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.287540 4799 generic.go:334] "Generic (PLEG): container finished" podID="cd6b9ce8-4fe8-405e-9399-354b7d8ee20b" containerID="d9a3d37578e5b31de29dec79b8f7078ba7e9293749f89f5cfc4da28589d50c45" exitCode=0 Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.287607 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" event={"ID":"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b","Type":"ContainerDied","Data":"d9a3d37578e5b31de29dec79b8f7078ba7e9293749f89f5cfc4da28589d50c45"} Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.288429 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fv5rp\" (UniqueName: \"kubernetes.io/projected/bb7dd02d-b8d9-4954-aaee-df6a63ea0708-kube-api-access-fv5rp\") pod \"package-server-manager-789f6589d5-8qhsm\" (UID: \"bb7dd02d-b8d9-4954-aaee-df6a63ea0708\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8qhsm" Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.294537 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hkvxt" Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.297981 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gmb7b\" (UniqueName: \"kubernetes.io/projected/ae03eb1e-0698-4aef-9a6f-7708d92adf66-kube-api-access-gmb7b\") pod \"csi-hostpathplugin-9g6sf\" (UID: \"ae03eb1e-0698-4aef-9a6f-7708d92adf66\") " pod="hostpath-provisioner/csi-hostpathplugin-9g6sf" Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.298326 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-qpw4g" event={"ID":"6a574afe-31ee-4706-90c0-a9c477f5bce7","Type":"ContainerStarted","Data":"3f0803474266721361f71800f44ca55c9f3064e10cf73bc1f361ee84515ea66e"} Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.299653 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:06 crc kubenswrapper[4799]: E1010 16:34:06.301588 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:06.801569215 +0000 UTC m=+140.309893330 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.311603 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-jmpls" event={"ID":"c1b734d0-de3c-4519-8fb7-8408961ea09b","Type":"ContainerStarted","Data":"b4d906b5f49ce69449815f25b4f580cba98aa53c52c813f1783301968ae34b12"} Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.314530 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5r8jg" Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.321101 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9kfj6" Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.329344 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-mgdrk" Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.330499 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-6h8q7" event={"ID":"a63337d7-2cc9-49b7-af06-550a24f39ff0","Type":"ContainerStarted","Data":"b3a44e859da281aaddcb59a9b4e9842920db894ad646a5de712dc341acbc5307"} Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.330543 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-6h8q7" event={"ID":"a63337d7-2cc9-49b7-af06-550a24f39ff0","Type":"ContainerStarted","Data":"dc4863e286d2e55e0173b3b0297c8dd76cadc7672a5ca44b92deb0500e9cde8c"} Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.331071 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-6h8q7" Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.339032 4799 patch_prober.go:28] interesting pod/console-operator-58897d9998-6h8q7 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.22:8443/readyz\": dial tcp 10.217.0.22:8443: connect: connection refused" start-of-body= Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.339280 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-6h8q7" podUID="a63337d7-2cc9-49b7-af06-550a24f39ff0" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.22:8443/readyz\": dial tcp 10.217.0.22:8443: connect: connection refused" Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.339568 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-2cggl" Oct 10 16:34:06 crc kubenswrapper[4799]: W1010 16:34:06.346266 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2dff39f3_58aa_4d27_a85f_c1a09bb3d83e.slice/crio-7eeed19c9754f5603e5b64220964e8394c9de120b42c0ec5f2513a97f162811c WatchSource:0}: Error finding container 7eeed19c9754f5603e5b64220964e8394c9de120b42c0ec5f2513a97f162811c: Status 404 returned error can't find the container with id 7eeed19c9754f5603e5b64220964e8394c9de120b42c0ec5f2513a97f162811c Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.358245 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-wzwz7" Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.378233 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8qhsm" Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.381246 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-mn6dn" Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.388984 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6mnhh" event={"ID":"a958b529-c3f0-4131-be7b-4d81a3c25499","Type":"ContainerStarted","Data":"50e7522e448be3cf4ce320cca81c8bc595c590e8e1ab0abb09fac4417fd69080"} Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.389033 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6mnhh" event={"ID":"a958b529-c3f0-4131-be7b-4d81a3c25499","Type":"ContainerStarted","Data":"d6232812f2497334a712060dd23e257750f5c8c215e35915ee5edcf4307fc09a"} Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.391318 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-r7x5v" Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.397312 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-nsrr7" event={"ID":"6f91a26e-3654-415e-9ebc-91034a85cbc1","Type":"ContainerStarted","Data":"3c96ae06e8e082122f498415ee0623c9e49c431ff6593bb2b10133ff1aba23f3"} Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.397364 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-nsrr7" event={"ID":"6f91a26e-3654-415e-9ebc-91034a85cbc1","Type":"ContainerStarted","Data":"3847bf49890b7d988a898a42d0549d258c2585b4b824975942437440561c25c0"} Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.400257 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:06 crc kubenswrapper[4799]: E1010 16:34:06.400583 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-10-10 16:34:06.900567078 +0000 UTC m=+140.408891193 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.400894 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:06 crc kubenswrapper[4799]: E1010 16:34:06.403168 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:06.903150233 +0000 UTC m=+140.411474348 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.411225 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9v24" event={"ID":"eec5360d-85e3-4785-98b2-10ba224ffdfe","Type":"ContainerStarted","Data":"d8c2e3887c6f56897f86b85b44a09761f864d67f6fc5bbb6108fcf3f52f54f29"} Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.411276 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9v24" event={"ID":"eec5360d-85e3-4785-98b2-10ba224ffdfe","Type":"ContainerStarted","Data":"fb60151aa5e36ee07ea715a4cd783e3f94a85bdd5e253f0f5252ce5851e97264"} Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.420318 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-fvpc8" event={"ID":"dd0fbd79-76a9-4a87-a67b-20e782993376","Type":"ContainerStarted","Data":"fe566607a90d72c2ee93b3e2245e25a3a5ae8c510ba26555174f5f61387257f4"} Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.422489 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-nlsfc" event={"ID":"98460888-57af-4ef6-a390-c0a592164ddb","Type":"ContainerStarted","Data":"ddaa6e605923a17557a1d529ec2116b8827dbfc1a014bfe22fda578850c0a196"} Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.423947 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-9g6sf" Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.428064 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-w26tc" event={"ID":"159d9178-1402-4232-a9df-ad4389bed9b0","Type":"ContainerStarted","Data":"21a8fcac40200951b39fc2e332e66d7edb6687194eba0c47546ba92e821fb0a2"} Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.428098 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-w26tc" event={"ID":"159d9178-1402-4232-a9df-ad4389bed9b0","Type":"ContainerStarted","Data":"7b40c288205439d86ed10d083926f9a808fd812ead748d9ad65a01b270d08eba"} Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.430829 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-bbvkz" Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.445558 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqz89" event={"ID":"eb807790-1011-4dfc-842e-fd4106f7a6c3","Type":"ContainerStarted","Data":"9ca1f9a26d7b3bce54aad3842e4f0100e439f6d60506c2bf7519a41821b35835"} Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.445598 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqz89" event={"ID":"eb807790-1011-4dfc-842e-fd4106f7a6c3","Type":"ContainerStarted","Data":"d589ab3fc64bdb85a327668b502e0eba5cbb8084c355d214fa5f344561e46791"} Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.480435 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-98snb"] Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.497247 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335230-q6qww"] Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.502500 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:06 crc kubenswrapper[4799]: E1010 16:34:06.504712 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:07.0046906 +0000 UTC m=+140.513014715 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.590681 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vwbn8"] Oct 10 16:34:06 crc kubenswrapper[4799]: W1010 16:34:06.599969 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod977bd741_4d59_48b0_ab48_c22f6eecdb2e.slice/crio-aa9d28245b857435111a253849200a89b6174e603d89a7611bd42c52bb633d9d WatchSource:0}: Error finding container aa9d28245b857435111a253849200a89b6174e603d89a7611bd42c52bb633d9d: Status 404 returned error can't find the container with id aa9d28245b857435111a253849200a89b6174e603d89a7611bd42c52bb633d9d Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.603986 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:06 crc kubenswrapper[4799]: E1010 16:34:06.604623 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:07.104611396 +0000 UTC m=+140.612935511 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.639296 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zphkz"] Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.676492 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-8lvfs"] Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.706487 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:06 crc kubenswrapper[4799]: E1010 16:34:06.706908 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-10-10 16:34:07.206895011 +0000 UTC m=+140.715219126 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.797975 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vfgnk"] Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.807295 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-n8sc6"] Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.807353 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cvkkx"] Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.809204 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:06 crc kubenswrapper[4799]: E1010 16:34:06.809983 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:07.309968737 +0000 UTC m=+140.818292852 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:06 crc kubenswrapper[4799]: I1010 16:34:06.915419 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:06 crc kubenswrapper[4799]: E1010 16:34:06.916405 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:07.415810462 +0000 UTC m=+140.924134577 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.023961 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:07 crc kubenswrapper[4799]: E1010 16:34:07.024658 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:07.524646022 +0000 UTC m=+141.032970137 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.067379 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-mgdrk"] Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.068815 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-qpw4g" Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.078483 4799 patch_prober.go:28] interesting pod/router-default-5444994796-qpw4g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 10 16:34:07 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld Oct 10 16:34:07 crc kubenswrapper[4799]: [+]process-running ok Oct 10 16:34:07 crc kubenswrapper[4799]: healthz check failed Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.078535 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qpw4g" podUID="6a574afe-31ee-4706-90c0-a9c477f5bce7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.130337 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:07 crc kubenswrapper[4799]: E1010 16:34:07.131157 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:07.631137554 +0000 UTC m=+141.139461669 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.175505 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-2cggl"] Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.200967 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-r7x5v"] Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.234892 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:07 crc kubenswrapper[4799]: E1010 16:34:07.235282 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:07.735268636 +0000 UTC m=+141.243592741 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.264024 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5r8jg"] Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.269662 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-wzwz7"] Oct 10 16:34:07 crc kubenswrapper[4799]: W1010 16:34:07.282123 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod281cf811_d3fe_4cd9_9292_74d81584ca10.slice/crio-73799f47dbea7dce896237fabaad87832779060d7911be5be4146974371dceeb WatchSource:0}: Error finding container 73799f47dbea7dce896237fabaad87832779060d7911be5be4146974371dceeb: Status 404 returned error can't find the container with id 73799f47dbea7dce896237fabaad87832779060d7911be5be4146974371dceeb Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.312064 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hkvxt"] Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.335560 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:07 crc kubenswrapper[4799]: E1010 16:34:07.336125 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:07.836110755 +0000 UTC m=+141.344434870 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.348880 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8qhsm"] Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.382116 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-9kfj6"] Oct 10 16:34:07 crc kubenswrapper[4799]: W1010 16:34:07.434253 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcb1d3fef_2917_4e7b_916d_3b6381e25c33.slice/crio-6175dcfded948ae016e9bbcb115cc660ffc76597767cfca1795111dcd6b35910 WatchSource:0}: Error finding container 6175dcfded948ae016e9bbcb115cc660ffc76597767cfca1795111dcd6b35910: Status 404 returned error can't find the container with id 6175dcfded948ae016e9bbcb115cc660ffc76597767cfca1795111dcd6b35910 Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.435559 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mn6dn"] Oct 10 16:34:07 crc kubenswrapper[4799]: W1010 16:34:07.436078 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podee5210db_86a0_4346_bfc4_e4a6460eaae1.slice/crio-2a57578a8bb2236c1b0e872a823c4e18c399774a8bc6dcb472d9017259a1a1df WatchSource:0}: Error finding container 2a57578a8bb2236c1b0e872a823c4e18c399774a8bc6dcb472d9017259a1a1df: Status 404 returned error can't find the container with id 2a57578a8bb2236c1b0e872a823c4e18c399774a8bc6dcb472d9017259a1a1df Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.436904 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:07 crc kubenswrapper[4799]: E1010 16:34:07.440166 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-10-10 16:34:07.940144424 +0000 UTC m=+141.448468539 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:07 crc kubenswrapper[4799]: W1010 16:34:07.473106 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd4b5557a_bd3b_419f_bd0c_e3ed6d1f8def.slice/crio-a4acffa0cbaf8f3e8c252a903dd039b575d695ccdbd374ff527e08e2e5f6904e WatchSource:0}: Error finding container a4acffa0cbaf8f3e8c252a903dd039b575d695ccdbd374ff527e08e2e5f6904e: Status 404 returned error can't find the container with id a4acffa0cbaf8f3e8c252a903dd039b575d695ccdbd374ff527e08e2e5f6904e Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.524285 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-5sgz5" event={"ID":"18a6e757-37be-47ce-98ea-bda0221cab2c","Type":"ContainerStarted","Data":"eead60f96cf7704b4cc72062aa1583a8c7dd2e2b0d457a40b3fc02f36b61b0b2"} Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.537805 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:07 crc kubenswrapper[4799]: E1010 16:34:07.538417 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:08.038403058 +0000 UTC m=+141.546727173 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.546973 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vwbn8" event={"ID":"1786a2c0-54d3-44db-bd63-ac7a0cd09eb4","Type":"ContainerStarted","Data":"49043284a5cbaf4e2d81259f5354ea9c8167ebd548f89fe7619ea14fe6a6b1cd"} Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.563429 4799 generic.go:334] "Generic (PLEG): container finished" podID="713c24fb-e821-419a-b996-9661a6cbf57b" containerID="dcd5b29683f2acdb1fa49a4ca89d4f6963bb89277e6818c1cf1738b3558b3d88" exitCode=0 Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.563479 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc" event={"ID":"713c24fb-e821-419a-b996-9661a6cbf57b","Type":"ContainerDied","Data":"dcd5b29683f2acdb1fa49a4ca89d4f6963bb89277e6818c1cf1738b3558b3d88"} Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.568560 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9v24" event={"ID":"eec5360d-85e3-4785-98b2-10ba224ffdfe","Type":"ContainerStarted","Data":"14d7e4347e6bf1c565f8f6b53061368ecce06e2c5a2c88e92b952ab92f737622"} Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.577381 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zphkz" event={"ID":"928b4fca-64a2-4c2c-a1b2-38bb069c13c8","Type":"ContainerStarted","Data":"798022383eacfc0f741b04dfcd0110dea9259cdf1f3cae43e505b71122fbbda3"} Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.579532 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-jmpls" event={"ID":"c1b734d0-de3c-4519-8fb7-8408961ea09b","Type":"ContainerStarted","Data":"d99c324508c04c385a046bdf35bf37066b6af87f49be00b2da60626ed9a4d71e"} Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.586742 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-2cggl" event={"ID":"1b6f04fa-503f-4596-a740-e807679f686e","Type":"ContainerStarted","Data":"886169b0b69d5450e8f8262c14e06682a7587946ea83bc4314fa6e0de8de48ad"} Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.599549 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w2jsl" event={"ID":"f8bab52f-8a27-495e-80d2-9794b984939e","Type":"ContainerStarted","Data":"a2313b9847c5d6b5a564afa24e9c95dadc1bf76e172a36672539400197d808c6"} Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.600284 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w2jsl" Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.607970 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqz89" 
event={"ID":"eb807790-1011-4dfc-842e-fd4106f7a6c3","Type":"ContainerStarted","Data":"52b181db8c247f97f2c2cb3b425f348946ecdfbe9f016c38a133f110b6bfe167"} Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.609085 4799 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-w2jsl container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.609125 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w2jsl" podUID="f8bab52f-8a27-495e-80d2-9794b984939e" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.613317 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-wzwz7" event={"ID":"18c902fe-0322-4ada-b042-7693b4a5f024","Type":"ContainerStarted","Data":"658e5599351fe1cb33a98dca8249f479d3af588353073cfab36e6d02a3e82115"} Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.636230 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-fvpc8" event={"ID":"dd0fbd79-76a9-4a87-a67b-20e782993376","Type":"ContainerStarted","Data":"2a546856fc823e003adba004f8dcb743f630836d1b927980045531a4243ea0a6"} Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.636643 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-fvpc8" Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.640439 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.640612 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-qc5c7" event={"ID":"f237a066-51a9-475f-80b5-1627fb073e16","Type":"ContainerStarted","Data":"04b9af6b1c82acf1596536133b2f3cafc722195618e6a7f0eaf59d68ad3cf716"} Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.640659 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-qc5c7" event={"ID":"f237a066-51a9-475f-80b5-1627fb073e16","Type":"ContainerStarted","Data":"b7a3d59078c9d41d517350f5d0e1ce84ef1a63dda26f0e387ac3b072dab72d09"} Oct 10 16:34:07 crc kubenswrapper[4799]: E1010 16:34:07.640817 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:08.140803907 +0000 UTC m=+141.649128022 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.647081 4799 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-fvpc8 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body= Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.647128 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-fvpc8" podUID="dd0fbd79-76a9-4a87-a67b-20e782993376" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.660701 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x7vr5" event={"ID":"c37fc2e5-d729-4258-8506-22f328f5927a","Type":"ContainerStarted","Data":"b73f9c1384020c9c7e3ac829146add4db4c6bf98bd490da9d081bb87015c296b"} Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.673312 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-9g6sf"] Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.689132 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-8lvfs" event={"ID":"2866fbf1-3a49-4e4c-867b-86a40ae85ebe","Type":"ContainerStarted","Data":"7484481868bc2698a0dd5acf6ac242d50abf95c1be30280b0406989f1eba3894"} Oct 10 16:34:07 crc kubenswrapper[4799]: W1010 16:34:07.723955 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podae03eb1e_0698_4aef_9a6f_7708d92adf66.slice/crio-8fea2f2a82beadff96fa0df4f44c6e3ee41ea026e2f73f8d47614d008d6782bf WatchSource:0}: Error finding container 8fea2f2a82beadff96fa0df4f44c6e3ee41ea026e2f73f8d47614d008d6782bf: Status 404 returned error can't find the container with id 8fea2f2a82beadff96fa0df4f44c6e3ee41ea026e2f73f8d47614d008d6782bf Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.731020 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-mvftm" event={"ID":"2dff39f3-58aa-4d27-a85f-c1a09bb3d83e","Type":"ContainerStarted","Data":"7eeed19c9754f5603e5b64220964e8394c9de120b42c0ec5f2513a97f162811c"} Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.743858 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:07 crc kubenswrapper[4799]: E1010 16:34:07.745286 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:08.245267897 +0000 UTC m=+141.753592012 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.758401 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5r8jg" event={"ID":"ee5210db-86a0-4346-bfc4-e4a6460eaae1","Type":"ContainerStarted","Data":"2a57578a8bb2236c1b0e872a823c4e18c399774a8bc6dcb472d9017259a1a1df"} Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.760030 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x7vr5" podStartSLOduration=117.760018789 podStartE2EDuration="1m57.760018789s" podCreationTimestamp="2025-10-10 16:32:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:07.759317751 +0000 UTC m=+141.267641856" watchObservedRunningTime="2025-10-10 16:34:07.760018789 +0000 UTC m=+141.268342904" Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.799705 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hqz8m" podStartSLOduration=118.799686258 podStartE2EDuration="1m58.799686258s" podCreationTimestamp="2025-10-10 16:32:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:07.798880217 +0000 UTC m=+141.307204342" watchObservedRunningTime="2025-10-10 16:34:07.799686258 +0000 UTC m=+141.308010363" Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.803969 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-bbvkz"] Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.813516 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-plk2p" event={"ID":"e56235b4-8348-4fae-af0a-639fcacfc997","Type":"ContainerStarted","Data":"2f01856d95e01010ef3f7f9e81e71ca39baddc738724c4daf1413e0b0bf5fdb9"} Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.814216 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-plk2p" Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.821134 4799 patch_prober.go:28] interesting pod/downloads-7954f5f757-plk2p container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.821189 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-plk2p" podUID="e56235b4-8348-4fae-af0a-639fcacfc997" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: 
connection refused" Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.826019 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-qpw4g" podStartSLOduration=118.82599641 podStartE2EDuration="1m58.82599641s" podCreationTimestamp="2025-10-10 16:32:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:07.823537828 +0000 UTC m=+141.331861943" watchObservedRunningTime="2025-10-10 16:34:07.82599641 +0000 UTC m=+141.334320535" Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.831086 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-mgdrk" event={"ID":"f50c760d-39a2-4717-bf30-942dcd170900","Type":"ContainerStarted","Data":"33578ba3fc69788eaceedfa16a6a56758260b8353849b64d46b42d5a4c96c18d"} Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.846851 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:07 crc kubenswrapper[4799]: E1010 16:34:07.848927 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:08.348916227 +0000 UTC m=+141.857240342 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.881371 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6mnhh" podStartSLOduration=117.881354194 podStartE2EDuration="1m57.881354194s" podCreationTimestamp="2025-10-10 16:32:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:07.880249536 +0000 UTC m=+141.388573651" watchObservedRunningTime="2025-10-10 16:34:07.881354194 +0000 UTC m=+141.389678319" Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.883382 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-nlsfc" event={"ID":"98460888-57af-4ef6-a390-c0a592164ddb","Type":"ContainerStarted","Data":"f5a8f33229e70d4881f36b699b2133a08b489ba376d4c7e180ca5f81cff980a9"} Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.909618 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cqz89" podStartSLOduration=118.909601755 podStartE2EDuration="1m58.909601755s" podCreationTimestamp="2025-10-10 16:32:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:07.907096992 +0000 UTC m=+141.415421127" watchObservedRunningTime="2025-10-10 16:34:07.909601755 +0000 UTC m=+141.417925870" Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.935113 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8qhsm" event={"ID":"bb7dd02d-b8d9-4954-aaee-df6a63ea0708","Type":"ContainerStarted","Data":"cc10452aea1a55fe4334402a6f832b55eb85ac0c4ac05add176188f03d151522"} Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.946050 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hsg4j" podStartSLOduration=118.946029422 podStartE2EDuration="1m58.946029422s" podCreationTimestamp="2025-10-10 16:32:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:07.945041638 +0000 UTC m=+141.453365763" watchObservedRunningTime="2025-10-10 16:34:07.946029422 +0000 UTC m=+141.454353537" Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.949114 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:07 crc kubenswrapper[4799]: E1010 16:34:07.950315 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:08.45029885 +0000 UTC m=+141.958622965 (durationBeforeRetry 500ms). 
Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.976395 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-r7x5v" event={"ID":"281cf811-d3fe-4cd9-9292-74d81584ca10","Type":"ContainerStarted","Data":"73799f47dbea7dce896237fabaad87832779060d7911be5be4146974371dceeb"}
Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.986371 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cvkkx" event={"ID":"e1269123-5f8e-40e4-87cf-577d7b148684","Type":"ContainerStarted","Data":"b5dad6c90715c1bf80e3108491b11758f724f41f7cfd2d2b8aefdbc6e65d150a"}
Oct 10 16:34:07 crc kubenswrapper[4799]: I1010 16:34:07.994432 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-fvpc8" podStartSLOduration=118.994412111 podStartE2EDuration="1m58.994412111s" podCreationTimestamp="2025-10-10 16:32:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:07.992125703 +0000 UTC m=+141.500449808" watchObservedRunningTime="2025-10-10 16:34:07.994412111 +0000 UTC m=+141.502736226"
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.035589 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vfgnk" event={"ID":"c0095ab7-a332-4923-b1ac-90abd65087d5","Type":"ContainerStarted","Data":"a0e3b66af1706caf7c45e336ea695d714d9c9263366de33a7d19a052f9e33c7a"}
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.037084 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vfgnk"
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.039962 4799 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-vfgnk container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.35:8443/healthz\": dial tcp 10.217.0.35:8443: connect: connection refused" start-of-body=
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.040017 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vfgnk" podUID="c0095ab7-a332-4923-b1ac-90abd65087d5" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.35:8443/healthz\": dial tcp 10.217.0.35:8443: connect: connection refused"
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.055148 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:08 crc kubenswrapper[4799]: E1010 16:34:08.055668 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:08.555653853 +0000 UTC m=+142.063977958 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.063891 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" event={"ID":"0691553d-e534-4c08-b56e-d99bd02e53fa","Type":"ContainerStarted","Data":"ea13a2790b014ed5cc17450ab6446824bfbb6bf57348df82f1a9544ba7615c24"}
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.063957 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" event={"ID":"0691553d-e534-4c08-b56e-d99bd02e53fa","Type":"ContainerStarted","Data":"8839b81d79a5232353d7248137a71c031438c749d48354f56da45a214a244ec8"}
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.064766 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-56xl2"
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.074679 4799 patch_prober.go:28] interesting pod/router-default-5444994796-qpw4g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 10 16:34:08 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld
Oct 10 16:34:08 crc kubenswrapper[4799]: [+]process-running ok
Oct 10 16:34:08 crc kubenswrapper[4799]: healthz check failed
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.074738 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qpw4g" podUID="6a574afe-31ee-4706-90c0-a9c477f5bce7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.080454 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-n8sc6" event={"ID":"5284905f-e20b-49be-8cef-df7f96e4460d","Type":"ContainerStarted","Data":"89aeac4524db9376fac3989a17188d59b5c147f04bf09034d4f112ea7505c3f8"}
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.081117 4799 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-56xl2 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.32:6443/healthz\": dial tcp 10.217.0.32:6443: connect: connection refused" start-of-body=
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.081174 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" podUID="0691553d-e534-4c08-b56e-d99bd02e53fa" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.32:6443/healthz\": dial tcp 10.217.0.32:6443: connect: connection refused"
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.101942 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-nsrr7" podStartSLOduration=119.101915858 podStartE2EDuration="1m59.101915858s" podCreationTimestamp="2025-10-10 16:32:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:08.048146864 +0000 UTC m=+141.556470979" watchObservedRunningTime="2025-10-10 16:34:08.101915858 +0000 UTC m=+141.610239973"
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.102428 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-6h8q7" podStartSLOduration=119.102423291 podStartE2EDuration="1m59.102423291s" podCreationTimestamp="2025-10-10 16:32:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:08.090630104 +0000 UTC m=+141.598954239" watchObservedRunningTime="2025-10-10 16:34:08.102423291 +0000 UTC m=+141.610747396"
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.103313 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335230-q6qww" event={"ID":"25cd298f-ccde-4805-801d-2d486c7e45da","Type":"ContainerStarted","Data":"ca4b4fc89f1aea2fe5397ec36f16bdd0f479f84c3eec176261510eee8295157f"}
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.128874 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-98snb" event={"ID":"977bd741-4d59-48b0-ab48-c22f6eecdb2e","Type":"ContainerStarted","Data":"7e59ab079cf7071646876cb794fe540a26638039fac1c7ee35ae3146efd452d4"}
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.128912 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-98snb" event={"ID":"977bd741-4d59-48b0-ab48-c22f6eecdb2e","Type":"ContainerStarted","Data":"aa9d28245b857435111a253849200a89b6174e603d89a7611bd42c52bb633d9d"}
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.159502 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 10 16:34:08 crc kubenswrapper[4799]: E1010 16:34:08.160864 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:08.660850182 +0000 UTC m=+142.169174297 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.169063 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-6h8q7"
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.203501 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-qc5c7" podStartSLOduration=119.203485485 podStartE2EDuration="1m59.203485485s" podCreationTimestamp="2025-10-10 16:32:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:08.130131598 +0000 UTC m=+141.638455713" watchObservedRunningTime="2025-10-10 16:34:08.203485485 +0000 UTC m=+141.711809600"
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.230318 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w2jsl" podStartSLOduration=118.23029096 podStartE2EDuration="1m58.23029096s" podCreationTimestamp="2025-10-10 16:32:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:08.229520801 +0000 UTC m=+141.737844926" watchObservedRunningTime="2025-10-10 16:34:08.23029096 +0000 UTC m=+141.738615075"
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.247400 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-jmpls" podStartSLOduration=6.247379151 podStartE2EDuration="6.247379151s" podCreationTimestamp="2025-10-10 16:34:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:08.246533589 +0000 UTC m=+141.754857704" watchObservedRunningTime="2025-10-10 16:34:08.247379151 +0000 UTC m=+141.755703266"
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.261703 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:08 crc kubenswrapper[4799]: E1010 16:34:08.263906 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:08.763891966 +0000 UTC m=+142.272216081 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.280978 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-w26tc" podStartSLOduration=118.280956346 podStartE2EDuration="1m58.280956346s" podCreationTimestamp="2025-10-10 16:32:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:08.280410802 +0000 UTC m=+141.788734917" watchObservedRunningTime="2025-10-10 16:34:08.280956346 +0000 UTC m=+141.789280461"
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.371563 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 10 16:34:08 crc kubenswrapper[4799]: E1010 16:34:08.372319 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:08.872302886 +0000 UTC m=+142.380627001 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.386485 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-prqrg" podStartSLOduration=118.386459413 podStartE2EDuration="1m58.386459413s" podCreationTimestamp="2025-10-10 16:32:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:08.374609404 +0000 UTC m=+141.882933539" watchObservedRunningTime="2025-10-10 16:34:08.386459413 +0000 UTC m=+141.894783538"
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.405549 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-w9v24" podStartSLOduration=120.405526083 podStartE2EDuration="2m0.405526083s" podCreationTimestamp="2025-10-10 16:32:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:08.346510547 +0000 UTC m=+141.854834662" watchObservedRunningTime="2025-10-10 16:34:08.405526083 +0000 UTC m=+141.913850208"
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.475645 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:08 crc kubenswrapper[4799]: E1010 16:34:08.476044 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:08.976028928 +0000 UTC m=+142.484353043 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.485073 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-98snb" podStartSLOduration=118.485054855 podStartE2EDuration="1m58.485054855s" podCreationTimestamp="2025-10-10 16:32:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:08.45543857 +0000 UTC m=+141.963762695" watchObservedRunningTime="2025-10-10 16:34:08.485054855 +0000 UTC m=+141.993378970"
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.493891 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" podStartSLOduration=119.493862957 podStartE2EDuration="1m59.493862957s" podCreationTimestamp="2025-10-10 16:32:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:08.484355288 +0000 UTC m=+141.992679403" watchObservedRunningTime="2025-10-10 16:34:08.493862957 +0000 UTC m=+142.002187072"
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.581638 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 10 16:34:08 crc kubenswrapper[4799]: E1010 16:34:08.582307 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:09.082292914 +0000 UTC m=+142.590617019 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.583461 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-nlsfc" podStartSLOduration=119.583440923 podStartE2EDuration="1m59.583440923s" podCreationTimestamp="2025-10-10 16:32:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:08.541825845 +0000 UTC m=+142.050149960" watchObservedRunningTime="2025-10-10 16:34:08.583440923 +0000 UTC m=+142.091765038"
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.684470 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:08 crc kubenswrapper[4799]: E1010 16:34:08.684924 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:09.184911028 +0000 UTC m=+142.693235143 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.705998 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-mvftm" podStartSLOduration=119.705980698 podStartE2EDuration="1m59.705980698s" podCreationTimestamp="2025-10-10 16:32:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:08.705456625 +0000 UTC m=+142.213780750" watchObservedRunningTime="2025-10-10 16:34:08.705980698 +0000 UTC m=+142.214304813"
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.788000 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.788417 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vfgnk" podStartSLOduration=118.788355073 podStartE2EDuration="1m58.788355073s" podCreationTimestamp="2025-10-10 16:32:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:08.75410117 +0000 UTC m=+142.262425285" watchObservedRunningTime="2025-10-10 16:34:08.788355073 +0000 UTC m=+142.296679188"
Oct 10 16:34:08 crc kubenswrapper[4799]: E1010 16:34:08.789384 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:09.289363168 +0000 UTC m=+142.797687273 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.789471 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:08 crc kubenswrapper[4799]: E1010 16:34:08.789786 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:09.289779169 +0000 UTC m=+142.798103284 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.891260 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 10 16:34:08 crc kubenswrapper[4799]: E1010 16:34:08.891612 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:09.391596872 +0000 UTC m=+142.899920987 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:08 crc kubenswrapper[4799]: I1010 16:34:08.992256 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:08 crc kubenswrapper[4799]: E1010 16:34:08.992561 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:09.492548524 +0000 UTC m=+143.000872639 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.007676 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cvkkx" podStartSLOduration=120.007656125 podStartE2EDuration="2m0.007656125s" podCreationTimestamp="2025-10-10 16:32:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:09.005256244 +0000 UTC m=+142.513580369" watchObservedRunningTime="2025-10-10 16:34:09.007656125 +0000 UTC m=+142.515980240"
Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.070440 4799 patch_prober.go:28] interesting pod/router-default-5444994796-qpw4g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 10 16:34:09 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld
Oct 10 16:34:09 crc kubenswrapper[4799]: [+]process-running ok
Oct 10 16:34:09 crc kubenswrapper[4799]: healthz check failed
Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.070732 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qpw4g" podUID="6a574afe-31ee-4706-90c0-a9c477f5bce7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.093780 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 10 16:34:09 crc kubenswrapper[4799]: E1010 16:34:09.094235 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:09.594220875 +0000 UTC m=+143.102544990 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.124720 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-plk2p" podStartSLOduration=120.124701042 podStartE2EDuration="2m0.124701042s" podCreationTimestamp="2025-10-10 16:32:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:09.094352288 +0000 UTC m=+142.602676413" watchObservedRunningTime="2025-10-10 16:34:09.124701042 +0000 UTC m=+142.633025157"
Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.137406 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-r7x5v" event={"ID":"281cf811-d3fe-4cd9-9292-74d81584ca10","Type":"ContainerStarted","Data":"5d57750015374912441b35c7ee40ebb55573e8f9f93884882dbe023e0905ba94"}
Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.137459 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-r7x5v" event={"ID":"281cf811-d3fe-4cd9-9292-74d81584ca10","Type":"ContainerStarted","Data":"60b89eb21a7a375a8b6f5445488b1e801cc2189e61becb20c2183aca832ac1b6"}
Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.137569 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-r7x5v"
Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.150114 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mn6dn" event={"ID":"e7393869-51bf-4974-97c7-33adfe1b44f5","Type":"ContainerStarted","Data":"862b8488e89b1f15ca8d6add01fab3069539611bc74c85e1f24779fedf97e88c"}
Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.150160 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mn6dn" event={"ID":"e7393869-51bf-4974-97c7-33adfe1b44f5","Type":"ContainerStarted","Data":"daffa33bce56726cb5e580134618dad18f8c9b3589079a5f16bc6401fdd2a1de"}
Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.150303 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-mn6dn"
Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.151391 4799 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-mn6dn container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" start-of-body=
Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.151437 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-mn6dn" podUID="e7393869-51bf-4974-97c7-33adfe1b44f5" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused"
prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-mn6dn" podUID="e7393869-51bf-4974-97c7-33adfe1b44f5" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.156359 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-wzwz7" event={"ID":"18c902fe-0322-4ada-b042-7693b4a5f024","Type":"ContainerStarted","Data":"8709c658496873cdfca7df98f128210dc9baa7aa72a0245ff6a28389d1bfb7fb"} Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.156430 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-wzwz7" event={"ID":"18c902fe-0322-4ada-b042-7693b4a5f024","Type":"ContainerStarted","Data":"d7947a30a17fff17cc2539e7e0d54567f1e43f58e8590336f7d98f13ba64585d"} Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.158821 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vwbn8" event={"ID":"1786a2c0-54d3-44db-bd63-ac7a0cd09eb4","Type":"ContainerStarted","Data":"d744ba2906e8b7f998f585993badac4af79810625a0bc2a35ef9dc4a1b316bcb"} Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.159013 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vwbn8" Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.169580 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29335230-q6qww" podStartSLOduration=120.169556852 podStartE2EDuration="2m0.169556852s" podCreationTimestamp="2025-10-10 16:32:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:09.165690764 +0000 UTC m=+142.674014879" watchObservedRunningTime="2025-10-10 16:34:09.169556852 +0000 UTC m=+142.677880967" Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.169746 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8qhsm" event={"ID":"bb7dd02d-b8d9-4954-aaee-df6a63ea0708","Type":"ContainerStarted","Data":"f68e5b93839b033acb7475d48b5a585f11387c0b059a332aa0e34b93cb15d0d9"} Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.169969 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8qhsm" event={"ID":"bb7dd02d-b8d9-4954-aaee-df6a63ea0708","Type":"ContainerStarted","Data":"d1ce6f3c64234ddf0e86c7bf47201e92f9e3ab750a408eb3cd9c7a57add023e4"} Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.170306 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8qhsm" Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.185631 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-2cggl" event={"ID":"1b6f04fa-503f-4596-a740-e807679f686e","Type":"ContainerStarted","Data":"c5922d400c89aafbb6ff11cabe27854a402f0ccd2e81e97b5bc9f30232326fd0"} Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.187860 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vwbn8" Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.196248 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:09 crc kubenswrapper[4799]: E1010 16:34:09.196663 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:09.696650844 +0000 UTC m=+143.204974959 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.197502 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-8lvfs" event={"ID":"2866fbf1-3a49-4e4c-867b-86a40ae85ebe","Type":"ContainerStarted","Data":"866009b0d6059852883926b4b56b73035c9f18f410a5bc1c0a36dfe8e48c700a"} Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.205436 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335230-q6qww" event={"ID":"25cd298f-ccde-4805-801d-2d486c7e45da","Type":"ContainerStarted","Data":"8d9753523c04dd86b3bd12b6206d393c9a192c0cfb49fb8be7252c33137623c7"} Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.225194 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-bbvkz" event={"ID":"01118562-5441-420c-8d4c-2a983e584de3","Type":"ContainerStarted","Data":"832d2a50943f4ccc8a708c670055859de69c7b80940fd0d7571e06211af49658"} Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.225241 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-bbvkz" event={"ID":"01118562-5441-420c-8d4c-2a983e584de3","Type":"ContainerStarted","Data":"707b5944ed56f16ac105733b8d932f859f976bd7c8db6977a76e7fab29296762"} Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.240503 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-mn6dn" podStartSLOduration=119.240488258 podStartE2EDuration="1m59.240488258s" podCreationTimestamp="2025-10-10 16:32:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:09.239929314 +0000 UTC m=+142.748253429" watchObservedRunningTime="2025-10-10 16:34:09.240488258 +0000 UTC m=+142.748812373" Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.245466 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cvkkx" 
event={"ID":"e1269123-5f8e-40e4-87cf-577d7b148684","Type":"ContainerStarted","Data":"77a6baffb2f651384041d9e88499a9b8e1526e81885ac61f4b8d83fb909170f5"} Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.274109 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5r8jg" event={"ID":"ee5210db-86a0-4346-bfc4-e4a6460eaae1","Type":"ContainerStarted","Data":"b26b8bbf505750c7727921a4f2c44fa44fdd9e6d81d236b85a12810b865bbb6b"} Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.274322 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5r8jg" Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.276147 4799 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-5r8jg container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.19:5443/healthz\": dial tcp 10.217.0.19:5443: connect: connection refused" start-of-body= Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.276195 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5r8jg" podUID="ee5210db-86a0-4346-bfc4-e4a6460eaae1" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.19:5443/healthz\": dial tcp 10.217.0.19:5443: connect: connection refused" Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.287242 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-nlsfc" event={"ID":"98460888-57af-4ef6-a390-c0a592164ddb","Type":"ContainerStarted","Data":"69162e4a1468035c431fded577e77c180e0971a962885cdfa289c73b89ada216"} Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.289358 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-r7x5v" podStartSLOduration=6.289341108 podStartE2EDuration="6.289341108s" podCreationTimestamp="2025-10-10 16:34:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:09.288094236 +0000 UTC m=+142.796418361" watchObservedRunningTime="2025-10-10 16:34:09.289341108 +0000 UTC m=+142.797665223" Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.298938 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:09 crc kubenswrapper[4799]: E1010 16:34:09.299017 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:09.799001181 +0000 UTC m=+143.307325286 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.299793 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.303900 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-9g6sf" event={"ID":"ae03eb1e-0698-4aef-9a6f-7708d92adf66","Type":"ContainerStarted","Data":"8fea2f2a82beadff96fa0df4f44c6e3ee41ea026e2f73f8d47614d008d6782bf"} Oct 10 16:34:09 crc kubenswrapper[4799]: E1010 16:34:09.315415 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:09.815400274 +0000 UTC m=+143.323724389 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.324727 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-n8sc6" event={"ID":"5284905f-e20b-49be-8cef-df7f96e4460d","Type":"ContainerStarted","Data":"9782585437ee963c81dbc12bb654da3734aaff3c5f52522b4924c11f6a5f0df3"} Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.324775 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-n8sc6" event={"ID":"5284905f-e20b-49be-8cef-df7f96e4460d","Type":"ContainerStarted","Data":"ecf736c27ec8d62f044f8ae14c6933f9cc09b90f2b042b86e06e577cbc634134"} Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.325954 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hkvxt" event={"ID":"cb1d3fef-2917-4e7b-916d-3b6381e25c33","Type":"ContainerStarted","Data":"cb07a1e671ec6c3e4f8bd4c9eac12842e643327eb98799190becca9619904c5a"} Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.325981 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hkvxt" event={"ID":"cb1d3fef-2917-4e7b-916d-3b6381e25c33","Type":"ContainerStarted","Data":"6175dcfded948ae016e9bbcb115cc660ffc76597767cfca1795111dcd6b35910"} Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 
Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.331128 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-5sgz5" event={"ID":"18a6e757-37be-47ce-98ea-bda0221cab2c","Type":"ContainerDied","Data":"0908bd63fc94be8510a5dd0968337cb27f4fd96fb2396d77299c67fc6b7d934b"}
Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.341659 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vwbn8" podStartSLOduration=119.341639075 podStartE2EDuration="1m59.341639075s" podCreationTimestamp="2025-10-10 16:32:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:09.340228339 +0000 UTC m=+142.848552464" watchObservedRunningTime="2025-10-10 16:34:09.341639075 +0000 UTC m=+142.849963190"
Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.345061 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9kfj6" event={"ID":"d4b5557a-bd3b-419f-bd0c-e3ed6d1f8def","Type":"ContainerStarted","Data":"b2b6eabd6bd0dff64cf70882621db40100f9932d0b2ededa09441a2f3c0447c6"}
Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.345121 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9kfj6" event={"ID":"d4b5557a-bd3b-419f-bd0c-e3ed6d1f8def","Type":"ContainerStarted","Data":"b686c86581c3d56465c65978f4fe48ac62dbe150e6ff96a193ee02f45fd1e83e"}
Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.345134 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9kfj6" event={"ID":"d4b5557a-bd3b-419f-bd0c-e3ed6d1f8def","Type":"ContainerStarted","Data":"a4acffa0cbaf8f3e8c252a903dd039b575d695ccdbd374ff527e08e2e5f6904e"}
Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.368095 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-mgdrk" event={"ID":"f50c760d-39a2-4717-bf30-942dcd170900","Type":"ContainerStarted","Data":"3a48aa071a63f9e5e1384961c859b49b9d4ba1b46d6e68c3ced31d4d9c35c232"}
Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.390377 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zphkz" event={"ID":"928b4fca-64a2-4c2c-a1b2-38bb069c13c8","Type":"ContainerStarted","Data":"e38984a541bb1a43427f2caf79adb81f900a90fb5b984dbefe8338a172cb7309"}
Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.400790 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 10 16:34:09 crc kubenswrapper[4799]: E1010 16:34:09.401711 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:09.901690297 +0000 UTC m=+143.410014442 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.401793 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:09 crc kubenswrapper[4799]: E1010 16:34:09.403190 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:09.903173314 +0000 UTC m=+143.411497519 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.410482 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-wzwz7" podStartSLOduration=119.410461218 podStartE2EDuration="1m59.410461218s" podCreationTimestamp="2025-10-10 16:32:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:09.378922174 +0000 UTC m=+142.887246289" watchObservedRunningTime="2025-10-10 16:34:09.410461218 +0000 UTC m=+142.918785333"
Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.432096 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-mvftm" event={"ID":"2dff39f3-58aa-4d27-a85f-c1a09bb3d83e","Type":"ContainerStarted","Data":"8b368f8e9033d8de5203ed1c9e7db42e675647bb556cb20aeba534764831e49e"}
Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.432359 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc" event={"ID":"713c24fb-e821-419a-b996-9661a6cbf57b","Type":"ContainerStarted","Data":"26889caab199ffd607d3a66e1ddca3834f4154af23bae9a35864e87a0e0b3ed2"}
Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.453098 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vfgnk" event={"ID":"c0095ab7-a332-4923-b1ac-90abd65087d5","Type":"ContainerStarted","Data":"c319bab1409b17c2b2e560cfbc8d932c0dad88b5084640fc21d2d40a68597685"}
Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.475156 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" event={"ID":"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b","Type":"ContainerStarted","Data":"e955d6456363b50fa31fd8ca95da023c40ffd89f747dcfac255248b6bee6c3b2"}
pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" event={"ID":"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b","Type":"ContainerStarted","Data":"e955d6456363b50fa31fd8ca95da023c40ffd89f747dcfac255248b6bee6c3b2"} Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.475206 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" event={"ID":"cd6b9ce8-4fe8-405e-9399-354b7d8ee20b","Type":"ContainerStarted","Data":"2eff7d0929c15ad51fecb399dbe99f9f3997e116926e38bd1cece789d841d4d4"} Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.478031 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vfgnk" Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.495079 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-n8sc6" podStartSLOduration=121.495062738 podStartE2EDuration="2m1.495062738s" podCreationTimestamp="2025-10-10 16:32:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:09.410997331 +0000 UTC m=+142.919321446" watchObservedRunningTime="2025-10-10 16:34:09.495062738 +0000 UTC m=+143.003386853" Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.500511 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-98snb" event={"ID":"977bd741-4d59-48b0-ab48-c22f6eecdb2e","Type":"ContainerStarted","Data":"d6ac28b63a80266520b81d596f3eb64ee11e7095415040aa41ba49b7bb0f29d6"} Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.503404 4799 patch_prober.go:28] interesting pod/downloads-7954f5f757-plk2p container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.503449 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-plk2p" podUID="e56235b4-8348-4fae-af0a-639fcacfc997" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.504471 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:09 crc kubenswrapper[4799]: E1010 16:34:09.506135 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:10.006121686 +0000 UTC m=+143.514445801 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.512461 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-fvpc8" Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.514469 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w2jsl" Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.526613 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:09 crc kubenswrapper[4799]: E1010 16:34:09.527152 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:10.027140556 +0000 UTC m=+143.535464671 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.589732 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hkvxt" podStartSLOduration=119.589708691 podStartE2EDuration="1m59.589708691s" podCreationTimestamp="2025-10-10 16:32:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:09.584596763 +0000 UTC m=+143.092920878" watchObservedRunningTime="2025-10-10 16:34:09.589708691 +0000 UTC m=+143.098032816" Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.589911 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-8lvfs" podStartSLOduration=120.589906956 podStartE2EDuration="2m0.589906956s" podCreationTimestamp="2025-10-10 16:32:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:09.503217353 +0000 UTC m=+143.011541468" watchObservedRunningTime="2025-10-10 16:34:09.589906956 +0000 UTC m=+143.098231071" Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.631379 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" 
(UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:09 crc kubenswrapper[4799]: E1010 16:34:09.631958 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:10.131944065 +0000 UTC m=+143.640268180 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.679256 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-mgdrk" podStartSLOduration=119.679235696 podStartE2EDuration="1m59.679235696s" podCreationTimestamp="2025-10-10 16:32:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:09.676611749 +0000 UTC m=+143.184935874" watchObservedRunningTime="2025-10-10 16:34:09.679235696 +0000 UTC m=+143.187559811" Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.733071 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:09 crc kubenswrapper[4799]: E1010 16:34:09.738878 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:10.238837716 +0000 UTC m=+143.747161831 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.802043 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.802273 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.805013 4799 patch_prober.go:28] interesting pod/apiserver-76f77b778f-2ksq6 container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="Get \"https://10.217.0.6:8443/livez\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.805059 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" podUID="cd6b9ce8-4fe8-405e-9399-354b7d8ee20b" containerName="openshift-apiserver" probeResult="failure" output="Get \"https://10.217.0.6:8443/livez\": dial tcp 10.217.0.6:8443: connect: connection refused" Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.837294 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-bbvkz" podStartSLOduration=6.837268825 podStartE2EDuration="6.837268825s" podCreationTimestamp="2025-10-10 16:34:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:09.835348377 +0000 UTC m=+143.343672492" watchObservedRunningTime="2025-10-10 16:34:09.837268825 +0000 UTC m=+143.345592940" Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.838795 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:09 crc kubenswrapper[4799]: E1010 16:34:09.839086 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:10.33907348 +0000 UTC m=+143.847397595 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.918338 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5r8jg" podStartSLOduration=119.918314556 podStartE2EDuration="1m59.918314556s" podCreationTimestamp="2025-10-10 16:32:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:09.902628591 +0000 UTC m=+143.410952716" watchObservedRunningTime="2025-10-10 16:34:09.918314556 +0000 UTC m=+143.426638671" Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.939903 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:09 crc kubenswrapper[4799]: E1010 16:34:09.940303 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:10.440289969 +0000 UTC m=+143.948614084 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:09 crc kubenswrapper[4799]: I1010 16:34:09.971136 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-2cggl" podStartSLOduration=119.971116805 podStartE2EDuration="1m59.971116805s" podCreationTimestamp="2025-10-10 16:32:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:09.946034294 +0000 UTC m=+143.454358429" watchObservedRunningTime="2025-10-10 16:34:09.971116805 +0000 UTC m=+143.479440920" Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.019374 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9kfj6" podStartSLOduration=120.01935272 podStartE2EDuration="2m0.01935272s" podCreationTimestamp="2025-10-10 16:32:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:10.018836947 +0000 UTC m=+143.527161082" watchObservedRunningTime="2025-10-10 16:34:10.01935272 +0000 UTC m=+143.527676835" Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.040470 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:10 crc kubenswrapper[4799]: E1010 16:34:10.040594 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:10.540578024 +0000 UTC m=+144.048902139 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.040739 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:10 crc kubenswrapper[4799]: E1010 16:34:10.041008 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:10.541000955 +0000 UTC m=+144.049325070 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.043549 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8qhsm" podStartSLOduration=120.043535019 podStartE2EDuration="2m0.043535019s" podCreationTimestamp="2025-10-10 16:32:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:10.04200067 +0000 UTC m=+143.550324805" watchObservedRunningTime="2025-10-10 16:34:10.043535019 +0000 UTC m=+143.551859134" Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.044692 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.073155 4799 patch_prober.go:28] interesting pod/router-default-5444994796-qpw4g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 10 16:34:10 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld Oct 10 16:34:10 crc kubenswrapper[4799]: [+]process-running ok Oct 10 16:34:10 crc kubenswrapper[4799]: healthz check failed Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.073200 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qpw4g" podUID="6a574afe-31ee-4706-90c0-a9c477f5bce7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.142062 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" 
(UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:10 crc kubenswrapper[4799]: E1010 16:34:10.142438 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:10.642421479 +0000 UTC m=+144.150745594 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.154469 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" podStartSLOduration=121.154450502 podStartE2EDuration="2m1.154450502s" podCreationTimestamp="2025-10-10 16:32:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:10.121476371 +0000 UTC m=+143.629800486" watchObservedRunningTime="2025-10-10 16:34:10.154450502 +0000 UTC m=+143.662774617" Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.214596 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-zphkz" podStartSLOduration=121.214581246 podStartE2EDuration="2m1.214581246s" podCreationTimestamp="2025-10-10 16:32:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:10.181666257 +0000 UTC m=+143.689990382" watchObservedRunningTime="2025-10-10 16:34:10.214581246 +0000 UTC m=+143.722905361" Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.243790 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:10 crc kubenswrapper[4799]: E1010 16:34:10.244091 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:10.744079669 +0000 UTC m=+144.252403774 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.290126 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc" podStartSLOduration=120.290105808 podStartE2EDuration="2m0.290105808s" podCreationTimestamp="2025-10-10 16:32:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:10.249157497 +0000 UTC m=+143.757481612" watchObservedRunningTime="2025-10-10 16:34:10.290105808 +0000 UTC m=+143.798429923" Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.344848 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:10 crc kubenswrapper[4799]: E1010 16:34:10.345018 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:10.844992 +0000 UTC m=+144.353316115 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.345124 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:10 crc kubenswrapper[4799]: E1010 16:34:10.345439 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:10.845425201 +0000 UTC m=+144.353749326 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.441083 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc" Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.441132 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc" Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.442158 4799 patch_prober.go:28] interesting pod/apiserver-7bbb656c7d-mjjtc container/oauth-apiserver namespace/openshift-oauth-apiserver: Startup probe status=failure output="Get \"https://10.217.0.7:8443/livez\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.442205 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc" podUID="713c24fb-e821-419a-b996-9661a6cbf57b" containerName="oauth-apiserver" probeResult="failure" output="Get \"https://10.217.0.7:8443/livez\": dial tcp 10.217.0.7:8443: connect: connection refused" Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.446641 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:10 crc kubenswrapper[4799]: E1010 16:34:10.446803 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:10.946783363 +0000 UTC m=+144.455107478 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.446956 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:10 crc kubenswrapper[4799]: E1010 16:34:10.447322 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-10-10 16:34:10.947310396 +0000 UTC m=+144.455634511 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.505392 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-9g6sf" event={"ID":"ae03eb1e-0698-4aef-9a6f-7708d92adf66","Type":"ContainerStarted","Data":"6a59b059736a579606b326ba1d6466f318ef78de6e384decb4110ad08a24190b"} Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.507474 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-5sgz5" event={"ID":"18a6e757-37be-47ce-98ea-bda0221cab2c","Type":"ContainerStarted","Data":"137fdb0442676fb46abb25261bbaf2ad7a457b5fee6387c47957eb93ebd2ff97"} Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.508266 4799 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-mn6dn container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" start-of-body= Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.508321 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-mn6dn" podUID="e7393869-51bf-4974-97c7-33adfe1b44f5" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.530584 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-5sgz5" podStartSLOduration=121.530562933 podStartE2EDuration="2m1.530562933s" podCreationTimestamp="2025-10-10 16:32:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:10.527796233 +0000 UTC m=+144.036120368" watchObservedRunningTime="2025-10-10 16:34:10.530562933 +0000 UTC m=+144.038887048" Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.548477 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:10 crc kubenswrapper[4799]: E1010 16:34:10.550151 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:11.050134905 +0000 UTC m=+144.558459020 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.651076 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:10 crc kubenswrapper[4799]: E1010 16:34:10.651531 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:11.151519048 +0000 UTC m=+144.659843163 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.752231 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:10 crc kubenswrapper[4799]: E1010 16:34:10.752417 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:11.252390788 +0000 UTC m=+144.760714903 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.752627 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:10 crc kubenswrapper[4799]: E1010 16:34:10.752978 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:11.252961963 +0000 UTC m=+144.761286078 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.853921 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:10 crc kubenswrapper[4799]: E1010 16:34:10.854137 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:11.35410608 +0000 UTC m=+144.862430195 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.854286 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:10 crc kubenswrapper[4799]: E1010 16:34:10.854646 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:11.354635283 +0000 UTC m=+144.862959398 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.864024 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-c9chn"] Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.864971 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-c9chn" Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.868854 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.885871 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c9chn"] Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.955535 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.955924 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a2e52eb-b82d-4869-ab9b-1c783d1c58fb-catalog-content\") pod \"certified-operators-c9chn\" (UID: \"0a2e52eb-b82d-4869-ab9b-1c783d1c58fb\") " pod="openshift-marketplace/certified-operators-c9chn" Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.955966 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a2e52eb-b82d-4869-ab9b-1c783d1c58fb-utilities\") pod \"certified-operators-c9chn\" (UID: \"0a2e52eb-b82d-4869-ab9b-1c783d1c58fb\") " pod="openshift-marketplace/certified-operators-c9chn" Oct 10 16:34:10 crc kubenswrapper[4799]: I1010 16:34:10.955998 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7zlqs\" (UniqueName: \"kubernetes.io/projected/0a2e52eb-b82d-4869-ab9b-1c783d1c58fb-kube-api-access-7zlqs\") pod \"certified-operators-c9chn\" (UID: \"0a2e52eb-b82d-4869-ab9b-1c783d1c58fb\") " pod="openshift-marketplace/certified-operators-c9chn" Oct 10 16:34:10 crc kubenswrapper[4799]: E1010 16:34:10.956164 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:11.456145889 +0000 UTC m=+144.964470004 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.057726 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a2e52eb-b82d-4869-ab9b-1c783d1c58fb-catalog-content\") pod \"certified-operators-c9chn\" (UID: \"0a2e52eb-b82d-4869-ab9b-1c783d1c58fb\") " pod="openshift-marketplace/certified-operators-c9chn" Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.057784 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a2e52eb-b82d-4869-ab9b-1c783d1c58fb-utilities\") pod \"certified-operators-c9chn\" (UID: \"0a2e52eb-b82d-4869-ab9b-1c783d1c58fb\") " pod="openshift-marketplace/certified-operators-c9chn" Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.057816 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7zlqs\" (UniqueName: \"kubernetes.io/projected/0a2e52eb-b82d-4869-ab9b-1c783d1c58fb-kube-api-access-7zlqs\") pod \"certified-operators-c9chn\" (UID: \"0a2e52eb-b82d-4869-ab9b-1c783d1c58fb\") " pod="openshift-marketplace/certified-operators-c9chn" Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.057869 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:11 crc kubenswrapper[4799]: E1010 16:34:11.058215 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:11.558201898 +0000 UTC m=+145.066526013 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.058702 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a2e52eb-b82d-4869-ab9b-1c783d1c58fb-catalog-content\") pod \"certified-operators-c9chn\" (UID: \"0a2e52eb-b82d-4869-ab9b-1c783d1c58fb\") " pod="openshift-marketplace/certified-operators-c9chn" Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.058932 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a2e52eb-b82d-4869-ab9b-1c783d1c58fb-utilities\") pod \"certified-operators-c9chn\" (UID: \"0a2e52eb-b82d-4869-ab9b-1c783d1c58fb\") " pod="openshift-marketplace/certified-operators-c9chn" Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.073416 4799 patch_prober.go:28] interesting pod/router-default-5444994796-qpw4g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 10 16:34:11 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld Oct 10 16:34:11 crc kubenswrapper[4799]: [+]process-running ok Oct 10 16:34:11 crc kubenswrapper[4799]: healthz check failed Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.073482 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qpw4g" podUID="6a574afe-31ee-4706-90c0-a9c477f5bce7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.090598 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-z5z55"] Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.091603 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-z5z55" Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.093654 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.112408 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7zlqs\" (UniqueName: \"kubernetes.io/projected/0a2e52eb-b82d-4869-ab9b-1c783d1c58fb-kube-api-access-7zlqs\") pod \"certified-operators-c9chn\" (UID: \"0a2e52eb-b82d-4869-ab9b-1c783d1c58fb\") " pod="openshift-marketplace/certified-operators-c9chn" Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.138968 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-z5z55"] Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.159468 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.159875 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec4988cf-fb0f-4df8-8f2d-748a5459bbcc-utilities\") pod \"community-operators-z5z55\" (UID: \"ec4988cf-fb0f-4df8-8f2d-748a5459bbcc\") " pod="openshift-marketplace/community-operators-z5z55" Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.159916 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jsjj\" (UniqueName: \"kubernetes.io/projected/ec4988cf-fb0f-4df8-8f2d-748a5459bbcc-kube-api-access-9jsjj\") pod \"community-operators-z5z55\" (UID: \"ec4988cf-fb0f-4df8-8f2d-748a5459bbcc\") " pod="openshift-marketplace/community-operators-z5z55" Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.159946 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec4988cf-fb0f-4df8-8f2d-748a5459bbcc-catalog-content\") pod \"community-operators-z5z55\" (UID: \"ec4988cf-fb0f-4df8-8f2d-748a5459bbcc\") " pod="openshift-marketplace/community-operators-z5z55" Oct 10 16:34:11 crc kubenswrapper[4799]: E1010 16:34:11.160070 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:11.660052672 +0000 UTC m=+145.168376787 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.179484 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-c9chn" Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.261733 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jsjj\" (UniqueName: \"kubernetes.io/projected/ec4988cf-fb0f-4df8-8f2d-748a5459bbcc-kube-api-access-9jsjj\") pod \"community-operators-z5z55\" (UID: \"ec4988cf-fb0f-4df8-8f2d-748a5459bbcc\") " pod="openshift-marketplace/community-operators-z5z55" Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.261829 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec4988cf-fb0f-4df8-8f2d-748a5459bbcc-catalog-content\") pod \"community-operators-z5z55\" (UID: \"ec4988cf-fb0f-4df8-8f2d-748a5459bbcc\") " pod="openshift-marketplace/community-operators-z5z55" Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.262663 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec4988cf-fb0f-4df8-8f2d-748a5459bbcc-catalog-content\") pod \"community-operators-z5z55\" (UID: \"ec4988cf-fb0f-4df8-8f2d-748a5459bbcc\") " pod="openshift-marketplace/community-operators-z5z55" Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.261864 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.262880 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec4988cf-fb0f-4df8-8f2d-748a5459bbcc-utilities\") pod \"community-operators-z5z55\" (UID: \"ec4988cf-fb0f-4df8-8f2d-748a5459bbcc\") " pod="openshift-marketplace/community-operators-z5z55" Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.263141 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec4988cf-fb0f-4df8-8f2d-748a5459bbcc-utilities\") pod \"community-operators-z5z55\" (UID: \"ec4988cf-fb0f-4df8-8f2d-748a5459bbcc\") " pod="openshift-marketplace/community-operators-z5z55" Oct 10 16:34:11 crc kubenswrapper[4799]: E1010 16:34:11.263382 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:11.763371234 +0000 UTC m=+145.271695349 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.301848 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5d7rl"]
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.302998 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5d7rl"
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.330895 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jsjj\" (UniqueName: \"kubernetes.io/projected/ec4988cf-fb0f-4df8-8f2d-748a5459bbcc-kube-api-access-9jsjj\") pod \"community-operators-z5z55\" (UID: \"ec4988cf-fb0f-4df8-8f2d-748a5459bbcc\") " pod="openshift-marketplace/community-operators-z5z55"
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.344738 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5d7rl"]
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.364540 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 10 16:34:11 crc kubenswrapper[4799]: E1010 16:34:11.364848 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:11.864832979 +0000 UTC m=+145.373157094 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.365139 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89264b95-de02-4ba2-a26e-121910a2b2ff-catalog-content\") pod \"certified-operators-5d7rl\" (UID: \"89264b95-de02-4ba2-a26e-121910a2b2ff\") " pod="openshift-marketplace/certified-operators-5d7rl"
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.365182 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89264b95-de02-4ba2-a26e-121910a2b2ff-utilities\") pod \"certified-operators-5d7rl\" (UID: \"89264b95-de02-4ba2-a26e-121910a2b2ff\") " pod="openshift-marketplace/certified-operators-5d7rl"
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.365222 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.365300 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59zkz\" (UniqueName: \"kubernetes.io/projected/89264b95-de02-4ba2-a26e-121910a2b2ff-kube-api-access-59zkz\") pod \"certified-operators-5d7rl\" (UID: \"89264b95-de02-4ba2-a26e-121910a2b2ff\") " pod="openshift-marketplace/certified-operators-5d7rl"
Oct 10 16:34:11 crc kubenswrapper[4799]: E1010 16:34:11.365458 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:11.865450985 +0000 UTC m=+145.373775100 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.403813 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z5z55"
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.466163 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.466437 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59zkz\" (UniqueName: \"kubernetes.io/projected/89264b95-de02-4ba2-a26e-121910a2b2ff-kube-api-access-59zkz\") pod \"certified-operators-5d7rl\" (UID: \"89264b95-de02-4ba2-a26e-121910a2b2ff\") " pod="openshift-marketplace/certified-operators-5d7rl"
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.466511 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89264b95-de02-4ba2-a26e-121910a2b2ff-catalog-content\") pod \"certified-operators-5d7rl\" (UID: \"89264b95-de02-4ba2-a26e-121910a2b2ff\") " pod="openshift-marketplace/certified-operators-5d7rl"
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.466544 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89264b95-de02-4ba2-a26e-121910a2b2ff-utilities\") pod \"certified-operators-5d7rl\" (UID: \"89264b95-de02-4ba2-a26e-121910a2b2ff\") " pod="openshift-marketplace/certified-operators-5d7rl"
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.466934 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89264b95-de02-4ba2-a26e-121910a2b2ff-utilities\") pod \"certified-operators-5d7rl\" (UID: \"89264b95-de02-4ba2-a26e-121910a2b2ff\") " pod="openshift-marketplace/certified-operators-5d7rl"
Oct 10 16:34:11 crc kubenswrapper[4799]: E1010 16:34:11.466997 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:11.966983071 +0000 UTC m=+145.475307186 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.467443 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89264b95-de02-4ba2-a26e-121910a2b2ff-catalog-content\") pod \"certified-operators-5d7rl\" (UID: \"89264b95-de02-4ba2-a26e-121910a2b2ff\") " pod="openshift-marketplace/certified-operators-5d7rl"
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.484883 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rmstw"]
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.489983 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rmstw"
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.512214 4799 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-5r8jg container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.19:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.512259 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5r8jg" podUID="ee5210db-86a0-4346-bfc4-e4a6460eaae1" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.19:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.519509 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59zkz\" (UniqueName: \"kubernetes.io/projected/89264b95-de02-4ba2-a26e-121910a2b2ff-kube-api-access-59zkz\") pod \"certified-operators-5d7rl\" (UID: \"89264b95-de02-4ba2-a26e-121910a2b2ff\") " pod="openshift-marketplace/certified-operators-5d7rl"
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.532999 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-5sgz5"
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.575746 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rmstw"]
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.581121 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8195a22-f2dc-4a8e-bb85-abbef8d04ac3-utilities\") pod \"community-operators-rmstw\" (UID: \"f8195a22-f2dc-4a8e-bb85-abbef8d04ac3\") " pod="openshift-marketplace/community-operators-rmstw"
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.581367 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.581493 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8195a22-f2dc-4a8e-bb85-abbef8d04ac3-catalog-content\") pod \"community-operators-rmstw\" (UID: \"f8195a22-f2dc-4a8e-bb85-abbef8d04ac3\") " pod="openshift-marketplace/community-operators-rmstw"
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.581522 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqfsd\" (UniqueName: \"kubernetes.io/projected/f8195a22-f2dc-4a8e-bb85-abbef8d04ac3-kube-api-access-cqfsd\") pod \"community-operators-rmstw\" (UID: \"f8195a22-f2dc-4a8e-bb85-abbef8d04ac3\") " pod="openshift-marketplace/community-operators-rmstw"
Oct 10 16:34:11 crc kubenswrapper[4799]: E1010 16:34:11.583596 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:12.083564627 +0000 UTC m=+145.591888742 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.619389 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5d7rl"
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.683340 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.683600 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8195a22-f2dc-4a8e-bb85-abbef8d04ac3-catalog-content\") pod \"community-operators-rmstw\" (UID: \"f8195a22-f2dc-4a8e-bb85-abbef8d04ac3\") " pod="openshift-marketplace/community-operators-rmstw"
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.683634 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqfsd\" (UniqueName: \"kubernetes.io/projected/f8195a22-f2dc-4a8e-bb85-abbef8d04ac3-kube-api-access-cqfsd\") pod \"community-operators-rmstw\" (UID: \"f8195a22-f2dc-4a8e-bb85-abbef8d04ac3\") " pod="openshift-marketplace/community-operators-rmstw"
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.683709 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8195a22-f2dc-4a8e-bb85-abbef8d04ac3-utilities\") pod \"community-operators-rmstw\" (UID: \"f8195a22-f2dc-4a8e-bb85-abbef8d04ac3\") " pod="openshift-marketplace/community-operators-rmstw"
Oct 10 16:34:11 crc kubenswrapper[4799]: E1010 16:34:11.684264 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:12.184249442 +0000 UTC m=+145.692573557 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.685248 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8195a22-f2dc-4a8e-bb85-abbef8d04ac3-catalog-content\") pod \"community-operators-rmstw\" (UID: \"f8195a22-f2dc-4a8e-bb85-abbef8d04ac3\") " pod="openshift-marketplace/community-operators-rmstw"
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.685683 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8195a22-f2dc-4a8e-bb85-abbef8d04ac3-utilities\") pod \"community-operators-rmstw\" (UID: \"f8195a22-f2dc-4a8e-bb85-abbef8d04ac3\") " pod="openshift-marketplace/community-operators-rmstw"
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.717372 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqfsd\" (UniqueName: \"kubernetes.io/projected/f8195a22-f2dc-4a8e-bb85-abbef8d04ac3-kube-api-access-cqfsd\") pod \"community-operators-rmstw\" (UID: \"f8195a22-f2dc-4a8e-bb85-abbef8d04ac3\") " pod="openshift-marketplace/community-operators-rmstw"
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.772736 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c9chn"]
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.786573 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:11 crc kubenswrapper[4799]: E1010 16:34:11.786929 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:12.286917877 +0000 UTC m=+145.795241992 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:11 crc kubenswrapper[4799]: W1010 16:34:11.800717 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0a2e52eb_b82d_4869_ab9b_1c783d1c58fb.slice/crio-49d9afe499437cc339c0788c0f90142c9ecb2dddb6c66fd4e715d20a5c0326f2 WatchSource:0}: Error finding container 49d9afe499437cc339c0788c0f90142c9ecb2dddb6c66fd4e715d20a5c0326f2: Status 404 returned error can't find the container with id 49d9afe499437cc339c0788c0f90142c9ecb2dddb6c66fd4e715d20a5c0326f2
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.873128 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rmstw"
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.890243 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 10 16:34:11 crc kubenswrapper[4799]: E1010 16:34:11.891139 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:12.391118091 +0000 UTC m=+145.899442206 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:11 crc kubenswrapper[4799]: I1010 16:34:11.991788 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:11 crc kubenswrapper[4799]: E1010 16:34:11.994624 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:12.494609207 +0000 UTC m=+146.002933322 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:12 crc kubenswrapper[4799]: I1010 16:34:12.072018 4799 patch_prober.go:28] interesting pod/router-default-5444994796-qpw4g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 10 16:34:12 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld
Oct 10 16:34:12 crc kubenswrapper[4799]: [+]process-running ok
Oct 10 16:34:12 crc kubenswrapper[4799]: healthz check failed
Oct 10 16:34:12 crc kubenswrapper[4799]: I1010 16:34:12.072069 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qpw4g" podUID="6a574afe-31ee-4706-90c0-a9c477f5bce7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 10 16:34:12 crc kubenswrapper[4799]: I1010 16:34:12.092995 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 10 16:34:12 crc kubenswrapper[4799]: E1010 16:34:12.093330 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:12.593315203 +0000 UTC m=+146.101639318 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:12 crc kubenswrapper[4799]: I1010 16:34:12.109288 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5d7rl"]
Oct 10 16:34:12 crc kubenswrapper[4799]: I1010 16:34:12.136888 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-z5z55"]
Oct 10 16:34:12 crc kubenswrapper[4799]: W1010 16:34:12.194316 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod89264b95_de02_4ba2_a26e_121910a2b2ff.slice/crio-1938c582311580dec01c0dd14ffe07bf0bcaae54e270ca570cdee0a1b28700bf WatchSource:0}: Error finding container 1938c582311580dec01c0dd14ffe07bf0bcaae54e270ca570cdee0a1b28700bf: Status 404 returned error can't find the container with id 1938c582311580dec01c0dd14ffe07bf0bcaae54e270ca570cdee0a1b28700bf
Oct 10 16:34:12 crc kubenswrapper[4799]: I1010 16:34:12.194566 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:12 crc kubenswrapper[4799]: E1010 16:34:12.194899 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:12.69488625 +0000 UTC m=+146.203210365 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:12 crc kubenswrapper[4799]: I1010 16:34:12.280660 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rmstw"]
Oct 10 16:34:12 crc kubenswrapper[4799]: I1010 16:34:12.295706 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 10 16:34:12 crc kubenswrapper[4799]: E1010 16:34:12.295903 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:12.795875203 +0000 UTC m=+146.304199318 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:12 crc kubenswrapper[4799]: I1010 16:34:12.296020 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:12 crc kubenswrapper[4799]: E1010 16:34:12.296380 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:12.796371176 +0000 UTC m=+146.304695291 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:12 crc kubenswrapper[4799]: I1010 16:34:12.396652 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 10 16:34:12 crc kubenswrapper[4799]: E1010 16:34:12.396851 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:12.896823465 +0000 UTC m=+146.405147580 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:12 crc kubenswrapper[4799]: I1010 16:34:12.397024 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:12 crc kubenswrapper[4799]: E1010 16:34:12.397275 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:12.897263946 +0000 UTC m=+146.405588061 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:12 crc kubenswrapper[4799]: I1010 16:34:12.498010 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 10 16:34:12 crc kubenswrapper[4799]: E1010 16:34:12.498204 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:12.998180027 +0000 UTC m=+146.506504142 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:12 crc kubenswrapper[4799]: I1010 16:34:12.498388 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:12 crc kubenswrapper[4799]: E1010 16:34:12.498660 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:12.998652409 +0000 UTC m=+146.506976524 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:12 crc kubenswrapper[4799]: I1010 16:34:12.537902 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c9chn" event={"ID":"0a2e52eb-b82d-4869-ab9b-1c783d1c58fb","Type":"ContainerStarted","Data":"36aef9f05b185c22779c267025905cd2370332e5867a378339b7969b90423564"}
Oct 10 16:34:12 crc kubenswrapper[4799]: I1010 16:34:12.537959 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c9chn" event={"ID":"0a2e52eb-b82d-4869-ab9b-1c783d1c58fb","Type":"ContainerStarted","Data":"49d9afe499437cc339c0788c0f90142c9ecb2dddb6c66fd4e715d20a5c0326f2"}
Oct 10 16:34:12 crc kubenswrapper[4799]: I1010 16:34:12.538834 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5d7rl" event={"ID":"89264b95-de02-4ba2-a26e-121910a2b2ff","Type":"ContainerStarted","Data":"1938c582311580dec01c0dd14ffe07bf0bcaae54e270ca570cdee0a1b28700bf"}
Oct 10 16:34:12 crc kubenswrapper[4799]: I1010 16:34:12.539712 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rmstw" event={"ID":"f8195a22-f2dc-4a8e-bb85-abbef8d04ac3","Type":"ContainerStarted","Data":"87a2d4c6baff5529364b8c7e1628139d47ad9c5b30cfd927b0f232c8610d2856"}
Oct 10 16:34:12 crc kubenswrapper[4799]: I1010 16:34:12.540525 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z5z55" event={"ID":"ec4988cf-fb0f-4df8-8f2d-748a5459bbcc","Type":"ContainerStarted","Data":"902b03ac04ed41b609698f710dd28906ba67ba463f3840ef2f1545adde13eb93"}
Oct 10 16:34:12 crc kubenswrapper[4799]: I1010 16:34:12.599193 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 10 16:34:12 crc kubenswrapper[4799]: E1010 16:34:12.599426 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:13.099389976 +0000 UTC m=+146.607714091 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:12 crc kubenswrapper[4799]: I1010 16:34:12.599496 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:12 crc kubenswrapper[4799]: E1010 16:34:12.599851 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:13.099841777 +0000 UTC m=+146.608165932 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:12 crc kubenswrapper[4799]: I1010 16:34:12.700651 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 10 16:34:12 crc kubenswrapper[4799]: E1010 16:34:12.700799 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:13.200735678 +0000 UTC m=+146.709059793 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:12 crc kubenswrapper[4799]: I1010 16:34:12.701099 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:12 crc kubenswrapper[4799]: E1010 16:34:12.701362 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:13.201355063 +0000 UTC m=+146.709679178 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:12 crc kubenswrapper[4799]: I1010 16:34:12.802853 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 10 16:34:12 crc kubenswrapper[4799]: E1010 16:34:12.803039 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:13.303009793 +0000 UTC m=+146.811333908 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:12 crc kubenswrapper[4799]: I1010 16:34:12.803164 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:12 crc kubenswrapper[4799]: E1010 16:34:12.803527 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:13.303519976 +0000 UTC m=+146.811844091 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:12 crc kubenswrapper[4799]: I1010 16:34:12.873203 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-5sgz5"
Oct 10 16:34:12 crc kubenswrapper[4799]: I1010 16:34:12.904451 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 10 16:34:12 crc kubenswrapper[4799]: E1010 16:34:12.904632 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:13.404605011 +0000 UTC m=+146.912929136 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:12 crc kubenswrapper[4799]: I1010 16:34:12.904837 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:12 crc kubenswrapper[4799]: E1010 16:34:12.905192 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:13.405180806 +0000 UTC m=+146.913505021 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.005966 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 10 16:34:13 crc kubenswrapper[4799]: E1010 16:34:13.006181 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:13.506147708 +0000 UTC m=+147.014471823 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.006507 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:13 crc kubenswrapper[4799]: E1010 16:34:13.006900 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:13.506884787 +0000 UTC m=+147.015208992 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.051471 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4786v"]
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.052391 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4786v"
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.053829 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.067342 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4786v"]
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.070081 4799 patch_prober.go:28] interesting pod/router-default-5444994796-qpw4g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 10 16:34:13 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld
Oct 10 16:34:13 crc kubenswrapper[4799]: [+]process-running ok
Oct 10 16:34:13 crc kubenswrapper[4799]: healthz check failed
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.070147 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qpw4g" podUID="6a574afe-31ee-4706-90c0-a9c477f5bce7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.107696 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 10 16:34:13 crc kubenswrapper[4799]: E1010 16:34:13.107987 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:13.607949622 +0000 UTC m=+147.116273747 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.108050 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ch4sf\" (UniqueName: \"kubernetes.io/projected/048287bf-12a1-4fef-8fc2-7fa4686d31cc-kube-api-access-ch4sf\") pod \"redhat-marketplace-4786v\" (UID: \"048287bf-12a1-4fef-8fc2-7fa4686d31cc\") " pod="openshift-marketplace/redhat-marketplace-4786v"
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.108206 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/048287bf-12a1-4fef-8fc2-7fa4686d31cc-catalog-content\") pod \"redhat-marketplace-4786v\" (UID: \"048287bf-12a1-4fef-8fc2-7fa4686d31cc\") " pod="openshift-marketplace/redhat-marketplace-4786v"
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.108252 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/048287bf-12a1-4fef-8fc2-7fa4686d31cc-utilities\") pod \"redhat-marketplace-4786v\" (UID: \"048287bf-12a1-4fef-8fc2-7fa4686d31cc\") " pod="openshift-marketplace/redhat-marketplace-4786v"
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.108474 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:13 crc kubenswrapper[4799]: E1010 16:34:13.108823 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:13.608810703 +0000 UTC m=+147.117134818 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.209932 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 10 16:34:13 crc kubenswrapper[4799]: E1010 16:34:13.210143 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:13.710115574 +0000 UTC m=+147.218439689 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.210235 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.210290 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ch4sf\" (UniqueName: \"kubernetes.io/projected/048287bf-12a1-4fef-8fc2-7fa4686d31cc-kube-api-access-ch4sf\") pod \"redhat-marketplace-4786v\" (UID: \"048287bf-12a1-4fef-8fc2-7fa4686d31cc\") " pod="openshift-marketplace/redhat-marketplace-4786v"
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.210339 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/048287bf-12a1-4fef-8fc2-7fa4686d31cc-catalog-content\") pod \"redhat-marketplace-4786v\" (UID: \"048287bf-12a1-4fef-8fc2-7fa4686d31cc\") " pod="openshift-marketplace/redhat-marketplace-4786v"
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.210361 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/048287bf-12a1-4fef-8fc2-7fa4686d31cc-utilities\") pod \"redhat-marketplace-4786v\" (UID: \"048287bf-12a1-4fef-8fc2-7fa4686d31cc\") " pod="openshift-marketplace/redhat-marketplace-4786v"
Oct 10 16:34:13 crc kubenswrapper[4799]: E1010 16:34:13.210601 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:13.710585726 +0000 UTC m=+147.218909841 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.211106 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/048287bf-12a1-4fef-8fc2-7fa4686d31cc-utilities\") pod \"redhat-marketplace-4786v\" (UID: \"048287bf-12a1-4fef-8fc2-7fa4686d31cc\") " pod="openshift-marketplace/redhat-marketplace-4786v"
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.211192 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/048287bf-12a1-4fef-8fc2-7fa4686d31cc-catalog-content\") pod \"redhat-marketplace-4786v\" (UID: \"048287bf-12a1-4fef-8fc2-7fa4686d31cc\") " pod="openshift-marketplace/redhat-marketplace-4786v"
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.228020 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ch4sf\" (UniqueName: \"kubernetes.io/projected/048287bf-12a1-4fef-8fc2-7fa4686d31cc-kube-api-access-ch4sf\") pod \"redhat-marketplace-4786v\" (UID: \"048287bf-12a1-4fef-8fc2-7fa4686d31cc\") " pod="openshift-marketplace/redhat-marketplace-4786v"
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.311180 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 10 16:34:13 crc kubenswrapper[4799]: E1010 16:34:13.311352 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:13.811314693 +0000 UTC m=+147.319638838 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.311624 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:13 crc kubenswrapper[4799]: E1010 16:34:13.312065 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:13.812053481 +0000 UTC m=+147.320377596 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.373406 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4786v"
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.412788 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 10 16:34:13 crc kubenswrapper[4799]: E1010 16:34:13.413173 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:13.913141697 +0000 UTC m=+147.421465822 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.413242 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:13 crc kubenswrapper[4799]: E1010 16:34:13.413679 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:13.91366478 +0000 UTC m=+147.421988895 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.455811 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vtc78"]
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.457685 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vtc78"
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.470796 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vtc78"]
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.514180 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.514447 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/122cb808-3c4b-4829-8ac3-3419cf9b6bdd-catalog-content\") pod \"redhat-marketplace-vtc78\" (UID: \"122cb808-3c4b-4829-8ac3-3419cf9b6bdd\") " pod="openshift-marketplace/redhat-marketplace-vtc78"
Oct 10 16:34:13 crc kubenswrapper[4799]: E1010 16:34:13.514509 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:14.014484018 +0000 UTC m=+147.522808133 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.514658 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65skb\" (UniqueName: \"kubernetes.io/projected/122cb808-3c4b-4829-8ac3-3419cf9b6bdd-kube-api-access-65skb\") pod \"redhat-marketplace-vtc78\" (UID: \"122cb808-3c4b-4829-8ac3-3419cf9b6bdd\") " pod="openshift-marketplace/redhat-marketplace-vtc78"
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.514732 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.514802 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/122cb808-3c4b-4829-8ac3-3419cf9b6bdd-utilities\") pod \"redhat-marketplace-vtc78\" (UID: \"122cb808-3c4b-4829-8ac3-3419cf9b6bdd\") " pod="openshift-marketplace/redhat-marketplace-vtc78"
Oct 10 16:34:13 crc kubenswrapper[4799]: E1010 16:34:13.515161 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:14.015145275 +0000 UTC m=+147.523469390 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.565479 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rmstw" event={"ID":"f8195a22-f2dc-4a8e-bb85-abbef8d04ac3","Type":"ContainerStarted","Data":"178ae838de186c3ffda884436135132b26ff4e0ae12551b699f11212fbee6451"}
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.567992 4799 generic.go:334] "Generic (PLEG): container finished" podID="ec4988cf-fb0f-4df8-8f2d-748a5459bbcc" containerID="0f2a8412a97266ed1ef1700224e39d163d1aa20db1195e0eb26991eb7838317e" exitCode=0
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.568056 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z5z55" event={"ID":"ec4988cf-fb0f-4df8-8f2d-748a5459bbcc","Type":"ContainerDied","Data":"0f2a8412a97266ed1ef1700224e39d163d1aa20db1195e0eb26991eb7838317e"}
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.569664 4799 generic.go:334] "Generic (PLEG): container finished" podID="0a2e52eb-b82d-4869-ab9b-1c783d1c58fb" containerID="36aef9f05b185c22779c267025905cd2370332e5867a378339b7969b90423564" exitCode=0
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.569716 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c9chn" event={"ID":"0a2e52eb-b82d-4869-ab9b-1c783d1c58fb","Type":"ContainerDied","Data":"36aef9f05b185c22779c267025905cd2370332e5867a378339b7969b90423564"}
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.573190 4799 generic.go:334] "Generic (PLEG): container finished" podID="89264b95-de02-4ba2-a26e-121910a2b2ff" containerID="bbe6bcde77490fe58438503b1815e2613245ed5464acda29f815adfede36893c" exitCode=0
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.573675 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5d7rl" event={"ID":"89264b95-de02-4ba2-a26e-121910a2b2ff","Type":"ContainerDied","Data":"bbe6bcde77490fe58438503b1815e2613245ed5464acda29f815adfede36893c"}
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.574677 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.602060 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.603151 4799 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.605896 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.606199 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.607931 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.622205 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.622481 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/122cb808-3c4b-4829-8ac3-3419cf9b6bdd-utilities\") pod \"redhat-marketplace-vtc78\" (UID: \"122cb808-3c4b-4829-8ac3-3419cf9b6bdd\") " pod="openshift-marketplace/redhat-marketplace-vtc78" Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.622539 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/122cb808-3c4b-4829-8ac3-3419cf9b6bdd-catalog-content\") pod \"redhat-marketplace-vtc78\" (UID: \"122cb808-3c4b-4829-8ac3-3419cf9b6bdd\") " pod="openshift-marketplace/redhat-marketplace-vtc78" Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.622619 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65skb\" (UniqueName: \"kubernetes.io/projected/122cb808-3c4b-4829-8ac3-3419cf9b6bdd-kube-api-access-65skb\") pod \"redhat-marketplace-vtc78\" (UID: \"122cb808-3c4b-4829-8ac3-3419cf9b6bdd\") " pod="openshift-marketplace/redhat-marketplace-vtc78" Oct 10 16:34:13 crc kubenswrapper[4799]: E1010 16:34:13.622830 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:14.122808266 +0000 UTC m=+147.631132381 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.623332 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/122cb808-3c4b-4829-8ac3-3419cf9b6bdd-utilities\") pod \"redhat-marketplace-vtc78\" (UID: \"122cb808-3c4b-4829-8ac3-3419cf9b6bdd\") " pod="openshift-marketplace/redhat-marketplace-vtc78" Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.623578 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/122cb808-3c4b-4829-8ac3-3419cf9b6bdd-catalog-content\") pod \"redhat-marketplace-vtc78\" (UID: \"122cb808-3c4b-4829-8ac3-3419cf9b6bdd\") " pod="openshift-marketplace/redhat-marketplace-vtc78" Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.639001 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65skb\" (UniqueName: \"kubernetes.io/projected/122cb808-3c4b-4829-8ac3-3419cf9b6bdd-kube-api-access-65skb\") pod \"redhat-marketplace-vtc78\" (UID: \"122cb808-3c4b-4829-8ac3-3419cf9b6bdd\") " pod="openshift-marketplace/redhat-marketplace-vtc78" Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.723595 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/dc89d1a8-4954-42af-b9ab-6fc1b88f9e02-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"dc89d1a8-4954-42af-b9ab-6fc1b88f9e02\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.723730 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dc89d1a8-4954-42af-b9ab-6fc1b88f9e02-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"dc89d1a8-4954-42af-b9ab-6fc1b88f9e02\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.723807 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:13 crc kubenswrapper[4799]: E1010 16:34:13.724069 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:14.224057446 +0000 UTC m=+147.732381561 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.774242 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vtc78" Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.817172 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4786v"] Oct 10 16:34:13 crc kubenswrapper[4799]: W1010 16:34:13.823344 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod048287bf_12a1_4fef_8fc2_7fa4686d31cc.slice/crio-2f1fb1897dee773d66a1611a4d1d1c2694c8118ea7753b34a2dd1093c1e231aa WatchSource:0}: Error finding container 2f1fb1897dee773d66a1611a4d1d1c2694c8118ea7753b34a2dd1093c1e231aa: Status 404 returned error can't find the container with id 2f1fb1897dee773d66a1611a4d1d1c2694c8118ea7753b34a2dd1093c1e231aa Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.824415 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:13 crc kubenswrapper[4799]: E1010 16:34:13.824543 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:14.324526396 +0000 UTC m=+147.832850501 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.824691 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/dc89d1a8-4954-42af-b9ab-6fc1b88f9e02-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"dc89d1a8-4954-42af-b9ab-6fc1b88f9e02\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.824792 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/dc89d1a8-4954-42af-b9ab-6fc1b88f9e02-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"dc89d1a8-4954-42af-b9ab-6fc1b88f9e02\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.824809 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dc89d1a8-4954-42af-b9ab-6fc1b88f9e02-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"dc89d1a8-4954-42af-b9ab-6fc1b88f9e02\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.824861 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:13 crc kubenswrapper[4799]: E1010 16:34:13.825135 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:14.325123951 +0000 UTC m=+147.833448066 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.852729 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dc89d1a8-4954-42af-b9ab-6fc1b88f9e02-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"dc89d1a8-4954-42af-b9ab-6fc1b88f9e02\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.925565 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:13 crc kubenswrapper[4799]: E1010 16:34:13.926040 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:14.426021101 +0000 UTC m=+147.934345216 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:13 crc kubenswrapper[4799]: I1010 16:34:13.965976 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vtc78"] Oct 10 16:34:13 crc kubenswrapper[4799]: W1010 16:34:13.973239 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod122cb808_3c4b_4829_8ac3_3419cf9b6bdd.slice/crio-e72538b5e9d6ac1ad814685a80c226dcf9a43e45b7cbba9a99ee939c3c894f55 WatchSource:0}: Error finding container e72538b5e9d6ac1ad814685a80c226dcf9a43e45b7cbba9a99ee939c3c894f55: Status 404 returned error can't find the container with id e72538b5e9d6ac1ad814685a80c226dcf9a43e45b7cbba9a99ee939c3c894f55 Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.009832 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.027359 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:14 crc kubenswrapper[4799]: E1010 16:34:14.027709 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:14.527694271 +0000 UTC m=+148.036018386 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.054044 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mkr4n"] Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.055318 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mkr4n" Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.060368 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.069061 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mkr4n"] Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.073107 4799 patch_prober.go:28] interesting pod/router-default-5444994796-qpw4g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 10 16:34:14 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld Oct 10 16:34:14 crc kubenswrapper[4799]: [+]process-running ok Oct 10 16:34:14 crc kubenswrapper[4799]: healthz check failed Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.073159 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qpw4g" podUID="6a574afe-31ee-4706-90c0-a9c477f5bce7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.133364 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:14 crc kubenswrapper[4799]: E1010 16:34:14.133527 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:14.633498566 +0000 UTC m=+148.141822691 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.133572 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.133601 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce937bef-231c-4353-8af2-f8e4517c68c4-utilities\") pod \"redhat-operators-mkr4n\" (UID: \"ce937bef-231c-4353-8af2-f8e4517c68c4\") " pod="openshift-marketplace/redhat-operators-mkr4n" Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.133626 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce937bef-231c-4353-8af2-f8e4517c68c4-catalog-content\") pod \"redhat-operators-mkr4n\" (UID: \"ce937bef-231c-4353-8af2-f8e4517c68c4\") " pod="openshift-marketplace/redhat-operators-mkr4n" Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.133686 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hr52z\" (UniqueName: \"kubernetes.io/projected/ce937bef-231c-4353-8af2-f8e4517c68c4-kube-api-access-hr52z\") pod \"redhat-operators-mkr4n\" (UID: \"ce937bef-231c-4353-8af2-f8e4517c68c4\") " pod="openshift-marketplace/redhat-operators-mkr4n" Oct 10 16:34:14 crc kubenswrapper[4799]: E1010 16:34:14.133946 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:14.633937187 +0000 UTC m=+148.142261392 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.234561 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:14 crc kubenswrapper[4799]: E1010 16:34:14.234768 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:14.734727335 +0000 UTC m=+148.243051450 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.234873 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.234906 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce937bef-231c-4353-8af2-f8e4517c68c4-utilities\") pod \"redhat-operators-mkr4n\" (UID: \"ce937bef-231c-4353-8af2-f8e4517c68c4\") " pod="openshift-marketplace/redhat-operators-mkr4n" Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.234933 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce937bef-231c-4353-8af2-f8e4517c68c4-catalog-content\") pod \"redhat-operators-mkr4n\" (UID: \"ce937bef-231c-4353-8af2-f8e4517c68c4\") " pod="openshift-marketplace/redhat-operators-mkr4n" Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.235002 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hr52z\" (UniqueName: \"kubernetes.io/projected/ce937bef-231c-4353-8af2-f8e4517c68c4-kube-api-access-hr52z\") pod \"redhat-operators-mkr4n\" (UID: \"ce937bef-231c-4353-8af2-f8e4517c68c4\") " pod="openshift-marketplace/redhat-operators-mkr4n" Oct 10 16:34:14 crc kubenswrapper[4799]: E1010 16:34:14.235222 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: 
nodeName:}" failed. No retries permitted until 2025-10-10 16:34:14.735209367 +0000 UTC m=+148.243533482 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.235451 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce937bef-231c-4353-8af2-f8e4517c68c4-utilities\") pod \"redhat-operators-mkr4n\" (UID: \"ce937bef-231c-4353-8af2-f8e4517c68c4\") " pod="openshift-marketplace/redhat-operators-mkr4n" Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.236159 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce937bef-231c-4353-8af2-f8e4517c68c4-catalog-content\") pod \"redhat-operators-mkr4n\" (UID: \"ce937bef-231c-4353-8af2-f8e4517c68c4\") " pod="openshift-marketplace/redhat-operators-mkr4n" Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.253927 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hr52z\" (UniqueName: \"kubernetes.io/projected/ce937bef-231c-4353-8af2-f8e4517c68c4-kube-api-access-hr52z\") pod \"redhat-operators-mkr4n\" (UID: \"ce937bef-231c-4353-8af2-f8e4517c68c4\") " pod="openshift-marketplace/redhat-operators-mkr4n" Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.335959 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:14 crc kubenswrapper[4799]: E1010 16:34:14.336076 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:14.836030115 +0000 UTC m=+148.344354230 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.336238 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:14 crc kubenswrapper[4799]: E1010 16:34:14.336637 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:14.836629341 +0000 UTC m=+148.344953456 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.437208 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:14 crc kubenswrapper[4799]: E1010 16:34:14.437473 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:14.937434099 +0000 UTC m=+148.445758254 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.437866 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:14 crc kubenswrapper[4799]: E1010 16:34:14.438289 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:14.93827437 +0000 UTC m=+148.446598485 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.452269 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.454067 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mkr4n" Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.455965 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vlrmq"] Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.457220 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vlrmq" Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.517338 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vlrmq"] Oct 10 16:34:14 crc kubenswrapper[4799]: W1010 16:34:14.534440 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-poddc89d1a8_4954_42af_b9ab_6fc1b88f9e02.slice/crio-a87a3c27b4e06617fee78d260f3d276b57dde930dd8d134208aa9605d5e5db47 WatchSource:0}: Error finding container a87a3c27b4e06617fee78d260f3d276b57dde930dd8d134208aa9605d5e5db47: Status 404 returned error can't find the container with id a87a3c27b4e06617fee78d260f3d276b57dde930dd8d134208aa9605d5e5db47 Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.538596 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:14 crc kubenswrapper[4799]: E1010 16:34:14.539073 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:15.038792701 +0000 UTC m=+148.547116806 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.539159 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ad01aa8-e719-4b4d-af75-0589792d8b3c-utilities\") pod \"redhat-operators-vlrmq\" (UID: \"2ad01aa8-e719-4b4d-af75-0589792d8b3c\") " pod="openshift-marketplace/redhat-operators-vlrmq" Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.539376 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.539408 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ad01aa8-e719-4b4d-af75-0589792d8b3c-catalog-content\") pod \"redhat-operators-vlrmq\" (UID: \"2ad01aa8-e719-4b4d-af75-0589792d8b3c\") " pod="openshift-marketplace/redhat-operators-vlrmq" Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.539659 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mffxh\" (UniqueName: \"kubernetes.io/projected/2ad01aa8-e719-4b4d-af75-0589792d8b3c-kube-api-access-mffxh\") pod 
\"redhat-operators-vlrmq\" (UID: \"2ad01aa8-e719-4b4d-af75-0589792d8b3c\") " pod="openshift-marketplace/redhat-operators-vlrmq" Oct 10 16:34:14 crc kubenswrapper[4799]: E1010 16:34:14.539683 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:15.039669773 +0000 UTC m=+148.547993888 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.579589 4799 generic.go:334] "Generic (PLEG): container finished" podID="122cb808-3c4b-4829-8ac3-3419cf9b6bdd" containerID="690379bbddac979469b853c3b34a068412a0dabd6d2fad7a1e5a4007814eea4a" exitCode=0 Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.579651 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vtc78" event={"ID":"122cb808-3c4b-4829-8ac3-3419cf9b6bdd","Type":"ContainerDied","Data":"690379bbddac979469b853c3b34a068412a0dabd6d2fad7a1e5a4007814eea4a"} Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.579677 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vtc78" event={"ID":"122cb808-3c4b-4829-8ac3-3419cf9b6bdd","Type":"ContainerStarted","Data":"e72538b5e9d6ac1ad814685a80c226dcf9a43e45b7cbba9a99ee939c3c894f55"} Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.588010 4799 generic.go:334] "Generic (PLEG): container finished" podID="25cd298f-ccde-4805-801d-2d486c7e45da" containerID="8d9753523c04dd86b3bd12b6206d393c9a192c0cfb49fb8be7252c33137623c7" exitCode=0 Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.588080 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335230-q6qww" event={"ID":"25cd298f-ccde-4805-801d-2d486c7e45da","Type":"ContainerDied","Data":"8d9753523c04dd86b3bd12b6206d393c9a192c0cfb49fb8be7252c33137623c7"} Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.594790 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"dc89d1a8-4954-42af-b9ab-6fc1b88f9e02","Type":"ContainerStarted","Data":"a87a3c27b4e06617fee78d260f3d276b57dde930dd8d134208aa9605d5e5db47"} Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.640553 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:14 crc kubenswrapper[4799]: E1010 16:34:14.640721 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-10-10 16:34:15.140689136 +0000 UTC m=+148.649013251 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.641169 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mffxh\" (UniqueName: \"kubernetes.io/projected/2ad01aa8-e719-4b4d-af75-0589792d8b3c-kube-api-access-mffxh\") pod \"redhat-operators-vlrmq\" (UID: \"2ad01aa8-e719-4b4d-af75-0589792d8b3c\") " pod="openshift-marketplace/redhat-operators-vlrmq" Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.641246 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ad01aa8-e719-4b4d-af75-0589792d8b3c-utilities\") pod \"redhat-operators-vlrmq\" (UID: \"2ad01aa8-e719-4b4d-af75-0589792d8b3c\") " pod="openshift-marketplace/redhat-operators-vlrmq" Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.641304 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.641329 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ad01aa8-e719-4b4d-af75-0589792d8b3c-catalog-content\") pod \"redhat-operators-vlrmq\" (UID: \"2ad01aa8-e719-4b4d-af75-0589792d8b3c\") " pod="openshift-marketplace/redhat-operators-vlrmq" Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.641873 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ad01aa8-e719-4b4d-af75-0589792d8b3c-catalog-content\") pod \"redhat-operators-vlrmq\" (UID: \"2ad01aa8-e719-4b4d-af75-0589792d8b3c\") " pod="openshift-marketplace/redhat-operators-vlrmq" Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.642110 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ad01aa8-e719-4b4d-af75-0589792d8b3c-utilities\") pod \"redhat-operators-vlrmq\" (UID: \"2ad01aa8-e719-4b4d-af75-0589792d8b3c\") " pod="openshift-marketplace/redhat-operators-vlrmq" Oct 10 16:34:14 crc kubenswrapper[4799]: E1010 16:34:14.642543 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:15.142527522 +0000 UTC m=+148.650851637 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.644865 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-9g6sf" event={"ID":"ae03eb1e-0698-4aef-9a6f-7708d92adf66","Type":"ContainerStarted","Data":"c9e4756893f6231c598bb2d0806c03e7b29c3c74da2c583bf011263fa58eb53f"} Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.652014 4799 generic.go:334] "Generic (PLEG): container finished" podID="048287bf-12a1-4fef-8fc2-7fa4686d31cc" containerID="b05205f84fe832dad3b0055359de22613d19535d2a4eca2500b46df4f8c794f4" exitCode=0 Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.652643 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4786v" event={"ID":"048287bf-12a1-4fef-8fc2-7fa4686d31cc","Type":"ContainerDied","Data":"b05205f84fe832dad3b0055359de22613d19535d2a4eca2500b46df4f8c794f4"} Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.652672 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4786v" event={"ID":"048287bf-12a1-4fef-8fc2-7fa4686d31cc","Type":"ContainerStarted","Data":"2f1fb1897dee773d66a1611a4d1d1c2694c8118ea7753b34a2dd1093c1e231aa"} Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.665018 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mffxh\" (UniqueName: \"kubernetes.io/projected/2ad01aa8-e719-4b4d-af75-0589792d8b3c-kube-api-access-mffxh\") pod \"redhat-operators-vlrmq\" (UID: \"2ad01aa8-e719-4b4d-af75-0589792d8b3c\") " pod="openshift-marketplace/redhat-operators-vlrmq" Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.679003 4799 generic.go:334] "Generic (PLEG): container finished" podID="f8195a22-f2dc-4a8e-bb85-abbef8d04ac3" containerID="178ae838de186c3ffda884436135132b26ff4e0ae12551b699f11212fbee6451" exitCode=0 Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.679542 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rmstw" event={"ID":"f8195a22-f2dc-4a8e-bb85-abbef8d04ac3","Type":"ContainerDied","Data":"178ae838de186c3ffda884436135132b26ff4e0ae12551b699f11212fbee6451"} Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.743485 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:14 crc kubenswrapper[4799]: E1010 16:34:14.744350 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:15.244331946 +0000 UTC m=+148.752656061 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.770120 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mkr4n"] Oct 10 16:34:14 crc kubenswrapper[4799]: W1010 16:34:14.778300 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podce937bef_231c_4353_8af2_f8e4517c68c4.slice/crio-f009f8392490988ab6a6cb0835896ac639535b9a9b68261b5e7f11ff5d27b1ae WatchSource:0}: Error finding container f009f8392490988ab6a6cb0835896ac639535b9a9b68261b5e7f11ff5d27b1ae: Status 404 returned error can't find the container with id f009f8392490988ab6a6cb0835896ac639535b9a9b68261b5e7f11ff5d27b1ae Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.796361 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vlrmq" Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.808676 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.812929 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-2ksq6" Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.845328 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:14 crc kubenswrapper[4799]: E1010 16:34:14.849682 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:15.349663588 +0000 UTC m=+148.857987693 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:14 crc kubenswrapper[4799]: I1010 16:34:14.950609 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:14 crc kubenswrapper[4799]: E1010 16:34:14.951505 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:15.451475412 +0000 UTC m=+148.959799527 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.053458 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:15 crc kubenswrapper[4799]: E1010 16:34:15.053925 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:15.553903801 +0000 UTC m=+149.062227916 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.054550 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.068730 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-qpw4g" Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.078057 4799 patch_prober.go:28] interesting pod/router-default-5444994796-qpw4g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 10 16:34:15 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld Oct 10 16:34:15 crc kubenswrapper[4799]: [+]process-running ok Oct 10 16:34:15 crc kubenswrapper[4799]: healthz check failed Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.078109 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qpw4g" podUID="6a574afe-31ee-4706-90c0-a9c477f5bce7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.155660 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:15 crc kubenswrapper[4799]: E1010 16:34:15.155976 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:15.655944801 +0000 UTC m=+149.164268916 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.156379 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:15 crc kubenswrapper[4799]: E1010 16:34:15.156684 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:15.656673719 +0000 UTC m=+149.164997834 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.173353 4799 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.188248 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vlrmq"] Oct 10 16:34:15 crc kubenswrapper[4799]: W1010 16:34:15.211415 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2ad01aa8_e719_4b4d_af75_0589792d8b3c.slice/crio-c54dbd5683d21fb9c373fc20436799dbc8f43217286cee672cc2e46a0e11431d WatchSource:0}: Error finding container c54dbd5683d21fb9c373fc20436799dbc8f43217286cee672cc2e46a0e11431d: Status 404 returned error can't find the container with id c54dbd5683d21fb9c373fc20436799dbc8f43217286cee672cc2e46a0e11431d Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.258050 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:15 crc kubenswrapper[4799]: E1010 16:34:15.258422 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-10 16:34:15.758407461 +0000 UTC m=+149.266731576 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.265129 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.265183 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.359528 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:15 crc kubenswrapper[4799]: E1010 16:34:15.360123 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:15.860109692 +0000 UTC m=+149.368433807 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.447380 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc" Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.463905 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mjjtc" Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.469431 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:15 crc kubenswrapper[4799]: E1010 16:34:15.469794 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-10-10 16:34:15.969777953 +0000 UTC m=+149.478102068 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.478388 4799 patch_prober.go:28] interesting pod/downloads-7954f5f757-plk2p container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.478442 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-plk2p" podUID="e56235b4-8348-4fae-af0a-639fcacfc997" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.478403 4799 patch_prober.go:28] interesting pod/downloads-7954f5f757-plk2p container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.478828 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-plk2p" podUID="e56235b4-8348-4fae-af0a-639fcacfc997" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.571290 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:15 crc kubenswrapper[4799]: E1010 16:34:15.572224 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-10 16:34:16.072207462 +0000 UTC m=+149.580531577 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-89gcz" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.595874 4799 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-10-10T16:34:15.17338955Z","Handler":null,"Name":""} Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.597801 4799 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.597843 4799 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.672909 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.680371 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.717386 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vlrmq" event={"ID":"2ad01aa8-e719-4b4d-af75-0589792d8b3c","Type":"ContainerStarted","Data":"c54dbd5683d21fb9c373fc20436799dbc8f43217286cee672cc2e46a0e11431d"} Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.730630 4799 generic.go:334] "Generic (PLEG): container finished" podID="ce937bef-231c-4353-8af2-f8e4517c68c4" containerID="12150c685523a851ac97553f2a50b4557bb79507e6ec0cc3d50aabaaf8538a84" exitCode=0 Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.730725 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mkr4n" event={"ID":"ce937bef-231c-4353-8af2-f8e4517c68c4","Type":"ContainerDied","Data":"12150c685523a851ac97553f2a50b4557bb79507e6ec0cc3d50aabaaf8538a84"} Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.730773 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mkr4n" event={"ID":"ce937bef-231c-4353-8af2-f8e4517c68c4","Type":"ContainerStarted","Data":"f009f8392490988ab6a6cb0835896ac639535b9a9b68261b5e7f11ff5d27b1ae"} Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.742261 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"dc89d1a8-4954-42af-b9ab-6fc1b88f9e02","Type":"ContainerStarted","Data":"6b442399d98530df7df00c183eb96e8950a65ef74738999f0ef5ca73b299756e"} Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.749175 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-9g6sf" event={"ID":"ae03eb1e-0698-4aef-9a6f-7708d92adf66","Type":"ContainerStarted","Data":"196eece846942879bb445a990286cbdf390c9ffcbb616506e1b25b7b8fcbedfe"} Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.774889 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.779323 4799 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.779370 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.819843 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-89gcz\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:15 crc kubenswrapper[4799]: I1010 16:34:15.852451 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.075827 4799 patch_prober.go:28] interesting pod/router-default-5444994796-qpw4g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 10 16:34:16 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld Oct 10 16:34:16 crc kubenswrapper[4799]: [+]process-running ok Oct 10 16:34:16 crc kubenswrapper[4799]: healthz check failed Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.075871 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qpw4g" podUID="6a574afe-31ee-4706-90c0-a9c477f5bce7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.095489 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-8lvfs" Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.096081 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-8lvfs" Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.098888 4799 patch_prober.go:28] interesting pod/console-f9d7485db-8lvfs container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.11:8443/health\": dial tcp 10.217.0.11:8443: connect: connection refused" start-of-body= Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.098921 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-8lvfs" podUID="2866fbf1-3a49-4e4c-867b-86a40ae85ebe" containerName="console" probeResult="failure" output="Get \"https://10.217.0.11:8443/health\": dial tcp 10.217.0.11:8443: connect: connection refused" Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.100884 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335230-q6qww" Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.182633 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/25cd298f-ccde-4805-801d-2d486c7e45da-secret-volume\") pod \"25cd298f-ccde-4805-801d-2d486c7e45da\" (UID: \"25cd298f-ccde-4805-801d-2d486c7e45da\") " Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.182700 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jcnfx\" (UniqueName: \"kubernetes.io/projected/25cd298f-ccde-4805-801d-2d486c7e45da-kube-api-access-jcnfx\") pod \"25cd298f-ccde-4805-801d-2d486c7e45da\" (UID: \"25cd298f-ccde-4805-801d-2d486c7e45da\") " Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.182738 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/25cd298f-ccde-4805-801d-2d486c7e45da-config-volume\") pod \"25cd298f-ccde-4805-801d-2d486c7e45da\" (UID: \"25cd298f-ccde-4805-801d-2d486c7e45da\") " Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.186267 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25cd298f-ccde-4805-801d-2d486c7e45da-config-volume" (OuterVolumeSpecName: "config-volume") pod "25cd298f-ccde-4805-801d-2d486c7e45da" (UID: "25cd298f-ccde-4805-801d-2d486c7e45da"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.190530 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25cd298f-ccde-4805-801d-2d486c7e45da-kube-api-access-jcnfx" (OuterVolumeSpecName: "kube-api-access-jcnfx") pod "25cd298f-ccde-4805-801d-2d486c7e45da" (UID: "25cd298f-ccde-4805-801d-2d486c7e45da"). InnerVolumeSpecName "kube-api-access-jcnfx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.191816 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25cd298f-ccde-4805-801d-2d486c7e45da-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "25cd298f-ccde-4805-801d-2d486c7e45da" (UID: "25cd298f-ccde-4805-801d-2d486c7e45da"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.210865 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-89gcz"] Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.284808 4799 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/25cd298f-ccde-4805-801d-2d486c7e45da-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.284844 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jcnfx\" (UniqueName: \"kubernetes.io/projected/25cd298f-ccde-4805-801d-2d486c7e45da-kube-api-access-jcnfx\") on node \"crc\" DevicePath \"\"" Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.284856 4799 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/25cd298f-ccde-4805-801d-2d486c7e45da-config-volume\") on node \"crc\" DevicePath \"\"" Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.318132 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-5r8jg" Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.386721 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-mn6dn" Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.767085 4799 generic.go:334] "Generic (PLEG): container finished" podID="dc89d1a8-4954-42af-b9ab-6fc1b88f9e02" containerID="6b442399d98530df7df00c183eb96e8950a65ef74738999f0ef5ca73b299756e" exitCode=0 Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.767183 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"dc89d1a8-4954-42af-b9ab-6fc1b88f9e02","Type":"ContainerDied","Data":"6b442399d98530df7df00c183eb96e8950a65ef74738999f0ef5ca73b299756e"} Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.774613 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-9g6sf" event={"ID":"ae03eb1e-0698-4aef-9a6f-7708d92adf66","Type":"ContainerStarted","Data":"4b00ca54b8b44e0f32864ef64dc7115179bd3eab255bb62c40f9f9aa9aa66693"} Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.778403 4799 generic.go:334] "Generic (PLEG): container finished" podID="2ad01aa8-e719-4b4d-af75-0589792d8b3c" containerID="b70c53ec486ab9a218be0bf65863d1644e18fe9e7cae05af297e3cf12c45d6c2" exitCode=0 Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.778530 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vlrmq" event={"ID":"2ad01aa8-e719-4b4d-af75-0589792d8b3c","Type":"ContainerDied","Data":"b70c53ec486ab9a218be0bf65863d1644e18fe9e7cae05af297e3cf12c45d6c2"} Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.783420 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" event={"ID":"60ab14da-0f2e-48cc-873a-44eaf0662ed8","Type":"ContainerStarted","Data":"18d970602ee3f1139c249a21549627b3ee4aef7552b5efcc210e434fbb5e61e7"} Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.783483 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" 
event={"ID":"60ab14da-0f2e-48cc-873a-44eaf0662ed8","Type":"ContainerStarted","Data":"4e39e4bedf5e43eafe0e998497bf4f65e6e7724b2450001ac981e655e3f7b758"} Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.784587 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.787580 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335230-q6qww" Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.790079 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335230-q6qww" event={"ID":"25cd298f-ccde-4805-801d-2d486c7e45da","Type":"ContainerDied","Data":"ca4b4fc89f1aea2fe5397ec36f16bdd0f479f84c3eec176261510eee8295157f"} Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.790145 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ca4b4fc89f1aea2fe5397ec36f16bdd0f479f84c3eec176261510eee8295157f" Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.811931 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-9g6sf" podStartSLOduration=13.811906879 podStartE2EDuration="13.811906879s" podCreationTimestamp="2025-10-10 16:34:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:16.808983625 +0000 UTC m=+150.317307740" watchObservedRunningTime="2025-10-10 16:34:16.811906879 +0000 UTC m=+150.320231014" Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.905990 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" podStartSLOduration=127.905969707 podStartE2EDuration="2m7.905969707s" podCreationTimestamp="2025-10-10 16:32:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:34:16.862246346 +0000 UTC m=+150.370570461" watchObservedRunningTime="2025-10-10 16:34:16.905969707 +0000 UTC m=+150.414293812" Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.910820 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 10 16:34:16 crc kubenswrapper[4799]: E1010 16:34:16.922053 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25cd298f-ccde-4805-801d-2d486c7e45da" containerName="collect-profiles" Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.922105 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="25cd298f-ccde-4805-801d-2d486c7e45da" containerName="collect-profiles" Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.922331 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="25cd298f-ccde-4805-801d-2d486c7e45da" containerName="collect-profiles" Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.925051 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.933385 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.935852 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Oct 10 16:34:16 crc kubenswrapper[4799]: I1010 16:34:16.936102 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Oct 10 16:34:17 crc kubenswrapper[4799]: I1010 16:34:17.004690 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d3e5ea0c-87cd-48f2-af88-2256afe86fe9-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"d3e5ea0c-87cd-48f2-af88-2256afe86fe9\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 10 16:34:17 crc kubenswrapper[4799]: I1010 16:34:17.004824 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d3e5ea0c-87cd-48f2-af88-2256afe86fe9-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"d3e5ea0c-87cd-48f2-af88-2256afe86fe9\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 10 16:34:17 crc kubenswrapper[4799]: I1010 16:34:17.071321 4799 patch_prober.go:28] interesting pod/router-default-5444994796-qpw4g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 10 16:34:17 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld Oct 10 16:34:17 crc kubenswrapper[4799]: [+]process-running ok Oct 10 16:34:17 crc kubenswrapper[4799]: healthz check failed Oct 10 16:34:17 crc kubenswrapper[4799]: I1010 16:34:17.071378 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qpw4g" podUID="6a574afe-31ee-4706-90c0-a9c477f5bce7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 10 16:34:17 crc kubenswrapper[4799]: I1010 16:34:17.103484 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 10 16:34:17 crc kubenswrapper[4799]: I1010 16:34:17.107288 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d3e5ea0c-87cd-48f2-af88-2256afe86fe9-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"d3e5ea0c-87cd-48f2-af88-2256afe86fe9\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 10 16:34:17 crc kubenswrapper[4799]: I1010 16:34:17.107347 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d3e5ea0c-87cd-48f2-af88-2256afe86fe9-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"d3e5ea0c-87cd-48f2-af88-2256afe86fe9\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 10 16:34:17 crc kubenswrapper[4799]: I1010 16:34:17.107517 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d3e5ea0c-87cd-48f2-af88-2256afe86fe9-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"d3e5ea0c-87cd-48f2-af88-2256afe86fe9\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 10 16:34:17 crc kubenswrapper[4799]: I1010 16:34:17.153829 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d3e5ea0c-87cd-48f2-af88-2256afe86fe9-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"d3e5ea0c-87cd-48f2-af88-2256afe86fe9\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 10 16:34:17 crc kubenswrapper[4799]: I1010 16:34:17.209012 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/dc89d1a8-4954-42af-b9ab-6fc1b88f9e02-kubelet-dir\") pod \"dc89d1a8-4954-42af-b9ab-6fc1b88f9e02\" (UID: \"dc89d1a8-4954-42af-b9ab-6fc1b88f9e02\") " Oct 10 16:34:17 crc kubenswrapper[4799]: I1010 16:34:17.209151 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dc89d1a8-4954-42af-b9ab-6fc1b88f9e02-kube-api-access\") pod \"dc89d1a8-4954-42af-b9ab-6fc1b88f9e02\" (UID: \"dc89d1a8-4954-42af-b9ab-6fc1b88f9e02\") " Oct 10 16:34:17 crc kubenswrapper[4799]: I1010 16:34:17.210591 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/dc89d1a8-4954-42af-b9ab-6fc1b88f9e02-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "dc89d1a8-4954-42af-b9ab-6fc1b88f9e02" (UID: "dc89d1a8-4954-42af-b9ab-6fc1b88f9e02"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 16:34:17 crc kubenswrapper[4799]: I1010 16:34:17.221837 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc89d1a8-4954-42af-b9ab-6fc1b88f9e02-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "dc89d1a8-4954-42af-b9ab-6fc1b88f9e02" (UID: "dc89d1a8-4954-42af-b9ab-6fc1b88f9e02"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:34:17 crc kubenswrapper[4799]: I1010 16:34:17.247937 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 10 16:34:17 crc kubenswrapper[4799]: I1010 16:34:17.311971 4799 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/dc89d1a8-4954-42af-b9ab-6fc1b88f9e02-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 10 16:34:17 crc kubenswrapper[4799]: I1010 16:34:17.312010 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dc89d1a8-4954-42af-b9ab-6fc1b88f9e02-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 10 16:34:17 crc kubenswrapper[4799]: I1010 16:34:17.413461 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:34:17 crc kubenswrapper[4799]: I1010 16:34:17.413962 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:34:17 crc kubenswrapper[4799]: I1010 16:34:17.414044 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:34:17 crc kubenswrapper[4799]: I1010 16:34:17.414968 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:34:17 crc kubenswrapper[4799]: I1010 16:34:17.415290 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:34:17 crc kubenswrapper[4799]: I1010 16:34:17.418349 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:34:17 crc kubenswrapper[4799]: I1010 16:34:17.418735 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: 
\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:34:17 crc kubenswrapper[4799]: I1010 16:34:17.421119 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:34:17 crc kubenswrapper[4799]: I1010 16:34:17.440874 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Oct 10 16:34:17 crc kubenswrapper[4799]: I1010 16:34:17.546691 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:34:17 crc kubenswrapper[4799]: I1010 16:34:17.626876 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 10 16:34:17 crc kubenswrapper[4799]: I1010 16:34:17.644044 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 10 16:34:17 crc kubenswrapper[4799]: I1010 16:34:17.817858 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"dc89d1a8-4954-42af-b9ab-6fc1b88f9e02","Type":"ContainerDied","Data":"a87a3c27b4e06617fee78d260f3d276b57dde930dd8d134208aa9605d5e5db47"} Oct 10 16:34:17 crc kubenswrapper[4799]: I1010 16:34:17.818197 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a87a3c27b4e06617fee78d260f3d276b57dde930dd8d134208aa9605d5e5db47" Oct 10 16:34:17 crc kubenswrapper[4799]: I1010 16:34:17.817967 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 10 16:34:17 crc kubenswrapper[4799]: I1010 16:34:17.915327 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 10 16:34:17 crc kubenswrapper[4799]: W1010 16:34:17.976629 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podd3e5ea0c_87cd_48f2_af88_2256afe86fe9.slice/crio-028865b1c1124300638b93b3b6ea55bcf4f7e655e7eae84afad1400ea174dba8 WatchSource:0}: Error finding container 028865b1c1124300638b93b3b6ea55bcf4f7e655e7eae84afad1400ea174dba8: Status 404 returned error can't find the container with id 028865b1c1124300638b93b3b6ea55bcf4f7e655e7eae84afad1400ea174dba8 Oct 10 16:34:18 crc kubenswrapper[4799]: I1010 16:34:18.089648 4799 patch_prober.go:28] interesting pod/router-default-5444994796-qpw4g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 10 16:34:18 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld Oct 10 16:34:18 crc kubenswrapper[4799]: [+]process-running ok Oct 10 16:34:18 crc kubenswrapper[4799]: healthz check failed Oct 10 16:34:18 crc kubenswrapper[4799]: I1010 16:34:18.089710 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qpw4g" podUID="6a574afe-31ee-4706-90c0-a9c477f5bce7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 10 16:34:18 crc kubenswrapper[4799]: W1010 16:34:18.145143 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-b551926efabef0284c8d360bb5ed2be91c83f35293d72439370511ec1f3d36f7 WatchSource:0}: Error finding container b551926efabef0284c8d360bb5ed2be91c83f35293d72439370511ec1f3d36f7: Status 404 returned error can't find the container with id b551926efabef0284c8d360bb5ed2be91c83f35293d72439370511ec1f3d36f7 Oct 10 16:34:18 crc kubenswrapper[4799]: I1010 16:34:18.397406 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-r7x5v" Oct 10 16:34:18 crc kubenswrapper[4799]: W1010 16:34:18.600925 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-76bde7a9b3fc5297afff6a8434822ea45b5fc40b01d18945f271a38787ca0115 WatchSource:0}: Error finding container 76bde7a9b3fc5297afff6a8434822ea45b5fc40b01d18945f271a38787ca0115: Status 404 returned error can't find the container with id 76bde7a9b3fc5297afff6a8434822ea45b5fc40b01d18945f271a38787ca0115 Oct 10 16:34:18 crc kubenswrapper[4799]: W1010 16:34:18.613064 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-31bb81f0520d3489f7dcfae2ab661e985b307cfef8401d8cb9f9c4ab2704a232 WatchSource:0}: Error finding container 31bb81f0520d3489f7dcfae2ab661e985b307cfef8401d8cb9f9c4ab2704a232: Status 404 returned error can't find the container with id 31bb81f0520d3489f7dcfae2ab661e985b307cfef8401d8cb9f9c4ab2704a232 Oct 10 16:34:18 crc kubenswrapper[4799]: I1010 16:34:18.902213 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"403e6ee300ff245853575cd65e5842ffb848137208ee58e0d97675fe02b4c9e1"} Oct 10 16:34:18 crc kubenswrapper[4799]: I1010 16:34:18.902258 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"b551926efabef0284c8d360bb5ed2be91c83f35293d72439370511ec1f3d36f7"} Oct 10 16:34:18 crc kubenswrapper[4799]: I1010 16:34:18.911566 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"31bb81f0520d3489f7dcfae2ab661e985b307cfef8401d8cb9f9c4ab2704a232"} Oct 10 16:34:18 crc kubenswrapper[4799]: I1010 16:34:18.922783 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"76bde7a9b3fc5297afff6a8434822ea45b5fc40b01d18945f271a38787ca0115"} Oct 10 16:34:18 crc kubenswrapper[4799]: I1010 16:34:18.924640 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"d3e5ea0c-87cd-48f2-af88-2256afe86fe9","Type":"ContainerStarted","Data":"028865b1c1124300638b93b3b6ea55bcf4f7e655e7eae84afad1400ea174dba8"} Oct 10 16:34:19 crc kubenswrapper[4799]: I1010 16:34:19.080010 4799 patch_prober.go:28] interesting pod/router-default-5444994796-qpw4g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 10 16:34:19 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld Oct 10 16:34:19 crc kubenswrapper[4799]: [+]process-running ok Oct 10 16:34:19 crc kubenswrapper[4799]: healthz check failed Oct 10 16:34:19 crc kubenswrapper[4799]: I1010 16:34:19.080066 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qpw4g" podUID="6a574afe-31ee-4706-90c0-a9c477f5bce7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 10 16:34:20 crc kubenswrapper[4799]: I1010 16:34:20.069872 4799 patch_prober.go:28] interesting pod/router-default-5444994796-qpw4g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 10 16:34:20 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld Oct 10 16:34:20 crc kubenswrapper[4799]: [+]process-running ok Oct 10 16:34:20 crc kubenswrapper[4799]: healthz check failed Oct 10 16:34:20 crc kubenswrapper[4799]: I1010 16:34:20.070675 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qpw4g" podUID="6a574afe-31ee-4706-90c0-a9c477f5bce7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 10 16:34:20 crc kubenswrapper[4799]: I1010 16:34:20.941352 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"d3e5ea0c-87cd-48f2-af88-2256afe86fe9","Type":"ContainerStarted","Data":"bf16c90a1c9d52b21ee87c0573e9605bb43f2ee89e64b6b3acd0550b8a7641d4"} Oct 10 
16:34:20 crc kubenswrapper[4799]: I1010 16:34:20.943903 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"24cca8977b6576486d3ea33df37e751e30ed9bf97638c6c336548bbf710ae1d0"} Oct 10 16:34:20 crc kubenswrapper[4799]: I1010 16:34:20.943977 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 10 16:34:21 crc kubenswrapper[4799]: I1010 16:34:21.069896 4799 patch_prober.go:28] interesting pod/router-default-5444994796-qpw4g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 10 16:34:21 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld Oct 10 16:34:21 crc kubenswrapper[4799]: [+]process-running ok Oct 10 16:34:21 crc kubenswrapper[4799]: healthz check failed Oct 10 16:34:21 crc kubenswrapper[4799]: I1010 16:34:21.069949 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qpw4g" podUID="6a574afe-31ee-4706-90c0-a9c477f5bce7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 10 16:34:21 crc kubenswrapper[4799]: I1010 16:34:21.949379 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"f8d3cb00e76220b4231102383e3be67dcf4a59536a13ac27bdbe3f3e74073ef0"} Oct 10 16:34:21 crc kubenswrapper[4799]: I1010 16:34:21.951310 4799 generic.go:334] "Generic (PLEG): container finished" podID="d3e5ea0c-87cd-48f2-af88-2256afe86fe9" containerID="bf16c90a1c9d52b21ee87c0573e9605bb43f2ee89e64b6b3acd0550b8a7641d4" exitCode=0 Oct 10 16:34:21 crc kubenswrapper[4799]: I1010 16:34:21.952623 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"d3e5ea0c-87cd-48f2-af88-2256afe86fe9","Type":"ContainerDied","Data":"bf16c90a1c9d52b21ee87c0573e9605bb43f2ee89e64b6b3acd0550b8a7641d4"} Oct 10 16:34:22 crc kubenswrapper[4799]: I1010 16:34:22.068661 4799 patch_prober.go:28] interesting pod/router-default-5444994796-qpw4g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 10 16:34:22 crc kubenswrapper[4799]: [+]has-synced ok Oct 10 16:34:22 crc kubenswrapper[4799]: [+]process-running ok Oct 10 16:34:22 crc kubenswrapper[4799]: healthz check failed Oct 10 16:34:22 crc kubenswrapper[4799]: I1010 16:34:22.068708 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qpw4g" podUID="6a574afe-31ee-4706-90c0-a9c477f5bce7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 10 16:34:23 crc kubenswrapper[4799]: I1010 16:34:23.070868 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-qpw4g" Oct 10 16:34:23 crc kubenswrapper[4799]: I1010 16:34:23.074162 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-qpw4g" Oct 10 16:34:23 crc kubenswrapper[4799]: I1010 16:34:23.256152 4799 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 10 16:34:23 crc kubenswrapper[4799]: I1010 16:34:23.312973 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d3e5ea0c-87cd-48f2-af88-2256afe86fe9-kube-api-access\") pod \"d3e5ea0c-87cd-48f2-af88-2256afe86fe9\" (UID: \"d3e5ea0c-87cd-48f2-af88-2256afe86fe9\") " Oct 10 16:34:23 crc kubenswrapper[4799]: I1010 16:34:23.313009 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d3e5ea0c-87cd-48f2-af88-2256afe86fe9-kubelet-dir\") pod \"d3e5ea0c-87cd-48f2-af88-2256afe86fe9\" (UID: \"d3e5ea0c-87cd-48f2-af88-2256afe86fe9\") " Oct 10 16:34:23 crc kubenswrapper[4799]: I1010 16:34:23.313288 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d3e5ea0c-87cd-48f2-af88-2256afe86fe9-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "d3e5ea0c-87cd-48f2-af88-2256afe86fe9" (UID: "d3e5ea0c-87cd-48f2-af88-2256afe86fe9"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 16:34:23 crc kubenswrapper[4799]: I1010 16:34:23.317705 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3e5ea0c-87cd-48f2-af88-2256afe86fe9-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "d3e5ea0c-87cd-48f2-af88-2256afe86fe9" (UID: "d3e5ea0c-87cd-48f2-af88-2256afe86fe9"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:34:23 crc kubenswrapper[4799]: I1010 16:34:23.414314 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d3e5ea0c-87cd-48f2-af88-2256afe86fe9-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 10 16:34:23 crc kubenswrapper[4799]: I1010 16:34:23.414344 4799 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d3e5ea0c-87cd-48f2-af88-2256afe86fe9-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 10 16:34:23 crc kubenswrapper[4799]: I1010 16:34:23.964828 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"d3e5ea0c-87cd-48f2-af88-2256afe86fe9","Type":"ContainerDied","Data":"028865b1c1124300638b93b3b6ea55bcf4f7e655e7eae84afad1400ea174dba8"} Oct 10 16:34:23 crc kubenswrapper[4799]: I1010 16:34:23.964868 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="028865b1c1124300638b93b3b6ea55bcf4f7e655e7eae84afad1400ea174dba8" Oct 10 16:34:23 crc kubenswrapper[4799]: I1010 16:34:23.964835 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 10 16:34:25 crc kubenswrapper[4799]: I1010 16:34:25.477920 4799 patch_prober.go:28] interesting pod/downloads-7954f5f757-plk2p container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Oct 10 16:34:25 crc kubenswrapper[4799]: I1010 16:34:25.478919 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-plk2p" podUID="e56235b4-8348-4fae-af0a-639fcacfc997" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Oct 10 16:34:25 crc kubenswrapper[4799]: I1010 16:34:25.478197 4799 patch_prober.go:28] interesting pod/downloads-7954f5f757-plk2p container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Oct 10 16:34:25 crc kubenswrapper[4799]: I1010 16:34:25.479106 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-plk2p" podUID="e56235b4-8348-4fae-af0a-639fcacfc997" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Oct 10 16:34:26 crc kubenswrapper[4799]: I1010 16:34:26.100365 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-8lvfs" Oct 10 16:34:26 crc kubenswrapper[4799]: I1010 16:34:26.122108 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-8lvfs" Oct 10 16:34:31 crc kubenswrapper[4799]: I1010 16:34:31.742240 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7903c578-d05e-4ad7-8fd9-f438abf4a085-metrics-certs\") pod \"network-metrics-daemon-k6hch\" (UID: \"7903c578-d05e-4ad7-8fd9-f438abf4a085\") " pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:34:31 crc kubenswrapper[4799]: I1010 16:34:31.748406 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7903c578-d05e-4ad7-8fd9-f438abf4a085-metrics-certs\") pod \"network-metrics-daemon-k6hch\" (UID: \"7903c578-d05e-4ad7-8fd9-f438abf4a085\") " pod="openshift-multus/network-metrics-daemon-k6hch" Oct 10 16:34:31 crc kubenswrapper[4799]: I1010 16:34:31.939391 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-k6hch"
Oct 10 16:34:35 crc kubenswrapper[4799]: I1010 16:34:35.478414 4799 patch_prober.go:28] interesting pod/downloads-7954f5f757-plk2p container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body=
Oct 10 16:34:35 crc kubenswrapper[4799]: I1010 16:34:35.478420 4799 patch_prober.go:28] interesting pod/downloads-7954f5f757-plk2p container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body=
Oct 10 16:34:35 crc kubenswrapper[4799]: I1010 16:34:35.478794 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-plk2p" podUID="e56235b4-8348-4fae-af0a-639fcacfc997" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused"
Oct 10 16:34:35 crc kubenswrapper[4799]: I1010 16:34:35.478828 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-plk2p" podUID="e56235b4-8348-4fae-af0a-639fcacfc997" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused"
Oct 10 16:34:35 crc kubenswrapper[4799]: I1010 16:34:35.478855 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-console/downloads-7954f5f757-plk2p"
Oct 10 16:34:35 crc kubenswrapper[4799]: I1010 16:34:35.479373 4799 patch_prober.go:28] interesting pod/downloads-7954f5f757-plk2p container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body=
Oct 10 16:34:35 crc kubenswrapper[4799]: I1010 16:34:35.479435 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-plk2p" podUID="e56235b4-8348-4fae-af0a-639fcacfc997" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused"
Oct 10 16:34:35 crc kubenswrapper[4799]: I1010 16:34:35.479534 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="download-server" containerStatusID={"Type":"cri-o","ID":"2f01856d95e01010ef3f7f9e81e71ca39baddc738724c4daf1413e0b0bf5fdb9"} pod="openshift-console/downloads-7954f5f757-plk2p" containerMessage="Container download-server failed liveness probe, will be restarted"
Oct 10 16:34:35 crc kubenswrapper[4799]: I1010 16:34:35.479653 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/downloads-7954f5f757-plk2p" podUID="e56235b4-8348-4fae-af0a-639fcacfc997" containerName="download-server" containerID="cri-o://2f01856d95e01010ef3f7f9e81e71ca39baddc738724c4daf1413e0b0bf5fdb9" gracePeriod=2
Oct 10 16:34:35 crc kubenswrapper[4799]: I1010 16:34:35.859968 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-89gcz"
Oct 10 16:34:37 crc kubenswrapper[4799]: I1010 16:34:37.044978 4799 generic.go:334] "Generic (PLEG): container finished" podID="e56235b4-8348-4fae-af0a-639fcacfc997" containerID="2f01856d95e01010ef3f7f9e81e71ca39baddc738724c4daf1413e0b0bf5fdb9" exitCode=0
Oct 10 16:34:37 crc kubenswrapper[4799]: I1010 16:34:37.045053 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-plk2p" event={"ID":"e56235b4-8348-4fae-af0a-639fcacfc997","Type":"ContainerDied","Data":"2f01856d95e01010ef3f7f9e81e71ca39baddc738724c4daf1413e0b0bf5fdb9"}
Oct 10 16:34:45 crc kubenswrapper[4799]: I1010 16:34:45.249164 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 10 16:34:45 crc kubenswrapper[4799]: I1010 16:34:45.249559 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 10 16:34:45 crc kubenswrapper[4799]: I1010 16:34:45.479306 4799 patch_prober.go:28] interesting pod/downloads-7954f5f757-plk2p container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body=
Oct 10 16:34:45 crc kubenswrapper[4799]: I1010 16:34:45.479397 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-plk2p" podUID="e56235b4-8348-4fae-af0a-639fcacfc997" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused"
Oct 10 16:34:46 crc kubenswrapper[4799]: I1010 16:34:46.385975 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8qhsm"
Oct 10 16:34:47 crc kubenswrapper[4799]: E1010 16:34:47.454428 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18"
Oct 10 16:34:47 crc kubenswrapper[4799]: E1010 16:34:47.454727 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mffxh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-vlrmq_openshift-marketplace(2ad01aa8-e719-4b4d-af75-0589792d8b3c): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Oct 10 16:34:47 crc kubenswrapper[4799]: E1010 16:34:47.456092 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-vlrmq" podUID="2ad01aa8-e719-4b4d-af75-0589792d8b3c"
Oct 10 16:34:49 crc kubenswrapper[4799]: E1010 16:34:49.490264 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-vlrmq" podUID="2ad01aa8-e719-4b4d-af75-0589792d8b3c"
Oct 10 16:34:55 crc kubenswrapper[4799]: I1010 16:34:55.478258 4799 patch_prober.go:28] interesting pod/downloads-7954f5f757-plk2p container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body=
Oct 10 16:34:55 crc kubenswrapper[4799]: I1010 16:34:55.478649 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-plk2p" podUID="e56235b4-8348-4fae-af0a-639fcacfc997" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused"
Oct 10 16:34:56 crc kubenswrapper[4799]: E1010 16:34:56.160875 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18"
Oct 10 16:34:56 crc kubenswrapper[4799]: E1010 16:34:56.161064 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7zlqs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-c9chn_openshift-marketplace(0a2e52eb-b82d-4869-ab9b-1c783d1c58fb): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Oct 10 16:34:56 crc kubenswrapper[4799]: E1010 16:34:56.162518 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-c9chn" podUID="0a2e52eb-b82d-4869-ab9b-1c783d1c58fb"
Oct 10 16:34:57 crc kubenswrapper[4799]: E1010 16:34:57.493788 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-c9chn" podUID="0a2e52eb-b82d-4869-ab9b-1c783d1c58fb"
Oct 10 16:34:57 crc kubenswrapper[4799]: I1010 16:34:57.569487 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 10 16:34:57 crc kubenswrapper[4799]: E1010 16:34:57.607425 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
Oct 10 16:34:57 crc kubenswrapper[4799]: E1010 16:34:57.607659 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9jsjj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-z5z55_openshift-marketplace(ec4988cf-fb0f-4df8-8f2d-748a5459bbcc): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Oct 10 16:34:57 crc kubenswrapper[4799]: E1010 16:34:57.608770 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-z5z55" podUID="ec4988cf-fb0f-4df8-8f2d-748a5459bbcc"
Oct 10 16:34:58 crc kubenswrapper[4799]: E1010 16:34:58.278022 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Oct 10 16:34:58 crc kubenswrapper[4799]: E1010 16:34:58.278448 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-65skb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-vtc78_openshift-marketplace(122cb808-3c4b-4829-8ac3-3419cf9b6bdd): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Oct 10 16:34:58 crc kubenswrapper[4799]: E1010 16:34:58.279555 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-vtc78" podUID="122cb808-3c4b-4829-8ac3-3419cf9b6bdd"
Oct 10 16:34:58 crc kubenswrapper[4799]: E1010 16:34:58.306865 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18"
Oct 10 16:34:58 crc kubenswrapper[4799]: E1010 16:34:58.307012 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-59zkz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-5d7rl_openshift-marketplace(89264b95-de02-4ba2-a26e-121910a2b2ff): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Oct 10 16:34:58 crc kubenswrapper[4799]: E1010 16:34:58.308250 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-5d7rl" podUID="89264b95-de02-4ba2-a26e-121910a2b2ff"
Oct 10 16:34:58 crc kubenswrapper[4799]: E1010 16:34:58.347546 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
Oct 10 16:34:58 crc kubenswrapper[4799]: E1010 16:34:58.347705 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cqfsd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-rmstw_openshift-marketplace(f8195a22-f2dc-4a8e-bb85-abbef8d04ac3): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Oct 10 16:34:58 crc kubenswrapper[4799]: E1010 16:34:58.349675 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-rmstw" podUID="f8195a22-f2dc-4a8e-bb85-abbef8d04ac3"
Oct 10 16:34:58 crc kubenswrapper[4799]: E1010 16:34:58.361991 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Oct 10 16:34:58 crc kubenswrapper[4799]: E1010 16:34:58.362172 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ch4sf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-4786v_openshift-marketplace(048287bf-12a1-4fef-8fc2-7fa4686d31cc): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Oct 10 16:34:58 crc kubenswrapper[4799]: E1010 16:34:58.363293 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-4786v" podUID="048287bf-12a1-4fef-8fc2-7fa4686d31cc"
Oct 10 16:34:58 crc kubenswrapper[4799]: I1010 16:34:58.593602 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-k6hch"]
Oct 10 16:34:58 crc kubenswrapper[4799]: W1010 16:34:58.600065 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7903c578_d05e_4ad7_8fd9_f438abf4a085.slice/crio-35b294446bdd2b6b6d525943d26f60b0ca3cc4dcd1f816098ebd0e75f6fe4b73 WatchSource:0}: Error finding container 35b294446bdd2b6b6d525943d26f60b0ca3cc4dcd1f816098ebd0e75f6fe4b73: Status 404 returned error can't find the container with id 35b294446bdd2b6b6d525943d26f60b0ca3cc4dcd1f816098ebd0e75f6fe4b73
Oct 10 16:34:59 crc kubenswrapper[4799]: I1010 16:34:59.183280 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-plk2p" event={"ID":"e56235b4-8348-4fae-af0a-639fcacfc997","Type":"ContainerStarted","Data":"5db6c022c65c2320fef6e05735678dfc0283f8a4d9676243ec27614afabd42ae"}
Oct 10 16:34:59 crc kubenswrapper[4799]: I1010 16:34:59.183606 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-plk2p"
Oct 10 16:34:59 crc kubenswrapper[4799]: I1010 16:34:59.184025 4799 patch_prober.go:28] interesting pod/downloads-7954f5f757-plk2p container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body=
Oct 10 16:34:59 crc kubenswrapper[4799]: I1010 16:34:59.184072 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-plk2p" podUID="e56235b4-8348-4fae-af0a-639fcacfc997" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused"
Oct 10 16:34:59 crc kubenswrapper[4799]: I1010 16:34:59.187050 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-k6hch" event={"ID":"7903c578-d05e-4ad7-8fd9-f438abf4a085","Type":"ContainerStarted","Data":"3bb703766311b9764c360a7de9c9c0ea3206911fe526946ade223bac30cfb62d"}
Oct 10 16:34:59 crc kubenswrapper[4799]: I1010 16:34:59.187092 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-k6hch" event={"ID":"7903c578-d05e-4ad7-8fd9-f438abf4a085","Type":"ContainerStarted","Data":"35b294446bdd2b6b6d525943d26f60b0ca3cc4dcd1f816098ebd0e75f6fe4b73"}
Oct 10 16:34:59 crc kubenswrapper[4799]: I1010 16:34:59.188963 4799 generic.go:334] "Generic (PLEG): container finished" podID="ce937bef-231c-4353-8af2-f8e4517c68c4" containerID="47764ec6487ef350dd82a777a93cbe1cd2f6242a74dc1e2a046bc6f1132d8c63" exitCode=0
Oct 10 16:34:59 crc kubenswrapper[4799]: I1010 16:34:59.190005 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mkr4n" event={"ID":"ce937bef-231c-4353-8af2-f8e4517c68c4","Type":"ContainerDied","Data":"47764ec6487ef350dd82a777a93cbe1cd2f6242a74dc1e2a046bc6f1132d8c63"}
Oct 10 16:34:59 crc kubenswrapper[4799]: E1010 16:34:59.190821 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-4786v" podUID="048287bf-12a1-4fef-8fc2-7fa4686d31cc"
Oct 10 16:34:59 crc kubenswrapper[4799]: E1010 16:34:59.192804 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-rmstw" podUID="f8195a22-f2dc-4a8e-bb85-abbef8d04ac3"
Oct 10 16:34:59 crc kubenswrapper[4799]: E1010 16:34:59.193599 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-vtc78" podUID="122cb808-3c4b-4829-8ac3-3419cf9b6bdd"
Oct 10 16:35:00 crc kubenswrapper[4799]: I1010 16:35:00.210283 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-k6hch" event={"ID":"7903c578-d05e-4ad7-8fd9-f438abf4a085","Type":"ContainerStarted","Data":"2ada3ade1a078f466a08f6cbf5ad523399420abf1b00a168b0152bd4d13d8303"}
Oct 10 16:35:00 crc kubenswrapper[4799]: I1010 16:35:00.214985 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mkr4n" event={"ID":"ce937bef-231c-4353-8af2-f8e4517c68c4","Type":"ContainerStarted","Data":"e31a8a21bb71845f1ffed4bf9908e006a8cd80c51c60cd28fd954067926d6c8f"}
Oct 10 16:35:00 crc kubenswrapper[4799]: I1010 16:35:00.215694 4799 patch_prober.go:28] interesting pod/downloads-7954f5f757-plk2p container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body=
Oct 10 16:35:00 crc kubenswrapper[4799]: I1010 16:35:00.215778 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-plk2p" podUID="e56235b4-8348-4fae-af0a-639fcacfc997" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused"
Oct 10 16:35:00 crc kubenswrapper[4799]: I1010 16:35:00.235208 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-k6hch" podStartSLOduration=171.2351875 podStartE2EDuration="2m51.2351875s" podCreationTimestamp="2025-10-10 16:32:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:35:00.232942694 +0000 UTC m=+193.741266889" watchObservedRunningTime="2025-10-10 16:35:00.2351875 +0000 UTC m=+193.743511625"
Oct 10 16:35:00 crc kubenswrapper[4799]: I1010 16:35:00.263901 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mkr4n" podStartSLOduration=2.326181237 podStartE2EDuration="46.263870822s" podCreationTimestamp="2025-10-10 16:34:14 +0000 UTC" firstStartedPulling="2025-10-10 16:34:15.733664298 +0000 UTC m=+149.241988413" lastFinishedPulling="2025-10-10 16:34:59.671353853 +0000 UTC m=+193.179677998" observedRunningTime="2025-10-10 16:35:00.262277072 +0000 UTC m=+193.770601267" watchObservedRunningTime="2025-10-10 16:35:00.263870822 +0000 UTC m=+193.772194977"
Oct 10 16:35:04 crc kubenswrapper[4799]: I1010 16:35:04.454608 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mkr4n"
Oct 10 16:35:04 crc kubenswrapper[4799]: I1010 16:35:04.456581 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mkr4n"
Oct 10 16:35:05 crc kubenswrapper[4799]: I1010 16:35:05.478424 4799 patch_prober.go:28] interesting pod/downloads-7954f5f757-plk2p container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body=
Oct 10 16:35:05 crc kubenswrapper[4799]: I1010 16:35:05.478466 4799 patch_prober.go:28] interesting pod/downloads-7954f5f757-plk2p container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body=
Oct 10 16:35:05 crc kubenswrapper[4799]: I1010 16:35:05.478498 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-plk2p" podUID="e56235b4-8348-4fae-af0a-639fcacfc997" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused"
Oct 10 16:35:05 crc kubenswrapper[4799]: I1010 16:35:05.478573 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-plk2p" podUID="e56235b4-8348-4fae-af0a-639fcacfc997" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused"
Oct 10 16:35:05 crc kubenswrapper[4799]: I1010 16:35:05.943284 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-mkr4n" podUID="ce937bef-231c-4353-8af2-f8e4517c68c4" containerName="registry-server" probeResult="failure" output=<
Oct 10 16:35:05 crc kubenswrapper[4799]: timeout: failed to connect service ":50051" within 1s
Oct 10 16:35:05 crc kubenswrapper[4799]: >
Oct 10 16:35:08 crc kubenswrapper[4799]: I1010 16:35:08.268705 4799 generic.go:334] "Generic (PLEG): container finished" podID="2ad01aa8-e719-4b4d-af75-0589792d8b3c" containerID="99e44ea00e43c7e98fb7aaa730690ad56169703065f7218ae502f25f12f2c1f5" exitCode=0
Oct 10 16:35:08 crc kubenswrapper[4799]: I1010 16:35:08.268778 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vlrmq" event={"ID":"2ad01aa8-e719-4b4d-af75-0589792d8b3c","Type":"ContainerDied","Data":"99e44ea00e43c7e98fb7aaa730690ad56169703065f7218ae502f25f12f2c1f5"}
Oct 10 16:35:10 crc kubenswrapper[4799]: I1010 16:35:10.285054 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vlrmq" event={"ID":"2ad01aa8-e719-4b4d-af75-0589792d8b3c","Type":"ContainerStarted","Data":"10d345db8c23583e33b654adb28fb4f085dcf396b1c856c1ca63c369800a9c24"}
Oct 10 16:35:10 crc kubenswrapper[4799]: I1010 16:35:10.313908 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vlrmq" podStartSLOduration=3.902382894 podStartE2EDuration="56.313885011s" podCreationTimestamp="2025-10-10 16:34:14 +0000 UTC" firstStartedPulling="2025-10-10 16:34:16.781226936 +0000 UTC m=+150.289551051" lastFinishedPulling="2025-10-10 16:35:09.192729033 +0000 UTC m=+202.701053168" observedRunningTime="2025-10-10 16:35:10.312590917 +0000 UTC m=+203.820915112" watchObservedRunningTime="2025-10-10 16:35:10.313885011 +0000 UTC m=+203.822209136"
Oct 10 16:35:14 crc kubenswrapper[4799]: I1010 16:35:14.606503 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mkr4n"
Oct 10 16:35:14 crc kubenswrapper[4799]: I1010 16:35:14.678449 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mkr4n"
Oct 10 16:35:14 crc kubenswrapper[4799]: I1010 16:35:14.797031 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vlrmq"
Oct 10 16:35:14 crc kubenswrapper[4799]: I1010 16:35:14.797106 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vlrmq"
Oct 10 16:35:15 crc kubenswrapper[4799]: I1010 16:35:15.248968 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 10 16:35:15 crc kubenswrapper[4799]: I1010 16:35:15.249313 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 10 16:35:15 crc kubenswrapper[4799]: I1010 16:35:15.249369 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc"
Oct 10 16:35:15 crc kubenswrapper[4799]: I1010 16:35:15.250147 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5"} pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 10 16:35:15 crc kubenswrapper[4799]: I1010 16:35:15.250228 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" containerID="cri-o://0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5" gracePeriod=600
Oct 10 16:35:15 crc kubenswrapper[4799]: I1010 16:35:15.501046 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-plk2p"
Oct 10 16:35:15 crc kubenswrapper[4799]: I1010 16:35:15.858119 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-vlrmq" podUID="2ad01aa8-e719-4b4d-af75-0589792d8b3c" containerName="registry-server" probeResult="failure" output=<
Oct 10 16:35:15 crc kubenswrapper[4799]: timeout: failed to connect service ":50051" within 1s
Oct 10 16:35:15 crc kubenswrapper[4799]: >
Oct 10 16:35:16 crc kubenswrapper[4799]: I1010 16:35:16.325633 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerDied","Data":"0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5"}
Oct 10 16:35:16 crc kubenswrapper[4799]: I1010 16:35:16.325512 4799 generic.go:334] "Generic (PLEG): container finished" podID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerID="0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5" exitCode=0
Oct 10 16:35:18 crc kubenswrapper[4799]: I1010 16:35:18.339401 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"a685c745539e6b5d6ae1c99d911448ed7e9748dc5640aa9ad19005d1a2df7456"}
Oct 10 16:35:18 crc kubenswrapper[4799]: I1010 16:35:18.342567 4799 generic.go:334] "Generic (PLEG): container finished" podID="0a2e52eb-b82d-4869-ab9b-1c783d1c58fb" containerID="b169e77ed56aa1a5ba394fe34fe0bf00c0c597cfb72b1308448ba1223bd067b9" exitCode=0
Oct 10 16:35:18 crc kubenswrapper[4799]: I1010 16:35:18.342605 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c9chn" event={"ID":"0a2e52eb-b82d-4869-ab9b-1c783d1c58fb","Type":"ContainerDied","Data":"b169e77ed56aa1a5ba394fe34fe0bf00c0c597cfb72b1308448ba1223bd067b9"}
Oct 10 16:35:22 crc kubenswrapper[4799]: I1010 16:35:22.370560 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c9chn" event={"ID":"0a2e52eb-b82d-4869-ab9b-1c783d1c58fb","Type":"ContainerStarted","Data":"f958cff23bca7d5793ea9871cce292ffc8011cbcaacbdcd5a925dfd263a354b5"}
Oct 10 16:35:22 crc kubenswrapper[4799]: I1010 16:35:22.373271 4799 generic.go:334] "Generic (PLEG): container finished" podID="122cb808-3c4b-4829-8ac3-3419cf9b6bdd" containerID="0a53a74059460265e2fcc6d939d0bc193e4d328d22e63e7ab2d38be257786465" exitCode=0
Oct 10 16:35:22 crc kubenswrapper[4799]: I1010 16:35:22.373378 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vtc78" event={"ID":"122cb808-3c4b-4829-8ac3-3419cf9b6bdd","Type":"ContainerDied","Data":"0a53a74059460265e2fcc6d939d0bc193e4d328d22e63e7ab2d38be257786465"}
Oct 10 16:35:22 crc kubenswrapper[4799]: I1010 16:35:22.375857 4799 generic.go:334] "Generic (PLEG): container finished" podID="89264b95-de02-4ba2-a26e-121910a2b2ff" containerID="c0b267b0940c09560e240e3a348b666096d65f8da74a309718588064a2278f2e" exitCode=0
Oct 10 16:35:22 crc kubenswrapper[4799]: I1010 16:35:22.375940 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5d7rl" event={"ID":"89264b95-de02-4ba2-a26e-121910a2b2ff","Type":"ContainerDied","Data":"c0b267b0940c09560e240e3a348b666096d65f8da74a309718588064a2278f2e"}
Oct 10 16:35:22 crc kubenswrapper[4799]: I1010 16:35:22.383251 4799 generic.go:334] "Generic (PLEG): container finished" podID="048287bf-12a1-4fef-8fc2-7fa4686d31cc" containerID="6d41933ae11ef7823b62d2b58d70b791229f91237a9f25776f250974eb14801a" exitCode=0
Oct 10 16:35:22 crc kubenswrapper[4799]: I1010 16:35:22.383382 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4786v" event={"ID":"048287bf-12a1-4fef-8fc2-7fa4686d31cc","Type":"ContainerDied","Data":"6d41933ae11ef7823b62d2b58d70b791229f91237a9f25776f250974eb14801a"}
Oct 10 16:35:22 crc kubenswrapper[4799]: I1010 16:35:22.386868 4799 generic.go:334] "Generic (PLEG): container finished" podID="f8195a22-f2dc-4a8e-bb85-abbef8d04ac3" containerID="86eb2910ca1ca2ace7680ae85f82a52d9677eb85f89c40dd9cda38029a784678" exitCode=0
Oct 10 16:35:22 crc kubenswrapper[4799]: I1010 16:35:22.386969 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rmstw" event={"ID":"f8195a22-f2dc-4a8e-bb85-abbef8d04ac3","Type":"ContainerDied","Data":"86eb2910ca1ca2ace7680ae85f82a52d9677eb85f89c40dd9cda38029a784678"}
Oct 10 16:35:22 crc kubenswrapper[4799]: I1010 16:35:22.392280 4799 generic.go:334] "Generic (PLEG): container finished" podID="ec4988cf-fb0f-4df8-8f2d-748a5459bbcc" containerID="a45a0e091ec875ce087184fde67116b845ace4d903536f9d88fe302c0c771339" exitCode=0
Oct 10 16:35:22 crc kubenswrapper[4799]: I1010 16:35:22.392354 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z5z55" event={"ID":"ec4988cf-fb0f-4df8-8f2d-748a5459bbcc","Type":"ContainerDied","Data":"a45a0e091ec875ce087184fde67116b845ace4d903536f9d88fe302c0c771339"}
Oct 10 16:35:22 crc kubenswrapper[4799]: I1010 16:35:22.419120 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-c9chn" podStartSLOduration=4.835470303 podStartE2EDuration="1m12.419090879s" podCreationTimestamp="2025-10-10 16:34:10 +0000 UTC" firstStartedPulling="2025-10-10 16:34:13.574447968 +0000 UTC m=+147.082772083" lastFinishedPulling="2025-10-10 16:35:21.158068544 +0000 UTC m=+214.666392659" observedRunningTime="2025-10-10 16:35:22.413935656 +0000 UTC m=+215.922259801" watchObservedRunningTime="2025-10-10 16:35:22.419090879 +0000 UTC m=+215.927415004"
Oct 10 16:35:23 crc kubenswrapper[4799]: I1010 16:35:23.399603 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4786v" event={"ID":"048287bf-12a1-4fef-8fc2-7fa4686d31cc","Type":"ContainerStarted","Data":"6f70847985ca6300398c906a8ee6710004ef8ee9422411ceb3a5dd3f85897be0"}
Oct 10 16:35:23 crc kubenswrapper[4799]: I1010 16:35:23.409197 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rmstw" event={"ID":"f8195a22-f2dc-4a8e-bb85-abbef8d04ac3","Type":"ContainerStarted","Data":"86067d8fda7a062c1fc9d499ec7e5bf9eb9dc7dd2484e1654a864c1824d4a10b"}
Oct 10 16:35:23 crc kubenswrapper[4799]: I1010 16:35:23.409806 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z5z55" event={"ID":"ec4988cf-fb0f-4df8-8f2d-748a5459bbcc","Type":"ContainerStarted","Data":"1e35d6bc74d8a24f42976236a75719667c6ae61e15826c8a0c0a76a54baa39d4"}
Oct 10 16:35:23 crc kubenswrapper[4799]: I1010 16:35:23.413664 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vtc78" event={"ID":"122cb808-3c4b-4829-8ac3-3419cf9b6bdd","Type":"ContainerStarted","Data":"58c1a27b830e7c6d9517fd4281408edeae0b008b56390d3368b130f82be02483"}
Oct 10 16:35:23 crc kubenswrapper[4799]: I1010 16:35:23.416188 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5d7rl" event={"ID":"89264b95-de02-4ba2-a26e-121910a2b2ff","Type":"ContainerStarted","Data":"f25e1c43980c408ea8307216c08e61ee52ed6cde061b2efcb8bc808d797b4318"}
Oct 10 16:35:23 crc kubenswrapper[4799]: I1010 16:35:23.425370 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4786v" podStartSLOduration=2.201736335 podStartE2EDuration="1m10.425346904s" podCreationTimestamp="2025-10-10 16:34:13 +0000 UTC" firstStartedPulling="2025-10-10 16:34:14.67379232 +0000 UTC m=+148.182116435" lastFinishedPulling="2025-10-10 16:35:22.897402879 +0000 UTC m=+216.405727004" observedRunningTime="2025-10-10 16:35:23.422674385 +0000 UTC m=+216.930998500" watchObservedRunningTime="2025-10-10 16:35:23.425346904 +0000 UTC m=+216.933671029"
Oct 10 16:35:23 crc kubenswrapper[4799]: I1010 16:35:23.439500 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vtc78" podStartSLOduration=1.910525297 podStartE2EDuration="1m10.43947794s" podCreationTimestamp="2025-10-10 16:34:13 +0000 UTC" firstStartedPulling="2025-10-10 16:34:14.58201956 +0000 UTC m=+148.090343675" lastFinishedPulling="2025-10-10 16:35:23.110972203 +0000 UTC m=+216.619296318" observedRunningTime="2025-10-10 16:35:23.439207793 +0000 UTC m=+216.947531908" watchObservedRunningTime="2025-10-10 16:35:23.43947794 +0000 UTC m=+216.947802065"
Oct 10 16:35:23 crc kubenswrapper[4799]: I1010 16:35:23.458667 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5d7rl" podStartSLOduration=4.037835036 podStartE2EDuration="1m12.458645595s" podCreationTimestamp="2025-10-10 16:34:11 +0000 UTC" firstStartedPulling="2025-10-10 16:34:14.685851103 +0000 UTC m=+148.194175218" lastFinishedPulling="2025-10-10 16:35:23.106661652 +0000 UTC m=+216.614985777" observedRunningTime="2025-10-10 16:35:23.454890578 +0000 UTC m=+216.963214713" watchObservedRunningTime="2025-10-10 16:35:23.458645595 +0000 UTC m=+216.966969710"
Oct 10 16:35:23 crc kubenswrapper[4799]: I1010 16:35:23.476477 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-z5z55" podStartSLOduration=4.213231233 podStartE2EDuration="1m12.476462396s" podCreationTimestamp="2025-10-10 16:34:11 +0000 UTC" firstStartedPulling="2025-10-10 16:34:14.685297199 +0000 UTC m=+148.193621314" lastFinishedPulling="2025-10-10 16:35:22.948528362 +0000 UTC m=+216.456852477" observedRunningTime="2025-10-10 16:35:23.475161373 +0000 UTC m=+216.983485498" watchObservedRunningTime="2025-10-10 16:35:23.476462396 +0000 UTC m=+216.984786511"
Oct 10 16:35:23 crc kubenswrapper[4799]: I1010 16:35:23.498463 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rmstw" podStartSLOduration=4.198395691 podStartE2EDuration="1m12.498442865s" podCreationTimestamp="2025-10-10 16:34:11 +0000 UTC" firstStartedPulling="2025-10-10 16:34:14.685588947 +0000 UTC m=+148.193913052" lastFinishedPulling="2025-10-10 16:35:22.985636111 +0000 UTC m=+216.493960226" observedRunningTime="2025-10-10 16:35:23.497803028 +0000 UTC m=+217.006127143" watchObservedRunningTime="2025-10-10 16:35:23.498442865 +0000 UTC m=+217.006766980"
Oct 10 16:35:23 crc kubenswrapper[4799]: I1010 16:35:23.775070 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vtc78"
Oct 10 16:35:23 crc kubenswrapper[4799]: I1010 16:35:23.775708 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vtc78"
Oct 10 16:35:24 crc kubenswrapper[4799]: I1010 16:35:24.816527 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-vtc78" podUID="122cb808-3c4b-4829-8ac3-3419cf9b6bdd" containerName="registry-server" probeResult="failure" output=<
Oct 10 16:35:24 crc kubenswrapper[4799]: timeout: failed to connect service ":50051" within 1s
Oct 10 16:35:24 crc kubenswrapper[4799]: >
Oct 10 16:35:24 crc kubenswrapper[4799]: I1010 16:35:24.839064 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vlrmq"
Oct 10 16:35:24 crc kubenswrapper[4799]: I1010 16:35:24.882240 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vlrmq"
Oct 10 16:35:28 crc kubenswrapper[4799]: I1010 16:35:28.622682 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vlrmq"]
Oct 10 16:35:28 crc kubenswrapper[4799]: I1010 16:35:28.623494 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vlrmq" podUID="2ad01aa8-e719-4b4d-af75-0589792d8b3c" containerName="registry-server" containerID="cri-o://10d345db8c23583e33b654adb28fb4f085dcf396b1c856c1ca63c369800a9c24" gracePeriod=2
Oct 10 16:35:28 crc kubenswrapper[4799]: I1010 16:35:28.988870 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vlrmq"
Oct 10 16:35:29 crc kubenswrapper[4799]: I1010 16:35:29.096304 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ad01aa8-e719-4b4d-af75-0589792d8b3c-catalog-content\") pod \"2ad01aa8-e719-4b4d-af75-0589792d8b3c\" (UID: \"2ad01aa8-e719-4b4d-af75-0589792d8b3c\") "
Oct 10 16:35:29 crc kubenswrapper[4799]: I1010 16:35:29.096440 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mffxh\" (UniqueName: \"kubernetes.io/projected/2ad01aa8-e719-4b4d-af75-0589792d8b3c-kube-api-access-mffxh\") pod \"2ad01aa8-e719-4b4d-af75-0589792d8b3c\" (UID: \"2ad01aa8-e719-4b4d-af75-0589792d8b3c\") "
Oct 10 16:35:29 crc kubenswrapper[4799]: I1010 16:35:29.096581 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ad01aa8-e719-4b4d-af75-0589792d8b3c-utilities\") pod \"2ad01aa8-e719-4b4d-af75-0589792d8b3c\" (UID: \"2ad01aa8-e719-4b4d-af75-0589792d8b3c\") "
Oct 10 16:35:29 crc kubenswrapper[4799]: I1010 16:35:29.098231 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ad01aa8-e719-4b4d-af75-0589792d8b3c-utilities" (OuterVolumeSpecName: "utilities") pod "2ad01aa8-e719-4b4d-af75-0589792d8b3c" (UID: "2ad01aa8-e719-4b4d-af75-0589792d8b3c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 16:35:29 crc kubenswrapper[4799]: I1010 16:35:29.104735 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ad01aa8-e719-4b4d-af75-0589792d8b3c-kube-api-access-mffxh" (OuterVolumeSpecName: "kube-api-access-mffxh") pod "2ad01aa8-e719-4b4d-af75-0589792d8b3c" (UID: "2ad01aa8-e719-4b4d-af75-0589792d8b3c"). InnerVolumeSpecName "kube-api-access-mffxh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 16:35:29 crc kubenswrapper[4799]: I1010 16:35:29.198202 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ad01aa8-e719-4b4d-af75-0589792d8b3c-utilities\") on node \"crc\" DevicePath \"\""
Oct 10 16:35:29 crc kubenswrapper[4799]: I1010 16:35:29.198246 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mffxh\" (UniqueName: \"kubernetes.io/projected/2ad01aa8-e719-4b4d-af75-0589792d8b3c-kube-api-access-mffxh\") on node \"crc\" DevicePath \"\""
Oct 10 16:35:29 crc kubenswrapper[4799]: I1010 16:35:29.221940 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ad01aa8-e719-4b4d-af75-0589792d8b3c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2ad01aa8-e719-4b4d-af75-0589792d8b3c" (UID: "2ad01aa8-e719-4b4d-af75-0589792d8b3c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 16:35:29 crc kubenswrapper[4799]: I1010 16:35:29.299626 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ad01aa8-e719-4b4d-af75-0589792d8b3c-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 10 16:35:29 crc kubenswrapper[4799]: I1010 16:35:29.455803 4799 generic.go:334] "Generic (PLEG): container finished" podID="2ad01aa8-e719-4b4d-af75-0589792d8b3c" containerID="10d345db8c23583e33b654adb28fb4f085dcf396b1c856c1ca63c369800a9c24" exitCode=0
Oct 10 16:35:29 crc kubenswrapper[4799]: I1010 16:35:29.455863 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vlrmq" event={"ID":"2ad01aa8-e719-4b4d-af75-0589792d8b3c","Type":"ContainerDied","Data":"10d345db8c23583e33b654adb28fb4f085dcf396b1c856c1ca63c369800a9c24"}
Oct 10 16:35:29 crc kubenswrapper[4799]: I1010 16:35:29.455911 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vlrmq" event={"ID":"2ad01aa8-e719-4b4d-af75-0589792d8b3c","Type":"ContainerDied","Data":"c54dbd5683d21fb9c373fc20436799dbc8f43217286cee672cc2e46a0e11431d"}
Oct 10 16:35:29 crc kubenswrapper[4799]: I1010 16:35:29.455941 4799 scope.go:117] "RemoveContainer" containerID="10d345db8c23583e33b654adb28fb4f085dcf396b1c856c1ca63c369800a9c24"
Oct 10 16:35:29 crc kubenswrapper[4799]: I1010 16:35:29.455968 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vlrmq"
Oct 10 16:35:29 crc kubenswrapper[4799]: I1010 16:35:29.480448 4799 scope.go:117] "RemoveContainer" containerID="99e44ea00e43c7e98fb7aaa730690ad56169703065f7218ae502f25f12f2c1f5"
Oct 10 16:35:29 crc kubenswrapper[4799]: I1010 16:35:29.484291 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vlrmq"]
Oct 10 16:35:29 crc kubenswrapper[4799]: I1010 16:35:29.490272 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vlrmq"]
Oct 10 16:35:29 crc kubenswrapper[4799]: I1010 16:35:29.494830 4799 scope.go:117] "RemoveContainer" containerID="b70c53ec486ab9a218be0bf65863d1644e18fe9e7cae05af297e3cf12c45d6c2"
Oct 10 16:35:29 crc kubenswrapper[4799]: I1010 16:35:29.516436 4799 scope.go:117] "RemoveContainer" containerID="10d345db8c23583e33b654adb28fb4f085dcf396b1c856c1ca63c369800a9c24"
Oct 10 16:35:29 crc kubenswrapper[4799]: E1010 16:35:29.517378 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10d345db8c23583e33b654adb28fb4f085dcf396b1c856c1ca63c369800a9c24\": container with ID starting with 10d345db8c23583e33b654adb28fb4f085dcf396b1c856c1ca63c369800a9c24 not found: ID does not exist" containerID="10d345db8c23583e33b654adb28fb4f085dcf396b1c856c1ca63c369800a9c24"
Oct 10 16:35:29 crc kubenswrapper[4799]: I1010 16:35:29.517748 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10d345db8c23583e33b654adb28fb4f085dcf396b1c856c1ca63c369800a9c24"} err="failed to get container status \"10d345db8c23583e33b654adb28fb4f085dcf396b1c856c1ca63c369800a9c24\": rpc error: code = NotFound desc = could not find container \"10d345db8c23583e33b654adb28fb4f085dcf396b1c856c1ca63c369800a9c24\": container with ID starting with 10d345db8c23583e33b654adb28fb4f085dcf396b1c856c1ca63c369800a9c24 not found: ID does not exist"
Oct 10 16:35:29 crc kubenswrapper[4799]: I1010 16:35:29.518008 4799 scope.go:117] "RemoveContainer" containerID="99e44ea00e43c7e98fb7aaa730690ad56169703065f7218ae502f25f12f2c1f5"
Oct 10 16:35:29 crc kubenswrapper[4799]: E1010 16:35:29.518720 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99e44ea00e43c7e98fb7aaa730690ad56169703065f7218ae502f25f12f2c1f5\": container with ID starting with 99e44ea00e43c7e98fb7aaa730690ad56169703065f7218ae502f25f12f2c1f5 not found: ID does not exist" containerID="99e44ea00e43c7e98fb7aaa730690ad56169703065f7218ae502f25f12f2c1f5"
Oct 10 16:35:29 crc kubenswrapper[4799]: I1010 16:35:29.518857 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99e44ea00e43c7e98fb7aaa730690ad56169703065f7218ae502f25f12f2c1f5"} err="failed to get container status \"99e44ea00e43c7e98fb7aaa730690ad56169703065f7218ae502f25f12f2c1f5\": rpc error: code = NotFound desc = could not find container \"99e44ea00e43c7e98fb7aaa730690ad56169703065f7218ae502f25f12f2c1f5\": container with ID starting with 99e44ea00e43c7e98fb7aaa730690ad56169703065f7218ae502f25f12f2c1f5 not found: ID does not exist"
Oct 10 16:35:29 crc kubenswrapper[4799]: I1010 16:35:29.518895 4799 scope.go:117] "RemoveContainer" containerID="b70c53ec486ab9a218be0bf65863d1644e18fe9e7cae05af297e3cf12c45d6c2"
Oct 10 16:35:29 crc kubenswrapper[4799]: E1010 16:35:29.519405 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b70c53ec486ab9a218be0bf65863d1644e18fe9e7cae05af297e3cf12c45d6c2\": container with ID starting with b70c53ec486ab9a218be0bf65863d1644e18fe9e7cae05af297e3cf12c45d6c2 not found: ID does not exist" containerID="b70c53ec486ab9a218be0bf65863d1644e18fe9e7cae05af297e3cf12c45d6c2"
Oct 10 16:35:29 crc kubenswrapper[4799]: I1010 16:35:29.519458 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b70c53ec486ab9a218be0bf65863d1644e18fe9e7cae05af297e3cf12c45d6c2"} err="failed to get container status \"b70c53ec486ab9a218be0bf65863d1644e18fe9e7cae05af297e3cf12c45d6c2\": rpc error: code = NotFound desc = could not find container \"b70c53ec486ab9a218be0bf65863d1644e18fe9e7cae05af297e3cf12c45d6c2\": container with ID starting with b70c53ec486ab9a218be0bf65863d1644e18fe9e7cae05af297e3cf12c45d6c2 not found: ID does not exist"
Oct 10 16:35:30 crc kubenswrapper[4799]: I1010 16:35:30.676126 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-56xl2"]
Oct 10 16:35:31 crc kubenswrapper[4799]: I1010 16:35:31.180693 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-c9chn"
Oct 10 16:35:31 crc kubenswrapper[4799]: I1010 16:35:31.180983 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-c9chn"
Oct 10 16:35:31 crc kubenswrapper[4799]: I1010 16:35:31.239332 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-c9chn"
Oct 10 16:35:31 crc kubenswrapper[4799]: I1010 16:35:31.409546 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ad01aa8-e719-4b4d-af75-0589792d8b3c" path="/var/lib/kubelet/pods/2ad01aa8-e719-4b4d-af75-0589792d8b3c/volumes"
Oct 10 16:35:31 crc kubenswrapper[4799]: I1010 16:35:31.410240 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-z5z55"
Oct 10 16:35:31 crc kubenswrapper[4799]: I1010 16:35:31.410276 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-z5z55"
Oct 10 16:35:31 crc kubenswrapper[4799]: I1010 16:35:31.454386 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-z5z55"
Oct 10 16:35:31 crc kubenswrapper[4799]: I1010 16:35:31.539371 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-z5z55"
Oct 10 16:35:31 crc kubenswrapper[4799]: I1010 16:35:31.539448 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-c9chn"
Oct 10 16:35:31 crc kubenswrapper[4799]: I1010 16:35:31.620977 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5d7rl"
Oct 10 16:35:31 crc kubenswrapper[4799]: I1010 16:35:31.621025 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5d7rl"
Oct 10 16:35:31 crc kubenswrapper[4799]: I1010 16:35:31.662987 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5d7rl"
Oct 10 16:35:31 crc kubenswrapper[4799]: I1010 16:35:31.874623 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-rmstw"
Oct 10 16:35:31 crc kubenswrapper[4799]: I1010 16:35:31.874667 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-rmstw"
Oct 10 16:35:31 crc kubenswrapper[4799]: I1010 16:35:31.913794 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rmstw"
Oct 10 16:35:32 crc kubenswrapper[4799]: I1010 16:35:32.525631 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rmstw"
Oct 10 16:35:32 crc kubenswrapper[4799]: I1010 16:35:32.525966 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5d7rl"
Oct 10 16:35:33 crc kubenswrapper[4799]: I1010 16:35:33.374824 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4786v"
Oct 10 16:35:33 crc kubenswrapper[4799]: I1010 16:35:33.374881 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4786v"
Oct 10 16:35:33 crc kubenswrapper[4799]: I1010 16:35:33.420463 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4786v"
Oct 10 16:35:33 crc kubenswrapper[4799]: I1010 16:35:33.532912 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4786v"
Oct 10 16:35:33 crc kubenswrapper[4799]: I1010 16:35:33.622595 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5d7rl"]
Oct 10 16:35:33 crc kubenswrapper[4799]: I1010 16:35:33.809870 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vtc78"
Oct 10 16:35:33 crc kubenswrapper[4799]: I1010 16:35:33.861424 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vtc78"
Oct 10 16:35:34 crc kubenswrapper[4799]: I1010 16:35:34.222920 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rmstw"]
Oct 10 16:35:34 crc kubenswrapper[4799]: I1010 16:35:34.496365 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-rmstw" podUID="f8195a22-f2dc-4a8e-bb85-abbef8d04ac3" containerName="registry-server" containerID="cri-o://86067d8fda7a062c1fc9d499ec7e5bf9eb9dc7dd2484e1654a864c1824d4a10b" gracePeriod=2
Oct 10 16:35:34 crc kubenswrapper[4799]: I1010 16:35:34.496559 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5d7rl" podUID="89264b95-de02-4ba2-a26e-121910a2b2ff" containerName="registry-server" containerID="cri-o://f25e1c43980c408ea8307216c08e61ee52ed6cde061b2efcb8bc808d797b4318" gracePeriod=2
Oct 10 16:35:35 crc kubenswrapper[4799]: I1010 16:35:35.534050 4799 generic.go:334] "Generic (PLEG): container finished" podID="89264b95-de02-4ba2-a26e-121910a2b2ff" containerID="f25e1c43980c408ea8307216c08e61ee52ed6cde061b2efcb8bc808d797b4318" exitCode=0
Oct 10 16:35:35 crc kubenswrapper[4799]: I1010 16:35:35.534136 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5d7rl" event={"ID":"89264b95-de02-4ba2-a26e-121910a2b2ff","Type":"ContainerDied","Data":"f25e1c43980c408ea8307216c08e61ee52ed6cde061b2efcb8bc808d797b4318"}
Oct 10 16:35:35 crc kubenswrapper[4799]: I1010 16:35:35.536449 4799 generic.go:334] "Generic (PLEG): container finished" podID="f8195a22-f2dc-4a8e-bb85-abbef8d04ac3" containerID="86067d8fda7a062c1fc9d499ec7e5bf9eb9dc7dd2484e1654a864c1824d4a10b" exitCode=0
Oct 10 16:35:35 crc kubenswrapper[4799]: I1010 16:35:35.536490 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rmstw" event={"ID":"f8195a22-f2dc-4a8e-bb85-abbef8d04ac3","Type":"ContainerDied","Data":"86067d8fda7a062c1fc9d499ec7e5bf9eb9dc7dd2484e1654a864c1824d4a10b"}
Oct 10 16:35:35 crc kubenswrapper[4799]: I1010 16:35:35.724278 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5d7rl"
Oct 10 16:35:35 crc kubenswrapper[4799]: I1010 16:35:35.734165 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rmstw"
Oct 10 16:35:35 crc kubenswrapper[4799]: I1010 16:35:35.744962 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89264b95-de02-4ba2-a26e-121910a2b2ff-catalog-content\") pod \"89264b95-de02-4ba2-a26e-121910a2b2ff\" (UID: \"89264b95-de02-4ba2-a26e-121910a2b2ff\") "
Oct 10 16:35:35 crc kubenswrapper[4799]: I1010 16:35:35.745025 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89264b95-de02-4ba2-a26e-121910a2b2ff-utilities\") pod \"89264b95-de02-4ba2-a26e-121910a2b2ff\" (UID: \"89264b95-de02-4ba2-a26e-121910a2b2ff\") "
Oct 10 16:35:35 crc kubenswrapper[4799]: I1010 16:35:35.745068 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-59zkz\" (UniqueName: \"kubernetes.io/projected/89264b95-de02-4ba2-a26e-121910a2b2ff-kube-api-access-59zkz\") pod \"89264b95-de02-4ba2-a26e-121910a2b2ff\" (UID: \"89264b95-de02-4ba2-a26e-121910a2b2ff\") "
Oct 10 16:35:35 crc kubenswrapper[4799]: I1010 16:35:35.746657 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89264b95-de02-4ba2-a26e-121910a2b2ff-utilities" (OuterVolumeSpecName: "utilities") pod "89264b95-de02-4ba2-a26e-121910a2b2ff" (UID: "89264b95-de02-4ba2-a26e-121910a2b2ff"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 16:35:35 crc kubenswrapper[4799]: I1010 16:35:35.751524 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89264b95-de02-4ba2-a26e-121910a2b2ff-kube-api-access-59zkz" (OuterVolumeSpecName: "kube-api-access-59zkz") pod "89264b95-de02-4ba2-a26e-121910a2b2ff" (UID: "89264b95-de02-4ba2-a26e-121910a2b2ff"). InnerVolumeSpecName "kube-api-access-59zkz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 16:35:35 crc kubenswrapper[4799]: I1010 16:35:35.792383 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89264b95-de02-4ba2-a26e-121910a2b2ff-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "89264b95-de02-4ba2-a26e-121910a2b2ff" (UID: "89264b95-de02-4ba2-a26e-121910a2b2ff"). InnerVolumeSpecName "catalog-content".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:35:35 crc kubenswrapper[4799]: I1010 16:35:35.846512 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cqfsd\" (UniqueName: \"kubernetes.io/projected/f8195a22-f2dc-4a8e-bb85-abbef8d04ac3-kube-api-access-cqfsd\") pod \"f8195a22-f2dc-4a8e-bb85-abbef8d04ac3\" (UID: \"f8195a22-f2dc-4a8e-bb85-abbef8d04ac3\") " Oct 10 16:35:35 crc kubenswrapper[4799]: I1010 16:35:35.846583 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8195a22-f2dc-4a8e-bb85-abbef8d04ac3-catalog-content\") pod \"f8195a22-f2dc-4a8e-bb85-abbef8d04ac3\" (UID: \"f8195a22-f2dc-4a8e-bb85-abbef8d04ac3\") " Oct 10 16:35:35 crc kubenswrapper[4799]: I1010 16:35:35.846611 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8195a22-f2dc-4a8e-bb85-abbef8d04ac3-utilities\") pod \"f8195a22-f2dc-4a8e-bb85-abbef8d04ac3\" (UID: \"f8195a22-f2dc-4a8e-bb85-abbef8d04ac3\") " Oct 10 16:35:35 crc kubenswrapper[4799]: I1010 16:35:35.846907 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-59zkz\" (UniqueName: \"kubernetes.io/projected/89264b95-de02-4ba2-a26e-121910a2b2ff-kube-api-access-59zkz\") on node \"crc\" DevicePath \"\"" Oct 10 16:35:35 crc kubenswrapper[4799]: I1010 16:35:35.846922 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89264b95-de02-4ba2-a26e-121910a2b2ff-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 16:35:35 crc kubenswrapper[4799]: I1010 16:35:35.846933 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89264b95-de02-4ba2-a26e-121910a2b2ff-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 16:35:35 crc kubenswrapper[4799]: I1010 16:35:35.847392 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f8195a22-f2dc-4a8e-bb85-abbef8d04ac3-utilities" (OuterVolumeSpecName: "utilities") pod "f8195a22-f2dc-4a8e-bb85-abbef8d04ac3" (UID: "f8195a22-f2dc-4a8e-bb85-abbef8d04ac3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:35:35 crc kubenswrapper[4799]: I1010 16:35:35.849684 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8195a22-f2dc-4a8e-bb85-abbef8d04ac3-kube-api-access-cqfsd" (OuterVolumeSpecName: "kube-api-access-cqfsd") pod "f8195a22-f2dc-4a8e-bb85-abbef8d04ac3" (UID: "f8195a22-f2dc-4a8e-bb85-abbef8d04ac3"). InnerVolumeSpecName "kube-api-access-cqfsd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:35:35 crc kubenswrapper[4799]: I1010 16:35:35.890914 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f8195a22-f2dc-4a8e-bb85-abbef8d04ac3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f8195a22-f2dc-4a8e-bb85-abbef8d04ac3" (UID: "f8195a22-f2dc-4a8e-bb85-abbef8d04ac3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:35:35 crc kubenswrapper[4799]: I1010 16:35:35.948133 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cqfsd\" (UniqueName: \"kubernetes.io/projected/f8195a22-f2dc-4a8e-bb85-abbef8d04ac3-kube-api-access-cqfsd\") on node \"crc\" DevicePath \"\"" Oct 10 16:35:35 crc kubenswrapper[4799]: I1010 16:35:35.948168 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8195a22-f2dc-4a8e-bb85-abbef8d04ac3-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 16:35:35 crc kubenswrapper[4799]: I1010 16:35:35.948178 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8195a22-f2dc-4a8e-bb85-abbef8d04ac3-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.023662 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vtc78"] Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.023919 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vtc78" podUID="122cb808-3c4b-4829-8ac3-3419cf9b6bdd" containerName="registry-server" containerID="cri-o://58c1a27b830e7c6d9517fd4281408edeae0b008b56390d3368b130f82be02483" gracePeriod=2 Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.369800 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vtc78" Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.455389 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/122cb808-3c4b-4829-8ac3-3419cf9b6bdd-utilities\") pod \"122cb808-3c4b-4829-8ac3-3419cf9b6bdd\" (UID: \"122cb808-3c4b-4829-8ac3-3419cf9b6bdd\") " Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.455564 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/122cb808-3c4b-4829-8ac3-3419cf9b6bdd-catalog-content\") pod \"122cb808-3c4b-4829-8ac3-3419cf9b6bdd\" (UID: \"122cb808-3c4b-4829-8ac3-3419cf9b6bdd\") " Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.455602 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-65skb\" (UniqueName: \"kubernetes.io/projected/122cb808-3c4b-4829-8ac3-3419cf9b6bdd-kube-api-access-65skb\") pod \"122cb808-3c4b-4829-8ac3-3419cf9b6bdd\" (UID: \"122cb808-3c4b-4829-8ac3-3419cf9b6bdd\") " Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.456259 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/122cb808-3c4b-4829-8ac3-3419cf9b6bdd-utilities" (OuterVolumeSpecName: "utilities") pod "122cb808-3c4b-4829-8ac3-3419cf9b6bdd" (UID: "122cb808-3c4b-4829-8ac3-3419cf9b6bdd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.462244 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/122cb808-3c4b-4829-8ac3-3419cf9b6bdd-kube-api-access-65skb" (OuterVolumeSpecName: "kube-api-access-65skb") pod "122cb808-3c4b-4829-8ac3-3419cf9b6bdd" (UID: "122cb808-3c4b-4829-8ac3-3419cf9b6bdd"). InnerVolumeSpecName "kube-api-access-65skb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.469633 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/122cb808-3c4b-4829-8ac3-3419cf9b6bdd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "122cb808-3c4b-4829-8ac3-3419cf9b6bdd" (UID: "122cb808-3c4b-4829-8ac3-3419cf9b6bdd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.547797 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rmstw" event={"ID":"f8195a22-f2dc-4a8e-bb85-abbef8d04ac3","Type":"ContainerDied","Data":"87a2d4c6baff5529364b8c7e1628139d47ad9c5b30cfd927b0f232c8610d2856"} Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.547862 4799 scope.go:117] "RemoveContainer" containerID="86067d8fda7a062c1fc9d499ec7e5bf9eb9dc7dd2484e1654a864c1824d4a10b" Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.548524 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rmstw" Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.550530 4799 generic.go:334] "Generic (PLEG): container finished" podID="122cb808-3c4b-4829-8ac3-3419cf9b6bdd" containerID="58c1a27b830e7c6d9517fd4281408edeae0b008b56390d3368b130f82be02483" exitCode=0 Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.550569 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vtc78" Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.550586 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vtc78" event={"ID":"122cb808-3c4b-4829-8ac3-3419cf9b6bdd","Type":"ContainerDied","Data":"58c1a27b830e7c6d9517fd4281408edeae0b008b56390d3368b130f82be02483"} Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.550672 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vtc78" event={"ID":"122cb808-3c4b-4829-8ac3-3419cf9b6bdd","Type":"ContainerDied","Data":"e72538b5e9d6ac1ad814685a80c226dcf9a43e45b7cbba9a99ee939c3c894f55"} Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.555223 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5d7rl" event={"ID":"89264b95-de02-4ba2-a26e-121910a2b2ff","Type":"ContainerDied","Data":"1938c582311580dec01c0dd14ffe07bf0bcaae54e270ca570cdee0a1b28700bf"} Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.555269 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5d7rl" Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.556559 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/122cb808-3c4b-4829-8ac3-3419cf9b6bdd-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.556587 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-65skb\" (UniqueName: \"kubernetes.io/projected/122cb808-3c4b-4829-8ac3-3419cf9b6bdd-kube-api-access-65skb\") on node \"crc\" DevicePath \"\"" Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.556603 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/122cb808-3c4b-4829-8ac3-3419cf9b6bdd-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.579642 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rmstw"] Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.582658 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-rmstw"] Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.590927 4799 scope.go:117] "RemoveContainer" containerID="86eb2910ca1ca2ace7680ae85f82a52d9677eb85f89c40dd9cda38029a784678" Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.594912 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5d7rl"] Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.597644 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5d7rl"] Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.619444 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vtc78"] Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.621892 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vtc78"] Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.632328 4799 scope.go:117] "RemoveContainer" containerID="178ae838de186c3ffda884436135132b26ff4e0ae12551b699f11212fbee6451" Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.657684 4799 scope.go:117] "RemoveContainer" containerID="58c1a27b830e7c6d9517fd4281408edeae0b008b56390d3368b130f82be02483" Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.678980 4799 scope.go:117] "RemoveContainer" containerID="0a53a74059460265e2fcc6d939d0bc193e4d328d22e63e7ab2d38be257786465" Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.701508 4799 scope.go:117] "RemoveContainer" containerID="690379bbddac979469b853c3b34a068412a0dabd6d2fad7a1e5a4007814eea4a" Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.713720 4799 scope.go:117] "RemoveContainer" containerID="58c1a27b830e7c6d9517fd4281408edeae0b008b56390d3368b130f82be02483" Oct 10 16:35:36 crc kubenswrapper[4799]: E1010 16:35:36.714170 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58c1a27b830e7c6d9517fd4281408edeae0b008b56390d3368b130f82be02483\": container with ID starting with 58c1a27b830e7c6d9517fd4281408edeae0b008b56390d3368b130f82be02483 not found: ID does not exist" containerID="58c1a27b830e7c6d9517fd4281408edeae0b008b56390d3368b130f82be02483" Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.714209 4799 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58c1a27b830e7c6d9517fd4281408edeae0b008b56390d3368b130f82be02483"} err="failed to get container status \"58c1a27b830e7c6d9517fd4281408edeae0b008b56390d3368b130f82be02483\": rpc error: code = NotFound desc = could not find container \"58c1a27b830e7c6d9517fd4281408edeae0b008b56390d3368b130f82be02483\": container with ID starting with 58c1a27b830e7c6d9517fd4281408edeae0b008b56390d3368b130f82be02483 not found: ID does not exist" Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.714236 4799 scope.go:117] "RemoveContainer" containerID="0a53a74059460265e2fcc6d939d0bc193e4d328d22e63e7ab2d38be257786465" Oct 10 16:35:36 crc kubenswrapper[4799]: E1010 16:35:36.714607 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a53a74059460265e2fcc6d939d0bc193e4d328d22e63e7ab2d38be257786465\": container with ID starting with 0a53a74059460265e2fcc6d939d0bc193e4d328d22e63e7ab2d38be257786465 not found: ID does not exist" containerID="0a53a74059460265e2fcc6d939d0bc193e4d328d22e63e7ab2d38be257786465" Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.714645 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a53a74059460265e2fcc6d939d0bc193e4d328d22e63e7ab2d38be257786465"} err="failed to get container status \"0a53a74059460265e2fcc6d939d0bc193e4d328d22e63e7ab2d38be257786465\": rpc error: code = NotFound desc = could not find container \"0a53a74059460265e2fcc6d939d0bc193e4d328d22e63e7ab2d38be257786465\": container with ID starting with 0a53a74059460265e2fcc6d939d0bc193e4d328d22e63e7ab2d38be257786465 not found: ID does not exist" Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.714671 4799 scope.go:117] "RemoveContainer" containerID="690379bbddac979469b853c3b34a068412a0dabd6d2fad7a1e5a4007814eea4a" Oct 10 16:35:36 crc kubenswrapper[4799]: E1010 16:35:36.715104 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"690379bbddac979469b853c3b34a068412a0dabd6d2fad7a1e5a4007814eea4a\": container with ID starting with 690379bbddac979469b853c3b34a068412a0dabd6d2fad7a1e5a4007814eea4a not found: ID does not exist" containerID="690379bbddac979469b853c3b34a068412a0dabd6d2fad7a1e5a4007814eea4a" Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.715131 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"690379bbddac979469b853c3b34a068412a0dabd6d2fad7a1e5a4007814eea4a"} err="failed to get container status \"690379bbddac979469b853c3b34a068412a0dabd6d2fad7a1e5a4007814eea4a\": rpc error: code = NotFound desc = could not find container \"690379bbddac979469b853c3b34a068412a0dabd6d2fad7a1e5a4007814eea4a\": container with ID starting with 690379bbddac979469b853c3b34a068412a0dabd6d2fad7a1e5a4007814eea4a not found: ID does not exist" Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.715144 4799 scope.go:117] "RemoveContainer" containerID="f25e1c43980c408ea8307216c08e61ee52ed6cde061b2efcb8bc808d797b4318" Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.725961 4799 scope.go:117] "RemoveContainer" containerID="c0b267b0940c09560e240e3a348b666096d65f8da74a309718588064a2278f2e" Oct 10 16:35:36 crc kubenswrapper[4799]: I1010 16:35:36.739047 4799 scope.go:117] "RemoveContainer" containerID="bbe6bcde77490fe58438503b1815e2613245ed5464acda29f815adfede36893c" Oct 10 16:35:37 
crc kubenswrapper[4799]: I1010 16:35:37.414271 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="122cb808-3c4b-4829-8ac3-3419cf9b6bdd" path="/var/lib/kubelet/pods/122cb808-3c4b-4829-8ac3-3419cf9b6bdd/volumes" Oct 10 16:35:37 crc kubenswrapper[4799]: I1010 16:35:37.415565 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89264b95-de02-4ba2-a26e-121910a2b2ff" path="/var/lib/kubelet/pods/89264b95-de02-4ba2-a26e-121910a2b2ff/volumes" Oct 10 16:35:37 crc kubenswrapper[4799]: I1010 16:35:37.416692 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8195a22-f2dc-4a8e-bb85-abbef8d04ac3" path="/var/lib/kubelet/pods/f8195a22-f2dc-4a8e-bb85-abbef8d04ac3/volumes" Oct 10 16:35:55 crc kubenswrapper[4799]: I1010 16:35:55.703555 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" podUID="0691553d-e534-4c08-b56e-d99bd02e53fa" containerName="oauth-openshift" containerID="cri-o://ea13a2790b014ed5cc17450ab6446824bfbb6bf57348df82f1a9544ba7615c24" gracePeriod=15 Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.175925 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.217703 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n"] Oct 10 16:35:56 crc kubenswrapper[4799]: E1010 16:35:56.217985 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="122cb808-3c4b-4829-8ac3-3419cf9b6bdd" containerName="extract-utilities" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.218746 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="122cb808-3c4b-4829-8ac3-3419cf9b6bdd" containerName="extract-utilities" Oct 10 16:35:56 crc kubenswrapper[4799]: E1010 16:35:56.218800 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="122cb808-3c4b-4829-8ac3-3419cf9b6bdd" containerName="registry-server" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.218813 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="122cb808-3c4b-4829-8ac3-3419cf9b6bdd" containerName="registry-server" Oct 10 16:35:56 crc kubenswrapper[4799]: E1010 16:35:56.218827 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc89d1a8-4954-42af-b9ab-6fc1b88f9e02" containerName="pruner" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.218839 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc89d1a8-4954-42af-b9ab-6fc1b88f9e02" containerName="pruner" Oct 10 16:35:56 crc kubenswrapper[4799]: E1010 16:35:56.218854 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8195a22-f2dc-4a8e-bb85-abbef8d04ac3" containerName="extract-utilities" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.218863 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8195a22-f2dc-4a8e-bb85-abbef8d04ac3" containerName="extract-utilities" Oct 10 16:35:56 crc kubenswrapper[4799]: E1010 16:35:56.218878 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8195a22-f2dc-4a8e-bb85-abbef8d04ac3" containerName="registry-server" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.218887 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8195a22-f2dc-4a8e-bb85-abbef8d04ac3" containerName="registry-server" Oct 10 16:35:56 crc kubenswrapper[4799]: E1010 16:35:56.218900 4799 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ad01aa8-e719-4b4d-af75-0589792d8b3c" containerName="registry-server" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.218911 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ad01aa8-e719-4b4d-af75-0589792d8b3c" containerName="registry-server" Oct 10 16:35:56 crc kubenswrapper[4799]: E1010 16:35:56.218927 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0691553d-e534-4c08-b56e-d99bd02e53fa" containerName="oauth-openshift" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.218937 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="0691553d-e534-4c08-b56e-d99bd02e53fa" containerName="oauth-openshift" Oct 10 16:35:56 crc kubenswrapper[4799]: E1010 16:35:56.218950 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3e5ea0c-87cd-48f2-af88-2256afe86fe9" containerName="pruner" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.218960 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3e5ea0c-87cd-48f2-af88-2256afe86fe9" containerName="pruner" Oct 10 16:35:56 crc kubenswrapper[4799]: E1010 16:35:56.218977 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89264b95-de02-4ba2-a26e-121910a2b2ff" containerName="extract-content" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.218986 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="89264b95-de02-4ba2-a26e-121910a2b2ff" containerName="extract-content" Oct 10 16:35:56 crc kubenswrapper[4799]: E1010 16:35:56.219000 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89264b95-de02-4ba2-a26e-121910a2b2ff" containerName="extract-utilities" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.219011 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="89264b95-de02-4ba2-a26e-121910a2b2ff" containerName="extract-utilities" Oct 10 16:35:56 crc kubenswrapper[4799]: E1010 16:35:56.219027 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="122cb808-3c4b-4829-8ac3-3419cf9b6bdd" containerName="extract-content" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.219038 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="122cb808-3c4b-4829-8ac3-3419cf9b6bdd" containerName="extract-content" Oct 10 16:35:56 crc kubenswrapper[4799]: E1010 16:35:56.219057 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ad01aa8-e719-4b4d-af75-0589792d8b3c" containerName="extract-utilities" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.219067 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ad01aa8-e719-4b4d-af75-0589792d8b3c" containerName="extract-utilities" Oct 10 16:35:56 crc kubenswrapper[4799]: E1010 16:35:56.219079 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89264b95-de02-4ba2-a26e-121910a2b2ff" containerName="registry-server" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.219089 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="89264b95-de02-4ba2-a26e-121910a2b2ff" containerName="registry-server" Oct 10 16:35:56 crc kubenswrapper[4799]: E1010 16:35:56.219107 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8195a22-f2dc-4a8e-bb85-abbef8d04ac3" containerName="extract-content" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.219118 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8195a22-f2dc-4a8e-bb85-abbef8d04ac3" containerName="extract-content" Oct 10 16:35:56 crc kubenswrapper[4799]: E1010 
16:35:56.219138 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ad01aa8-e719-4b4d-af75-0589792d8b3c" containerName="extract-content" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.219149 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ad01aa8-e719-4b4d-af75-0589792d8b3c" containerName="extract-content" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.219318 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3e5ea0c-87cd-48f2-af88-2256afe86fe9" containerName="pruner" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.219337 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8195a22-f2dc-4a8e-bb85-abbef8d04ac3" containerName="registry-server" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.219350 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc89d1a8-4954-42af-b9ab-6fc1b88f9e02" containerName="pruner" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.219363 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="0691553d-e534-4c08-b56e-d99bd02e53fa" containerName="oauth-openshift" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.219376 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="122cb808-3c4b-4829-8ac3-3419cf9b6bdd" containerName="registry-server" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.219388 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ad01aa8-e719-4b4d-af75-0589792d8b3c" containerName="registry-server" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.219404 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="89264b95-de02-4ba2-a26e-121910a2b2ff" containerName="registry-server" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.219946 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.239376 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n"] Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.265851 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-user-template-login\") pod \"0691553d-e534-4c08-b56e-d99bd02e53fa\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.266142 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0691553d-e534-4c08-b56e-d99bd02e53fa-audit-policies\") pod \"0691553d-e534-4c08-b56e-d99bd02e53fa\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.266295 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-trusted-ca-bundle\") pod \"0691553d-e534-4c08-b56e-d99bd02e53fa\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.266410 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gmcp9\" (UniqueName: \"kubernetes.io/projected/0691553d-e534-4c08-b56e-d99bd02e53fa-kube-api-access-gmcp9\") pod \"0691553d-e534-4c08-b56e-d99bd02e53fa\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.266518 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-session\") pod \"0691553d-e534-4c08-b56e-d99bd02e53fa\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.266611 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-service-ca\") pod \"0691553d-e534-4c08-b56e-d99bd02e53fa\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.266731 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0691553d-e534-4c08-b56e-d99bd02e53fa-audit-dir\") pod \"0691553d-e534-4c08-b56e-d99bd02e53fa\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.266869 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-ocp-branding-template\") pod \"0691553d-e534-4c08-b56e-d99bd02e53fa\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.267020 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-router-certs\") pod \"0691553d-e534-4c08-b56e-d99bd02e53fa\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.267128 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-cliconfig\") pod \"0691553d-e534-4c08-b56e-d99bd02e53fa\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.267244 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-user-template-error\") pod \"0691553d-e534-4c08-b56e-d99bd02e53fa\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.267424 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-user-template-provider-selection\") pod \"0691553d-e534-4c08-b56e-d99bd02e53fa\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.267541 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-serving-cert\") pod \"0691553d-e534-4c08-b56e-d99bd02e53fa\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.267663 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-user-idp-0-file-data\") pod \"0691553d-e534-4c08-b56e-d99bd02e53fa\" (UID: \"0691553d-e534-4c08-b56e-d99bd02e53fa\") " Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.268922 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "0691553d-e534-4c08-b56e-d99bd02e53fa" (UID: "0691553d-e534-4c08-b56e-d99bd02e53fa"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.269297 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0691553d-e534-4c08-b56e-d99bd02e53fa-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "0691553d-e534-4c08-b56e-d99bd02e53fa" (UID: "0691553d-e534-4c08-b56e-d99bd02e53fa"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.269828 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "0691553d-e534-4c08-b56e-d99bd02e53fa" (UID: "0691553d-e534-4c08-b56e-d99bd02e53fa"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.270177 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "0691553d-e534-4c08-b56e-d99bd02e53fa" (UID: "0691553d-e534-4c08-b56e-d99bd02e53fa"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.270233 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0691553d-e534-4c08-b56e-d99bd02e53fa-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "0691553d-e534-4c08-b56e-d99bd02e53fa" (UID: "0691553d-e534-4c08-b56e-d99bd02e53fa"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.273545 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "0691553d-e534-4c08-b56e-d99bd02e53fa" (UID: "0691553d-e534-4c08-b56e-d99bd02e53fa"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.273966 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "0691553d-e534-4c08-b56e-d99bd02e53fa" (UID: "0691553d-e534-4c08-b56e-d99bd02e53fa"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.274677 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "0691553d-e534-4c08-b56e-d99bd02e53fa" (UID: "0691553d-e534-4c08-b56e-d99bd02e53fa"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.275258 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "0691553d-e534-4c08-b56e-d99bd02e53fa" (UID: "0691553d-e534-4c08-b56e-d99bd02e53fa"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.275841 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "0691553d-e534-4c08-b56e-d99bd02e53fa" (UID: "0691553d-e534-4c08-b56e-d99bd02e53fa"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.276134 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "0691553d-e534-4c08-b56e-d99bd02e53fa" (UID: "0691553d-e534-4c08-b56e-d99bd02e53fa"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.276356 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "0691553d-e534-4c08-b56e-d99bd02e53fa" (UID: "0691553d-e534-4c08-b56e-d99bd02e53fa"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.276637 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0691553d-e534-4c08-b56e-d99bd02e53fa-kube-api-access-gmcp9" (OuterVolumeSpecName: "kube-api-access-gmcp9") pod "0691553d-e534-4c08-b56e-d99bd02e53fa" (UID: "0691553d-e534-4c08-b56e-d99bd02e53fa"). InnerVolumeSpecName "kube-api-access-gmcp9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.278353 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "0691553d-e534-4c08-b56e-d99bd02e53fa" (UID: "0691553d-e534-4c08-b56e-d99bd02e53fa"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.369291 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7c01718b-e160-4776-b441-967b72eceb65-v4-0-config-user-template-login\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.369372 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7c01718b-e160-4776-b441-967b72eceb65-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.369449 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7c01718b-e160-4776-b441-967b72eceb65-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.369534 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7c01718b-e160-4776-b441-967b72eceb65-v4-0-config-system-session\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.369573 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7c01718b-e160-4776-b441-967b72eceb65-v4-0-config-system-cliconfig\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.369618 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7c01718b-e160-4776-b441-967b72eceb65-audit-policies\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.369670 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7c01718b-e160-4776-b441-967b72eceb65-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.369723 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/7c01718b-e160-4776-b441-967b72eceb65-v4-0-config-system-serving-cert\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.369785 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7c01718b-e160-4776-b441-967b72eceb65-v4-0-config-system-service-ca\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.369840 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlj72\" (UniqueName: \"kubernetes.io/projected/7c01718b-e160-4776-b441-967b72eceb65-kube-api-access-tlj72\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.369883 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7c01718b-e160-4776-b441-967b72eceb65-audit-dir\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.369919 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7c01718b-e160-4776-b441-967b72eceb65-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.369969 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7c01718b-e160-4776-b441-967b72eceb65-v4-0-config-user-template-error\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.370019 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7c01718b-e160-4776-b441-967b72eceb65-v4-0-config-system-router-certs\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.370098 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gmcp9\" (UniqueName: \"kubernetes.io/projected/0691553d-e534-4c08-b56e-d99bd02e53fa-kube-api-access-gmcp9\") on node \"crc\" DevicePath \"\"" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.370123 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 10 16:35:56 
crc kubenswrapper[4799]: I1010 16:35:56.370142 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.370162 4799 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0691553d-e534-4c08-b56e-d99bd02e53fa-audit-dir\") on node \"crc\" DevicePath \"\"" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.370182 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.370201 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.370221 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.370240 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.370260 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.370279 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.370299 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.370318 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.370336 4799 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0691553d-e534-4c08-b56e-d99bd02e53fa-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.370366 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0691553d-e534-4c08-b56e-d99bd02e53fa-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" 
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.472442 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7c01718b-e160-4776-b441-967b72eceb65-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n"
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.472523 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7c01718b-e160-4776-b441-967b72eceb65-v4-0-config-system-serving-cert\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n"
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.472568 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7c01718b-e160-4776-b441-967b72eceb65-v4-0-config-system-service-ca\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n"
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.472643 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlj72\" (UniqueName: \"kubernetes.io/projected/7c01718b-e160-4776-b441-967b72eceb65-kube-api-access-tlj72\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n"
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.472688 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7c01718b-e160-4776-b441-967b72eceb65-audit-dir\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n"
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.472943 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7c01718b-e160-4776-b441-967b72eceb65-audit-dir\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n"
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.473294 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7c01718b-e160-4776-b441-967b72eceb65-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n"
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.473387 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7c01718b-e160-4776-b441-967b72eceb65-v4-0-config-system-service-ca\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n"
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.473977 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7c01718b-e160-4776-b441-967b72eceb65-v4-0-config-user-template-error\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n"
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.474049 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7c01718b-e160-4776-b441-967b72eceb65-v4-0-config-system-router-certs\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n"
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.474092 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7c01718b-e160-4776-b441-967b72eceb65-v4-0-config-user-template-login\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n"
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.474135 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7c01718b-e160-4776-b441-967b72eceb65-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n"
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.474178 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7c01718b-e160-4776-b441-967b72eceb65-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n"
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.474219 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7c01718b-e160-4776-b441-967b72eceb65-v4-0-config-system-session\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n"
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.474249 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7c01718b-e160-4776-b441-967b72eceb65-v4-0-config-system-cliconfig\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n"
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.474295 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7c01718b-e160-4776-b441-967b72eceb65-audit-policies\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n"
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.475195 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7c01718b-e160-4776-b441-967b72eceb65-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n"
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.475577 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7c01718b-e160-4776-b441-967b72eceb65-audit-policies\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n"
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.475822 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7c01718b-e160-4776-b441-967b72eceb65-v4-0-config-system-cliconfig\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n"
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.480049 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7c01718b-e160-4776-b441-967b72eceb65-v4-0-config-user-template-login\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n"
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.480156 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7c01718b-e160-4776-b441-967b72eceb65-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n"
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.480708 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7c01718b-e160-4776-b441-967b72eceb65-v4-0-config-user-template-error\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n"
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.481557 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7c01718b-e160-4776-b441-967b72eceb65-v4-0-config-system-router-certs\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n"
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.481624 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7c01718b-e160-4776-b441-967b72eceb65-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n"
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.482571 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7c01718b-e160-4776-b441-967b72eceb65-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n"
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.484010 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7c01718b-e160-4776-b441-967b72eceb65-v4-0-config-system-session\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n"
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.485297 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7c01718b-e160-4776-b441-967b72eceb65-v4-0-config-system-serving-cert\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n"
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.502247 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlj72\" (UniqueName: \"kubernetes.io/projected/7c01718b-e160-4776-b441-967b72eceb65-kube-api-access-tlj72\") pod \"oauth-openshift-69bcbbd7f8-b8w5n\" (UID: \"7c01718b-e160-4776-b441-967b72eceb65\") " pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n"
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.546213 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n"
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.713722 4799 generic.go:334] "Generic (PLEG): container finished" podID="0691553d-e534-4c08-b56e-d99bd02e53fa" containerID="ea13a2790b014ed5cc17450ab6446824bfbb6bf57348df82f1a9544ba7615c24" exitCode=0
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.713923 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-56xl2"
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.713993 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" event={"ID":"0691553d-e534-4c08-b56e-d99bd02e53fa","Type":"ContainerDied","Data":"ea13a2790b014ed5cc17450ab6446824bfbb6bf57348df82f1a9544ba7615c24"}
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.714554 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-56xl2" event={"ID":"0691553d-e534-4c08-b56e-d99bd02e53fa","Type":"ContainerDied","Data":"8839b81d79a5232353d7248137a71c031438c749d48354f56da45a214a244ec8"}
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.714621 4799 scope.go:117] "RemoveContainer" containerID="ea13a2790b014ed5cc17450ab6446824bfbb6bf57348df82f1a9544ba7615c24"
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.755497 4799 scope.go:117] "RemoveContainer" containerID="ea13a2790b014ed5cc17450ab6446824bfbb6bf57348df82f1a9544ba7615c24"
Oct 10 16:35:56 crc kubenswrapper[4799]: E1010 16:35:56.756791 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea13a2790b014ed5cc17450ab6446824bfbb6bf57348df82f1a9544ba7615c24\": container with ID starting with ea13a2790b014ed5cc17450ab6446824bfbb6bf57348df82f1a9544ba7615c24 not found: ID does not exist" containerID="ea13a2790b014ed5cc17450ab6446824bfbb6bf57348df82f1a9544ba7615c24"
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.756835 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea13a2790b014ed5cc17450ab6446824bfbb6bf57348df82f1a9544ba7615c24"} err="failed to get container status \"ea13a2790b014ed5cc17450ab6446824bfbb6bf57348df82f1a9544ba7615c24\": rpc error: code = NotFound desc = could not find container \"ea13a2790b014ed5cc17450ab6446824bfbb6bf57348df82f1a9544ba7615c24\": container with ID starting with ea13a2790b014ed5cc17450ab6446824bfbb6bf57348df82f1a9544ba7615c24 not found: ID does not exist"
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.770334 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-56xl2"]
Oct 10 16:35:56 crc kubenswrapper[4799]: I1010 16:35:56.773591 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-56xl2"]
Oct 10 16:35:57 crc kubenswrapper[4799]: I1010 16:35:57.041335 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n"]
Oct 10 16:35:57 crc kubenswrapper[4799]: I1010 16:35:57.415696 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0691553d-e534-4c08-b56e-d99bd02e53fa" path="/var/lib/kubelet/pods/0691553d-e534-4c08-b56e-d99bd02e53fa/volumes"
Oct 10 16:35:57 crc kubenswrapper[4799]: I1010 16:35:57.728352 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n" event={"ID":"7c01718b-e160-4776-b441-967b72eceb65","Type":"ContainerStarted","Data":"c80c47e8d7ca8120ed93757535d7ea23f7a2f2d628d15547b40dc155b88ac8b2"}
Oct 10 16:35:57 crc kubenswrapper[4799]: I1010 16:35:57.728415 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n" event={"ID":"7c01718b-e160-4776-b441-967b72eceb65","Type":"ContainerStarted","Data":"38a5aaedb9703c1551e6ab4612f5cc4660671d4bca36c0b00d494a0318bee55c"}
Oct 10 16:35:57 crc kubenswrapper[4799]: I1010 16:35:57.728854 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n"
Oct 10 16:35:57 crc kubenswrapper[4799]: I1010 16:35:57.764122 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n" podStartSLOduration=27.764091501 podStartE2EDuration="27.764091501s" podCreationTimestamp="2025-10-10 16:35:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:35:57.759654426 +0000 UTC m=+251.267978601" watchObservedRunningTime="2025-10-10 16:35:57.764091501 +0000 UTC m=+251.272415656"
Oct 10 16:35:57 crc kubenswrapper[4799]: I1010 16:35:57.911312 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-69bcbbd7f8-b8w5n"
Oct 10 16:36:24 crc kubenswrapper[4799]: I1010 16:36:24.878453 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c9chn"]
Oct 10 16:36:24 crc kubenswrapper[4799]: I1010 16:36:24.880083 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-c9chn" podUID="0a2e52eb-b82d-4869-ab9b-1c783d1c58fb" containerName="registry-server" containerID="cri-o://f958cff23bca7d5793ea9871cce292ffc8011cbcaacbdcd5a925dfd263a354b5" gracePeriod=30
Oct 10 16:36:24 crc kubenswrapper[4799]: I1010 16:36:24.882473 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-z5z55"]
Oct 10 16:36:24 crc kubenswrapper[4799]: I1010 16:36:24.882967 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-z5z55" podUID="ec4988cf-fb0f-4df8-8f2d-748a5459bbcc" containerName="registry-server" containerID="cri-o://1e35d6bc74d8a24f42976236a75719667c6ae61e15826c8a0c0a76a54baa39d4" gracePeriod=30
Oct 10 16:36:24 crc kubenswrapper[4799]: I1010 16:36:24.883722 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mn6dn"]
Oct 10 16:36:24 crc kubenswrapper[4799]: I1010 16:36:24.883977 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-mn6dn" podUID="e7393869-51bf-4974-97c7-33adfe1b44f5" containerName="marketplace-operator" containerID="cri-o://862b8488e89b1f15ca8d6add01fab3069539611bc74c85e1f24779fedf97e88c" gracePeriod=30
Oct 10 16:36:24 crc kubenswrapper[4799]: I1010 16:36:24.890932 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4786v"]
Oct 10 16:36:24 crc kubenswrapper[4799]: I1010 16:36:24.891223 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4786v" podUID="048287bf-12a1-4fef-8fc2-7fa4686d31cc" containerName="registry-server" containerID="cri-o://6f70847985ca6300398c906a8ee6710004ef8ee9422411ceb3a5dd3f85897be0" gracePeriod=30
Oct 10 16:36:24 crc kubenswrapper[4799]: I1010 16:36:24.909183 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-82hks"]
Oct 10 16:36:24 crc kubenswrapper[4799]: I1010 16:36:24.910070 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-82hks"
Oct 10 16:36:24 crc kubenswrapper[4799]: I1010 16:36:24.917628 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mkr4n"]
Oct 10 16:36:24 crc kubenswrapper[4799]: I1010 16:36:24.918244 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-mkr4n" podUID="ce937bef-231c-4353-8af2-f8e4517c68c4" containerName="registry-server" containerID="cri-o://e31a8a21bb71845f1ffed4bf9908e006a8cd80c51c60cd28fd954067926d6c8f" gracePeriod=30
Oct 10 16:36:24 crc kubenswrapper[4799]: I1010 16:36:24.927786 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-82hks"]
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.075118 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ec7d66a7-bea8-4dd5-8d91-84ec51cd57a7-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-82hks\" (UID: \"ec7d66a7-bea8-4dd5-8d91-84ec51cd57a7\") " pod="openshift-marketplace/marketplace-operator-79b997595-82hks"
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.075180 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/ec7d66a7-bea8-4dd5-8d91-84ec51cd57a7-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-82hks\" (UID: \"ec7d66a7-bea8-4dd5-8d91-84ec51cd57a7\") " pod="openshift-marketplace/marketplace-operator-79b997595-82hks"
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.075202 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxhsv\" (UniqueName: \"kubernetes.io/projected/ec7d66a7-bea8-4dd5-8d91-84ec51cd57a7-kube-api-access-hxhsv\") pod \"marketplace-operator-79b997595-82hks\" (UID: \"ec7d66a7-bea8-4dd5-8d91-84ec51cd57a7\") " pod="openshift-marketplace/marketplace-operator-79b997595-82hks"
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.176694 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/ec7d66a7-bea8-4dd5-8d91-84ec51cd57a7-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-82hks\" (UID: \"ec7d66a7-bea8-4dd5-8d91-84ec51cd57a7\") " pod="openshift-marketplace/marketplace-operator-79b997595-82hks"
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.176737 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxhsv\" (UniqueName: \"kubernetes.io/projected/ec7d66a7-bea8-4dd5-8d91-84ec51cd57a7-kube-api-access-hxhsv\") pod \"marketplace-operator-79b997595-82hks\" (UID: \"ec7d66a7-bea8-4dd5-8d91-84ec51cd57a7\") " pod="openshift-marketplace/marketplace-operator-79b997595-82hks"
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.176811 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ec7d66a7-bea8-4dd5-8d91-84ec51cd57a7-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-82hks\" (UID: \"ec7d66a7-bea8-4dd5-8d91-84ec51cd57a7\") " pod="openshift-marketplace/marketplace-operator-79b997595-82hks"
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.178186 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ec7d66a7-bea8-4dd5-8d91-84ec51cd57a7-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-82hks\" (UID: \"ec7d66a7-bea8-4dd5-8d91-84ec51cd57a7\") " pod="openshift-marketplace/marketplace-operator-79b997595-82hks"
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.187211 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/ec7d66a7-bea8-4dd5-8d91-84ec51cd57a7-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-82hks\" (UID: \"ec7d66a7-bea8-4dd5-8d91-84ec51cd57a7\") " pod="openshift-marketplace/marketplace-operator-79b997595-82hks"
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.192745 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxhsv\" (UniqueName: \"kubernetes.io/projected/ec7d66a7-bea8-4dd5-8d91-84ec51cd57a7-kube-api-access-hxhsv\") pod \"marketplace-operator-79b997595-82hks\" (UID: \"ec7d66a7-bea8-4dd5-8d91-84ec51cd57a7\") " pod="openshift-marketplace/marketplace-operator-79b997595-82hks"
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.238024 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-82hks"
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.350296 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z5z55"
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.355802 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-mn6dn"
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.366093 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c9chn"
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.380452 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4786v"
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.388148 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mkr4n"
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.481479 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2r7w2\" (UniqueName: \"kubernetes.io/projected/e7393869-51bf-4974-97c7-33adfe1b44f5-kube-api-access-2r7w2\") pod \"e7393869-51bf-4974-97c7-33adfe1b44f5\" (UID: \"e7393869-51bf-4974-97c7-33adfe1b44f5\") "
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.481527 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e7393869-51bf-4974-97c7-33adfe1b44f5-marketplace-trusted-ca\") pod \"e7393869-51bf-4974-97c7-33adfe1b44f5\" (UID: \"e7393869-51bf-4974-97c7-33adfe1b44f5\") "
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.481586 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a2e52eb-b82d-4869-ab9b-1c783d1c58fb-utilities\") pod \"0a2e52eb-b82d-4869-ab9b-1c783d1c58fb\" (UID: \"0a2e52eb-b82d-4869-ab9b-1c783d1c58fb\") "
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.481612 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ch4sf\" (UniqueName: \"kubernetes.io/projected/048287bf-12a1-4fef-8fc2-7fa4686d31cc-kube-api-access-ch4sf\") pod \"048287bf-12a1-4fef-8fc2-7fa4686d31cc\" (UID: \"048287bf-12a1-4fef-8fc2-7fa4686d31cc\") "
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.481629 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9jsjj\" (UniqueName: \"kubernetes.io/projected/ec4988cf-fb0f-4df8-8f2d-748a5459bbcc-kube-api-access-9jsjj\") pod \"ec4988cf-fb0f-4df8-8f2d-748a5459bbcc\" (UID: \"ec4988cf-fb0f-4df8-8f2d-748a5459bbcc\") "
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.481663 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7zlqs\" (UniqueName: \"kubernetes.io/projected/0a2e52eb-b82d-4869-ab9b-1c783d1c58fb-kube-api-access-7zlqs\") pod \"0a2e52eb-b82d-4869-ab9b-1c783d1c58fb\" (UID: \"0a2e52eb-b82d-4869-ab9b-1c783d1c58fb\") "
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.481702 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec4988cf-fb0f-4df8-8f2d-748a5459bbcc-utilities\") pod \"ec4988cf-fb0f-4df8-8f2d-748a5459bbcc\" (UID: \"ec4988cf-fb0f-4df8-8f2d-748a5459bbcc\") "
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.481727 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hr52z\" (UniqueName: \"kubernetes.io/projected/ce937bef-231c-4353-8af2-f8e4517c68c4-kube-api-access-hr52z\") pod \"ce937bef-231c-4353-8af2-f8e4517c68c4\" (UID: \"ce937bef-231c-4353-8af2-f8e4517c68c4\") "
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.481776 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/e7393869-51bf-4974-97c7-33adfe1b44f5-marketplace-operator-metrics\") pod \"e7393869-51bf-4974-97c7-33adfe1b44f5\" (UID: \"e7393869-51bf-4974-97c7-33adfe1b44f5\") "
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.481816 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/048287bf-12a1-4fef-8fc2-7fa4686d31cc-catalog-content\") pod \"048287bf-12a1-4fef-8fc2-7fa4686d31cc\" (UID: \"048287bf-12a1-4fef-8fc2-7fa4686d31cc\") "
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.481834 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec4988cf-fb0f-4df8-8f2d-748a5459bbcc-catalog-content\") pod \"ec4988cf-fb0f-4df8-8f2d-748a5459bbcc\" (UID: \"ec4988cf-fb0f-4df8-8f2d-748a5459bbcc\") "
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.481854 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a2e52eb-b82d-4869-ab9b-1c783d1c58fb-catalog-content\") pod \"0a2e52eb-b82d-4869-ab9b-1c783d1c58fb\" (UID: \"0a2e52eb-b82d-4869-ab9b-1c783d1c58fb\") "
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.481876 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce937bef-231c-4353-8af2-f8e4517c68c4-utilities\") pod \"ce937bef-231c-4353-8af2-f8e4517c68c4\" (UID: \"ce937bef-231c-4353-8af2-f8e4517c68c4\") "
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.481892 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce937bef-231c-4353-8af2-f8e4517c68c4-catalog-content\") pod \"ce937bef-231c-4353-8af2-f8e4517c68c4\" (UID: \"ce937bef-231c-4353-8af2-f8e4517c68c4\") "
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.481912 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/048287bf-12a1-4fef-8fc2-7fa4686d31cc-utilities\") pod \"048287bf-12a1-4fef-8fc2-7fa4686d31cc\" (UID: \"048287bf-12a1-4fef-8fc2-7fa4686d31cc\") "
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.482471 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7393869-51bf-4974-97c7-33adfe1b44f5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "e7393869-51bf-4974-97c7-33adfe1b44f5" (UID: "e7393869-51bf-4974-97c7-33adfe1b44f5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.482717 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/048287bf-12a1-4fef-8fc2-7fa4686d31cc-utilities" (OuterVolumeSpecName: "utilities") pod "048287bf-12a1-4fef-8fc2-7fa4686d31cc" (UID: "048287bf-12a1-4fef-8fc2-7fa4686d31cc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.483095 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec4988cf-fb0f-4df8-8f2d-748a5459bbcc-utilities" (OuterVolumeSpecName: "utilities") pod "ec4988cf-fb0f-4df8-8f2d-748a5459bbcc" (UID: "ec4988cf-fb0f-4df8-8f2d-748a5459bbcc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.483783 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0a2e52eb-b82d-4869-ab9b-1c783d1c58fb-utilities" (OuterVolumeSpecName: "utilities") pod "0a2e52eb-b82d-4869-ab9b-1c783d1c58fb" (UID: "0a2e52eb-b82d-4869-ab9b-1c783d1c58fb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.485102 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce937bef-231c-4353-8af2-f8e4517c68c4-utilities" (OuterVolumeSpecName: "utilities") pod "ce937bef-231c-4353-8af2-f8e4517c68c4" (UID: "ce937bef-231c-4353-8af2-f8e4517c68c4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.485805 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a2e52eb-b82d-4869-ab9b-1c783d1c58fb-kube-api-access-7zlqs" (OuterVolumeSpecName: "kube-api-access-7zlqs") pod "0a2e52eb-b82d-4869-ab9b-1c783d1c58fb" (UID: "0a2e52eb-b82d-4869-ab9b-1c783d1c58fb"). InnerVolumeSpecName "kube-api-access-7zlqs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.485837 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7393869-51bf-4974-97c7-33adfe1b44f5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "e7393869-51bf-4974-97c7-33adfe1b44f5" (UID: "e7393869-51bf-4974-97c7-33adfe1b44f5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.487469 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/048287bf-12a1-4fef-8fc2-7fa4686d31cc-kube-api-access-ch4sf" (OuterVolumeSpecName: "kube-api-access-ch4sf") pod "048287bf-12a1-4fef-8fc2-7fa4686d31cc" (UID: "048287bf-12a1-4fef-8fc2-7fa4686d31cc"). InnerVolumeSpecName "kube-api-access-ch4sf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.487986 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce937bef-231c-4353-8af2-f8e4517c68c4-kube-api-access-hr52z" (OuterVolumeSpecName: "kube-api-access-hr52z") pod "ce937bef-231c-4353-8af2-f8e4517c68c4" (UID: "ce937bef-231c-4353-8af2-f8e4517c68c4"). InnerVolumeSpecName "kube-api-access-hr52z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.488175 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7393869-51bf-4974-97c7-33adfe1b44f5-kube-api-access-2r7w2" (OuterVolumeSpecName: "kube-api-access-2r7w2") pod "e7393869-51bf-4974-97c7-33adfe1b44f5" (UID: "e7393869-51bf-4974-97c7-33adfe1b44f5"). InnerVolumeSpecName "kube-api-access-2r7w2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.493048 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec4988cf-fb0f-4df8-8f2d-748a5459bbcc-kube-api-access-9jsjj" (OuterVolumeSpecName: "kube-api-access-9jsjj") pod "ec4988cf-fb0f-4df8-8f2d-748a5459bbcc" (UID: "ec4988cf-fb0f-4df8-8f2d-748a5459bbcc"). InnerVolumeSpecName "kube-api-access-9jsjj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.503952 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/048287bf-12a1-4fef-8fc2-7fa4686d31cc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "048287bf-12a1-4fef-8fc2-7fa4686d31cc" (UID: "048287bf-12a1-4fef-8fc2-7fa4686d31cc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.538374 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec4988cf-fb0f-4df8-8f2d-748a5459bbcc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ec4988cf-fb0f-4df8-8f2d-748a5459bbcc" (UID: "ec4988cf-fb0f-4df8-8f2d-748a5459bbcc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.554478 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0a2e52eb-b82d-4869-ab9b-1c783d1c58fb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0a2e52eb-b82d-4869-ab9b-1c783d1c58fb" (UID: "0a2e52eb-b82d-4869-ab9b-1c783d1c58fb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.571399 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce937bef-231c-4353-8af2-f8e4517c68c4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ce937bef-231c-4353-8af2-f8e4517c68c4" (UID: "ce937bef-231c-4353-8af2-f8e4517c68c4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.583811 4799 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/e7393869-51bf-4974-97c7-33adfe1b44f5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\""
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.583846 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec4988cf-fb0f-4df8-8f2d-748a5459bbcc-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.583859 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/048287bf-12a1-4fef-8fc2-7fa4686d31cc-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.583871 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a2e52eb-b82d-4869-ab9b-1c783d1c58fb-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.583882 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce937bef-231c-4353-8af2-f8e4517c68c4-utilities\") on node \"crc\" DevicePath \"\""
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.583893 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce937bef-231c-4353-8af2-f8e4517c68c4-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.583901 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/048287bf-12a1-4fef-8fc2-7fa4686d31cc-utilities\") on node \"crc\" DevicePath \"\""
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.583912 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2r7w2\" (UniqueName: \"kubernetes.io/projected/e7393869-51bf-4974-97c7-33adfe1b44f5-kube-api-access-2r7w2\") on node \"crc\" DevicePath \"\""
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.583920 4799 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e7393869-51bf-4974-97c7-33adfe1b44f5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\""
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.583928 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a2e52eb-b82d-4869-ab9b-1c783d1c58fb-utilities\") on node \"crc\" DevicePath \"\""
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.583936 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ch4sf\" (UniqueName: \"kubernetes.io/projected/048287bf-12a1-4fef-8fc2-7fa4686d31cc-kube-api-access-ch4sf\") on node \"crc\" DevicePath \"\""
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.583944 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9jsjj\" (UniqueName: \"kubernetes.io/projected/ec4988cf-fb0f-4df8-8f2d-748a5459bbcc-kube-api-access-9jsjj\") on node \"crc\" DevicePath \"\""
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.583952 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7zlqs\" (UniqueName: \"kubernetes.io/projected/0a2e52eb-b82d-4869-ab9b-1c783d1c58fb-kube-api-access-7zlqs\") on node \"crc\" DevicePath \"\""
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.583960 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec4988cf-fb0f-4df8-8f2d-748a5459bbcc-utilities\") on node \"crc\" DevicePath \"\""
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.583969 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hr52z\" (UniqueName: \"kubernetes.io/projected/ce937bef-231c-4353-8af2-f8e4517c68c4-kube-api-access-hr52z\") on node \"crc\" DevicePath \"\""
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.759919 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-82hks"]
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.926191 4799 generic.go:334] "Generic (PLEG): container finished" podID="0a2e52eb-b82d-4869-ab9b-1c783d1c58fb" containerID="f958cff23bca7d5793ea9871cce292ffc8011cbcaacbdcd5a925dfd263a354b5" exitCode=0
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.926290 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c9chn" event={"ID":"0a2e52eb-b82d-4869-ab9b-1c783d1c58fb","Type":"ContainerDied","Data":"f958cff23bca7d5793ea9871cce292ffc8011cbcaacbdcd5a925dfd263a354b5"}
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.926312 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c9chn"
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.926656 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c9chn" event={"ID":"0a2e52eb-b82d-4869-ab9b-1c783d1c58fb","Type":"ContainerDied","Data":"49d9afe499437cc339c0788c0f90142c9ecb2dddb6c66fd4e715d20a5c0326f2"}
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.926860 4799 scope.go:117] "RemoveContainer" containerID="f958cff23bca7d5793ea9871cce292ffc8011cbcaacbdcd5a925dfd263a354b5"
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.930433 4799 generic.go:334] "Generic (PLEG): container finished" podID="ce937bef-231c-4353-8af2-f8e4517c68c4" containerID="e31a8a21bb71845f1ffed4bf9908e006a8cd80c51c60cd28fd954067926d6c8f" exitCode=0
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.930492 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mkr4n" event={"ID":"ce937bef-231c-4353-8af2-f8e4517c68c4","Type":"ContainerDied","Data":"e31a8a21bb71845f1ffed4bf9908e006a8cd80c51c60cd28fd954067926d6c8f"}
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.930515 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mkr4n" event={"ID":"ce937bef-231c-4353-8af2-f8e4517c68c4","Type":"ContainerDied","Data":"f009f8392490988ab6a6cb0835896ac639535b9a9b68261b5e7f11ff5d27b1ae"}
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.930536 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mkr4n"
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.937182 4799 generic.go:334] "Generic (PLEG): container finished" podID="ec4988cf-fb0f-4df8-8f2d-748a5459bbcc" containerID="1e35d6bc74d8a24f42976236a75719667c6ae61e15826c8a0c0a76a54baa39d4" exitCode=0
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.937271 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z5z55" event={"ID":"ec4988cf-fb0f-4df8-8f2d-748a5459bbcc","Type":"ContainerDied","Data":"1e35d6bc74d8a24f42976236a75719667c6ae61e15826c8a0c0a76a54baa39d4"}
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.937313 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z5z55" event={"ID":"ec4988cf-fb0f-4df8-8f2d-748a5459bbcc","Type":"ContainerDied","Data":"902b03ac04ed41b609698f710dd28906ba67ba463f3840ef2f1545adde13eb93"}
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.937536 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z5z55"
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.942743 4799 generic.go:334] "Generic (PLEG): container finished" podID="e7393869-51bf-4974-97c7-33adfe1b44f5" containerID="862b8488e89b1f15ca8d6add01fab3069539611bc74c85e1f24779fedf97e88c" exitCode=0
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.942817 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mn6dn" event={"ID":"e7393869-51bf-4974-97c7-33adfe1b44f5","Type":"ContainerDied","Data":"862b8488e89b1f15ca8d6add01fab3069539611bc74c85e1f24779fedf97e88c"}
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.942849 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-mn6dn"
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.942876 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mn6dn" event={"ID":"e7393869-51bf-4974-97c7-33adfe1b44f5","Type":"ContainerDied","Data":"daffa33bce56726cb5e580134618dad18f8c9b3589079a5f16bc6401fdd2a1de"}
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.948106 4799 generic.go:334] "Generic (PLEG): container finished" podID="048287bf-12a1-4fef-8fc2-7fa4686d31cc" containerID="6f70847985ca6300398c906a8ee6710004ef8ee9422411ceb3a5dd3f85897be0" exitCode=0
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.948182 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4786v" event={"ID":"048287bf-12a1-4fef-8fc2-7fa4686d31cc","Type":"ContainerDied","Data":"6f70847985ca6300398c906a8ee6710004ef8ee9422411ceb3a5dd3f85897be0"}
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.948206 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4786v" event={"ID":"048287bf-12a1-4fef-8fc2-7fa4686d31cc","Type":"ContainerDied","Data":"2f1fb1897dee773d66a1611a4d1d1c2694c8118ea7753b34a2dd1093c1e231aa"}
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.948320 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4786v"
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.950347 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-82hks" event={"ID":"ec7d66a7-bea8-4dd5-8d91-84ec51cd57a7","Type":"ContainerStarted","Data":"4077062d71558a5a8895f6c0e4f0b7052ab445ecfe1a111187c5606123f6a857"}
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.950401 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-82hks" event={"ID":"ec7d66a7-bea8-4dd5-8d91-84ec51cd57a7","Type":"ContainerStarted","Data":"676bc6bc7db52eca7357cff809aafe429e65dd8b22a6837edfa04f82f1ce3a22"}
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.950903 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-82hks"
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.950997 4799 scope.go:117] "RemoveContainer" containerID="b169e77ed56aa1a5ba394fe34fe0bf00c0c597cfb72b1308448ba1223bd067b9"
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.954221 4799 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-82hks container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.55:8080/healthz\": dial tcp 10.217.0.55:8080: connect: connection refused" start-of-body=
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.954262 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-82hks" podUID="ec7d66a7-bea8-4dd5-8d91-84ec51cd57a7" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.55:8080/healthz\": dial tcp 10.217.0.55:8080: connect: connection refused"
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.969332 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-82hks" podStartSLOduration=1.9693152299999999 podStartE2EDuration="1.96931523s" podCreationTimestamp="2025-10-10 16:36:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:36:25.967125285 +0000 UTC m=+279.475449420" watchObservedRunningTime="2025-10-10 16:36:25.96931523 +0000 UTC m=+279.477639345"
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.982271 4799 scope.go:117] "RemoveContainer" containerID="36aef9f05b185c22779c267025905cd2370332e5867a378339b7969b90423564"
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.989916 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c9chn"]
Oct 10 16:36:25 crc kubenswrapper[4799]: I1010 16:36:25.991317 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-c9chn"]
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.011043 4799 scope.go:117] "RemoveContainer" containerID="f958cff23bca7d5793ea9871cce292ffc8011cbcaacbdcd5a925dfd263a354b5"
Oct 10 16:36:26 crc kubenswrapper[4799]: E1010 16:36:26.011513 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f958cff23bca7d5793ea9871cce292ffc8011cbcaacbdcd5a925dfd263a354b5\": container with ID starting with f958cff23bca7d5793ea9871cce292ffc8011cbcaacbdcd5a925dfd263a354b5 not found: ID does not exist" containerID="f958cff23bca7d5793ea9871cce292ffc8011cbcaacbdcd5a925dfd263a354b5"
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.011601 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f958cff23bca7d5793ea9871cce292ffc8011cbcaacbdcd5a925dfd263a354b5"} err="failed to get container status \"f958cff23bca7d5793ea9871cce292ffc8011cbcaacbdcd5a925dfd263a354b5\": rpc error: code = NotFound desc = could not find container \"f958cff23bca7d5793ea9871cce292ffc8011cbcaacbdcd5a925dfd263a354b5\": container with ID starting with f958cff23bca7d5793ea9871cce292ffc8011cbcaacbdcd5a925dfd263a354b5 not found: ID does not exist"
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.011633 4799 scope.go:117] "RemoveContainer" containerID="b169e77ed56aa1a5ba394fe34fe0bf00c0c597cfb72b1308448ba1223bd067b9"
Oct 10 16:36:26 crc kubenswrapper[4799]: E1010 16:36:26.012038 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b169e77ed56aa1a5ba394fe34fe0bf00c0c597cfb72b1308448ba1223bd067b9\": container with ID starting with b169e77ed56aa1a5ba394fe34fe0bf00c0c597cfb72b1308448ba1223bd067b9 not found: ID does not exist" containerID="b169e77ed56aa1a5ba394fe34fe0bf00c0c597cfb72b1308448ba1223bd067b9"
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.012088 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b169e77ed56aa1a5ba394fe34fe0bf00c0c597cfb72b1308448ba1223bd067b9"} err="failed to get container status \"b169e77ed56aa1a5ba394fe34fe0bf00c0c597cfb72b1308448ba1223bd067b9\": rpc error: code = NotFound desc = could not find container \"b169e77ed56aa1a5ba394fe34fe0bf00c0c597cfb72b1308448ba1223bd067b9\": container with ID starting with b169e77ed56aa1a5ba394fe34fe0bf00c0c597cfb72b1308448ba1223bd067b9 not found: ID does not exist"
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.013027 4799 scope.go:117] "RemoveContainer" containerID="36aef9f05b185c22779c267025905cd2370332e5867a378339b7969b90423564"
Oct 10 16:36:26 crc kubenswrapper[4799]: E1010 16:36:26.013475 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"36aef9f05b185c22779c267025905cd2370332e5867a378339b7969b90423564\": container with ID starting with 36aef9f05b185c22779c267025905cd2370332e5867a378339b7969b90423564 not found: ID does not exist" containerID="36aef9f05b185c22779c267025905cd2370332e5867a378339b7969b90423564"
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.013521 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36aef9f05b185c22779c267025905cd2370332e5867a378339b7969b90423564"} err="failed to get container status \"36aef9f05b185c22779c267025905cd2370332e5867a378339b7969b90423564\": rpc error: code = NotFound desc = could not find container \"36aef9f05b185c22779c267025905cd2370332e5867a378339b7969b90423564\": container with ID starting with 36aef9f05b185c22779c267025905cd2370332e5867a378339b7969b90423564 not found: ID does not exist"
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.013543 4799 scope.go:117] "RemoveContainer" containerID="e31a8a21bb71845f1ffed4bf9908e006a8cd80c51c60cd28fd954067926d6c8f"
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.018390 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-z5z55"]
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.021894 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-z5z55"]
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.026581 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mkr4n"]
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.034089 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-mkr4n"]
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.035948 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mn6dn"]
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.041795 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mn6dn"]
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.042855 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4786v"]
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.055700 4799 scope.go:117] "RemoveContainer" containerID="47764ec6487ef350dd82a777a93cbe1cd2f6242a74dc1e2a046bc6f1132d8c63"
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.056917 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4786v"]
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.079374 4799 scope.go:117] "RemoveContainer" containerID="12150c685523a851ac97553f2a50b4557bb79507e6ec0cc3d50aabaaf8538a84"
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.112774 4799 scope.go:117] "RemoveContainer" containerID="e31a8a21bb71845f1ffed4bf9908e006a8cd80c51c60cd28fd954067926d6c8f"
Oct 10 16:36:26 crc kubenswrapper[4799]: E1010 16:36:26.113206 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e31a8a21bb71845f1ffed4bf9908e006a8cd80c51c60cd28fd954067926d6c8f\": container with ID starting with e31a8a21bb71845f1ffed4bf9908e006a8cd80c51c60cd28fd954067926d6c8f not found: ID does not exist" containerID="e31a8a21bb71845f1ffed4bf9908e006a8cd80c51c60cd28fd954067926d6c8f"
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.113236 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e31a8a21bb71845f1ffed4bf9908e006a8cd80c51c60cd28fd954067926d6c8f"} err="failed to get container status \"e31a8a21bb71845f1ffed4bf9908e006a8cd80c51c60cd28fd954067926d6c8f\": rpc error: code = NotFound desc = could not find container \"e31a8a21bb71845f1ffed4bf9908e006a8cd80c51c60cd28fd954067926d6c8f\": container with ID starting with e31a8a21bb71845f1ffed4bf9908e006a8cd80c51c60cd28fd954067926d6c8f not found: ID does not exist"
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.113256 4799 scope.go:117] "RemoveContainer" containerID="47764ec6487ef350dd82a777a93cbe1cd2f6242a74dc1e2a046bc6f1132d8c63"
Oct 10 16:36:26 crc kubenswrapper[4799]: E1010 16:36:26.114009 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47764ec6487ef350dd82a777a93cbe1cd2f6242a74dc1e2a046bc6f1132d8c63\": container with ID starting with 47764ec6487ef350dd82a777a93cbe1cd2f6242a74dc1e2a046bc6f1132d8c63 not found: ID does not exist" containerID="47764ec6487ef350dd82a777a93cbe1cd2f6242a74dc1e2a046bc6f1132d8c63"
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.114040 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47764ec6487ef350dd82a777a93cbe1cd2f6242a74dc1e2a046bc6f1132d8c63"} err="failed to get container status \"47764ec6487ef350dd82a777a93cbe1cd2f6242a74dc1e2a046bc6f1132d8c63\": rpc error: code = NotFound desc = could not find container \"47764ec6487ef350dd82a777a93cbe1cd2f6242a74dc1e2a046bc6f1132d8c63\": container with ID starting with 47764ec6487ef350dd82a777a93cbe1cd2f6242a74dc1e2a046bc6f1132d8c63 not found: ID does not exist"
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.114057 4799 scope.go:117] "RemoveContainer" containerID="12150c685523a851ac97553f2a50b4557bb79507e6ec0cc3d50aabaaf8538a84"
Oct 10 16:36:26 crc kubenswrapper[4799]: E1010 16:36:26.114879 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"12150c685523a851ac97553f2a50b4557bb79507e6ec0cc3d50aabaaf8538a84\": container with ID starting with 12150c685523a851ac97553f2a50b4557bb79507e6ec0cc3d50aabaaf8538a84 not found: ID does not exist" containerID="12150c685523a851ac97553f2a50b4557bb79507e6ec0cc3d50aabaaf8538a84"
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.114929 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12150c685523a851ac97553f2a50b4557bb79507e6ec0cc3d50aabaaf8538a84"} err="failed to get container status \"12150c685523a851ac97553f2a50b4557bb79507e6ec0cc3d50aabaaf8538a84\": rpc error: code = NotFound desc = could not find container \"12150c685523a851ac97553f2a50b4557bb79507e6ec0cc3d50aabaaf8538a84\": container with ID starting with 12150c685523a851ac97553f2a50b4557bb79507e6ec0cc3d50aabaaf8538a84 not found: ID does not exist"
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.114964 4799 scope.go:117] "RemoveContainer" containerID="1e35d6bc74d8a24f42976236a75719667c6ae61e15826c8a0c0a76a54baa39d4"
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.134179 4799 scope.go:117] "RemoveContainer" containerID="a45a0e091ec875ce087184fde67116b845ace4d903536f9d88fe302c0c771339"
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.148313 4799 scope.go:117] "RemoveContainer" containerID="0f2a8412a97266ed1ef1700224e39d163d1aa20db1195e0eb26991eb7838317e"
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.165022 4799 scope.go:117] "RemoveContainer" containerID="1e35d6bc74d8a24f42976236a75719667c6ae61e15826c8a0c0a76a54baa39d4"
Oct 10 16:36:26 crc kubenswrapper[4799]: E1010 16:36:26.165992 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e35d6bc74d8a24f42976236a75719667c6ae61e15826c8a0c0a76a54baa39d4\": container with ID starting with 1e35d6bc74d8a24f42976236a75719667c6ae61e15826c8a0c0a76a54baa39d4 not found: ID does not exist" containerID="1e35d6bc74d8a24f42976236a75719667c6ae61e15826c8a0c0a76a54baa39d4"
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.166022 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e35d6bc74d8a24f42976236a75719667c6ae61e15826c8a0c0a76a54baa39d4"} err="failed to get container status \"1e35d6bc74d8a24f42976236a75719667c6ae61e15826c8a0c0a76a54baa39d4\": rpc error: code = NotFound desc = could not find container \"1e35d6bc74d8a24f42976236a75719667c6ae61e15826c8a0c0a76a54baa39d4\": container with ID starting with 1e35d6bc74d8a24f42976236a75719667c6ae61e15826c8a0c0a76a54baa39d4 not found: ID does not exist"
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.166044 4799 scope.go:117] "RemoveContainer" containerID="a45a0e091ec875ce087184fde67116b845ace4d903536f9d88fe302c0c771339"
Oct 10 16:36:26 crc kubenswrapper[4799]: E1010 16:36:26.166268 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a45a0e091ec875ce087184fde67116b845ace4d903536f9d88fe302c0c771339\": container with ID starting with a45a0e091ec875ce087184fde67116b845ace4d903536f9d88fe302c0c771339 not found: ID does not exist" containerID="a45a0e091ec875ce087184fde67116b845ace4d903536f9d88fe302c0c771339"
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.166298 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a45a0e091ec875ce087184fde67116b845ace4d903536f9d88fe302c0c771339"} err="failed to get container status \"a45a0e091ec875ce087184fde67116b845ace4d903536f9d88fe302c0c771339\": rpc error: code = NotFound desc = could not find container \"a45a0e091ec875ce087184fde67116b845ace4d903536f9d88fe302c0c771339\": container with ID starting with a45a0e091ec875ce087184fde67116b845ace4d903536f9d88fe302c0c771339 not found: ID does not exist"
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.166314 4799 scope.go:117] "RemoveContainer" containerID="0f2a8412a97266ed1ef1700224e39d163d1aa20db1195e0eb26991eb7838317e"
Oct 10 16:36:26 crc kubenswrapper[4799]: E1010 16:36:26.166550 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f2a8412a97266ed1ef1700224e39d163d1aa20db1195e0eb26991eb7838317e\": container with ID starting with 0f2a8412a97266ed1ef1700224e39d163d1aa20db1195e0eb26991eb7838317e not found: ID does not exist" containerID="0f2a8412a97266ed1ef1700224e39d163d1aa20db1195e0eb26991eb7838317e"
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.166593 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f2a8412a97266ed1ef1700224e39d163d1aa20db1195e0eb26991eb7838317e"} err="failed to get container status \"0f2a8412a97266ed1ef1700224e39d163d1aa20db1195e0eb26991eb7838317e\": rpc error: code = NotFound desc = could not find container \"0f2a8412a97266ed1ef1700224e39d163d1aa20db1195e0eb26991eb7838317e\": container with ID starting with 0f2a8412a97266ed1ef1700224e39d163d1aa20db1195e0eb26991eb7838317e not found: ID does not exist"
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.166619 4799 scope.go:117] "RemoveContainer" containerID="862b8488e89b1f15ca8d6add01fab3069539611bc74c85e1f24779fedf97e88c"
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.178734 4799 scope.go:117] "RemoveContainer" containerID="862b8488e89b1f15ca8d6add01fab3069539611bc74c85e1f24779fedf97e88c"
Oct 10 16:36:26 crc kubenswrapper[4799]: E1010 16:36:26.179089 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"862b8488e89b1f15ca8d6add01fab3069539611bc74c85e1f24779fedf97e88c\": container with ID starting with 862b8488e89b1f15ca8d6add01fab3069539611bc74c85e1f24779fedf97e88c not found: ID does not exist" containerID="862b8488e89b1f15ca8d6add01fab3069539611bc74c85e1f24779fedf97e88c"
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.179131 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"862b8488e89b1f15ca8d6add01fab3069539611bc74c85e1f24779fedf97e88c"} err="failed to get container status \"862b8488e89b1f15ca8d6add01fab3069539611bc74c85e1f24779fedf97e88c\": rpc error: code = NotFound desc = could not find container \"862b8488e89b1f15ca8d6add01fab3069539611bc74c85e1f24779fedf97e88c\": container with ID starting with 862b8488e89b1f15ca8d6add01fab3069539611bc74c85e1f24779fedf97e88c not found: ID does not exist"
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.179161 4799 scope.go:117] "RemoveContainer" containerID="6f70847985ca6300398c906a8ee6710004ef8ee9422411ceb3a5dd3f85897be0"
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.189240 4799 scope.go:117] "RemoveContainer" containerID="6d41933ae11ef7823b62d2b58d70b791229f91237a9f25776f250974eb14801a"
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.200392 4799 scope.go:117] "RemoveContainer" containerID="b05205f84fe832dad3b0055359de22613d19535d2a4eca2500b46df4f8c794f4"
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.214675 4799 scope.go:117] "RemoveContainer" containerID="6f70847985ca6300398c906a8ee6710004ef8ee9422411ceb3a5dd3f85897be0"
Oct 10 16:36:26 crc kubenswrapper[4799]: E1010 16:36:26.215377 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f70847985ca6300398c906a8ee6710004ef8ee9422411ceb3a5dd3f85897be0\": container with ID starting with 6f70847985ca6300398c906a8ee6710004ef8ee9422411ceb3a5dd3f85897be0 not found: ID does not exist" containerID="6f70847985ca6300398c906a8ee6710004ef8ee9422411ceb3a5dd3f85897be0"
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.215430 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f70847985ca6300398c906a8ee6710004ef8ee9422411ceb3a5dd3f85897be0"} err="failed to get container status \"6f70847985ca6300398c906a8ee6710004ef8ee9422411ceb3a5dd3f85897be0\": rpc error: code = NotFound desc = could not find container \"6f70847985ca6300398c906a8ee6710004ef8ee9422411ceb3a5dd3f85897be0\": container with ID starting with 6f70847985ca6300398c906a8ee6710004ef8ee9422411ceb3a5dd3f85897be0 not found: ID does not exist"
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.215481 4799 scope.go:117] "RemoveContainer" containerID="6d41933ae11ef7823b62d2b58d70b791229f91237a9f25776f250974eb14801a"
Oct 10 16:36:26 crc kubenswrapper[4799]: E1010 16:36:26.215792 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d41933ae11ef7823b62d2b58d70b791229f91237a9f25776f250974eb14801a\": container with ID starting with 6d41933ae11ef7823b62d2b58d70b791229f91237a9f25776f250974eb14801a not found: ID does not exist" containerID="6d41933ae11ef7823b62d2b58d70b791229f91237a9f25776f250974eb14801a"
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.215823 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d41933ae11ef7823b62d2b58d70b791229f91237a9f25776f250974eb14801a"} err="failed to get container status \"6d41933ae11ef7823b62d2b58d70b791229f91237a9f25776f250974eb14801a\": rpc error: code = NotFound desc = could not find container \"6d41933ae11ef7823b62d2b58d70b791229f91237a9f25776f250974eb14801a\": container with ID starting with 6d41933ae11ef7823b62d2b58d70b791229f91237a9f25776f250974eb14801a not found: ID does not exist"
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.215839 4799 scope.go:117] "RemoveContainer" containerID="b05205f84fe832dad3b0055359de22613d19535d2a4eca2500b46df4f8c794f4"
Oct 10 16:36:26 crc
kubenswrapper[4799]: E1010 16:36:26.216335 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b05205f84fe832dad3b0055359de22613d19535d2a4eca2500b46df4f8c794f4\": container with ID starting with b05205f84fe832dad3b0055359de22613d19535d2a4eca2500b46df4f8c794f4 not found: ID does not exist" containerID="b05205f84fe832dad3b0055359de22613d19535d2a4eca2500b46df4f8c794f4"
Oct 10 16:36:26 crc kubenswrapper[4799]: I1010 16:36:26.216367 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b05205f84fe832dad3b0055359de22613d19535d2a4eca2500b46df4f8c794f4"} err="failed to get container status \"b05205f84fe832dad3b0055359de22613d19535d2a4eca2500b46df4f8c794f4\": rpc error: code = NotFound desc = could not find container \"b05205f84fe832dad3b0055359de22613d19535d2a4eca2500b46df4f8c794f4\": container with ID starting with b05205f84fe832dad3b0055359de22613d19535d2a4eca2500b46df4f8c794f4 not found: ID does not exist"
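
The paired "RemoveContainer" / "ContainerStatus from runtime service failed" entries above are a benign race, not data loss: the kubelet re-issues deletions for containers CRI-O has already garbage-collected, the runtime answers with gRPC NotFound, and pod_container_deletor logs the error and moves on. A minimal sketch of that idempotency check (the helper is assumed, not kubelet source; only the grpc status/codes calls are real API):

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// alreadyRemoved reports whether a CRI call failed only because the
// container is already gone (gRPC code NotFound, as in the entries above).
func alreadyRemoved(err error) bool {
	return status.Code(err) == codes.NotFound
}

func main() {
	err := status.Error(codes.NotFound, "could not find container")
	fmt.Println(alreadyRemoved(err)) // true: safe to treat the delete as done
}
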
podUID="0a2e52eb-b82d-4869-ab9b-1c783d1c58fb" containerName="registry-server" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.086196 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a2e52eb-b82d-4869-ab9b-1c783d1c58fb" containerName="registry-server" Oct 10 16:36:27 crc kubenswrapper[4799]: E1010 16:36:27.086208 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="048287bf-12a1-4fef-8fc2-7fa4686d31cc" containerName="registry-server" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.086215 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="048287bf-12a1-4fef-8fc2-7fa4686d31cc" containerName="registry-server" Oct 10 16:36:27 crc kubenswrapper[4799]: E1010 16:36:27.086226 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a2e52eb-b82d-4869-ab9b-1c783d1c58fb" containerName="extract-content" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.086234 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a2e52eb-b82d-4869-ab9b-1c783d1c58fb" containerName="extract-content" Oct 10 16:36:27 crc kubenswrapper[4799]: E1010 16:36:27.086246 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a2e52eb-b82d-4869-ab9b-1c783d1c58fb" containerName="extract-utilities" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.086253 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a2e52eb-b82d-4869-ab9b-1c783d1c58fb" containerName="extract-utilities" Oct 10 16:36:27 crc kubenswrapper[4799]: E1010 16:36:27.086265 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="048287bf-12a1-4fef-8fc2-7fa4686d31cc" containerName="extract-content" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.086272 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="048287bf-12a1-4fef-8fc2-7fa4686d31cc" containerName="extract-content" Oct 10 16:36:27 crc kubenswrapper[4799]: E1010 16:36:27.086283 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7393869-51bf-4974-97c7-33adfe1b44f5" containerName="marketplace-operator" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.086290 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7393869-51bf-4974-97c7-33adfe1b44f5" containerName="marketplace-operator" Oct 10 16:36:27 crc kubenswrapper[4799]: E1010 16:36:27.086298 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec4988cf-fb0f-4df8-8f2d-748a5459bbcc" containerName="extract-utilities" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.086305 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec4988cf-fb0f-4df8-8f2d-748a5459bbcc" containerName="extract-utilities" Oct 10 16:36:27 crc kubenswrapper[4799]: E1010 16:36:27.086315 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec4988cf-fb0f-4df8-8f2d-748a5459bbcc" containerName="registry-server" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.086322 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec4988cf-fb0f-4df8-8f2d-748a5459bbcc" containerName="registry-server" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.086418 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec4988cf-fb0f-4df8-8f2d-748a5459bbcc" containerName="registry-server" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.086429 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a2e52eb-b82d-4869-ab9b-1c783d1c58fb" containerName="registry-server" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.086444 4799 
memory_manager.go:354] "RemoveStaleState removing state" podUID="e7393869-51bf-4974-97c7-33adfe1b44f5" containerName="marketplace-operator" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.086453 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="048287bf-12a1-4fef-8fc2-7fa4686d31cc" containerName="registry-server" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.086466 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce937bef-231c-4353-8af2-f8e4517c68c4" containerName="registry-server" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.087321 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s2pcd" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.089818 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.096577 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-s2pcd"] Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.213466 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/96d94a46-8fc7-4315-8353-8f36275a0669-utilities\") pod \"redhat-marketplace-s2pcd\" (UID: \"96d94a46-8fc7-4315-8353-8f36275a0669\") " pod="openshift-marketplace/redhat-marketplace-s2pcd" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.213563 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gbm5m\" (UniqueName: \"kubernetes.io/projected/96d94a46-8fc7-4315-8353-8f36275a0669-kube-api-access-gbm5m\") pod \"redhat-marketplace-s2pcd\" (UID: \"96d94a46-8fc7-4315-8353-8f36275a0669\") " pod="openshift-marketplace/redhat-marketplace-s2pcd" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.213822 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/96d94a46-8fc7-4315-8353-8f36275a0669-catalog-content\") pod \"redhat-marketplace-s2pcd\" (UID: \"96d94a46-8fc7-4315-8353-8f36275a0669\") " pod="openshift-marketplace/redhat-marketplace-s2pcd" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.285162 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-bmgch"] Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.286354 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bmgch" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.289779 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.295970 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bmgch"] Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.314743 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/96d94a46-8fc7-4315-8353-8f36275a0669-utilities\") pod \"redhat-marketplace-s2pcd\" (UID: \"96d94a46-8fc7-4315-8353-8f36275a0669\") " pod="openshift-marketplace/redhat-marketplace-s2pcd" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.314966 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gbm5m\" (UniqueName: \"kubernetes.io/projected/96d94a46-8fc7-4315-8353-8f36275a0669-kube-api-access-gbm5m\") pod \"redhat-marketplace-s2pcd\" (UID: \"96d94a46-8fc7-4315-8353-8f36275a0669\") " pod="openshift-marketplace/redhat-marketplace-s2pcd" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.315067 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/96d94a46-8fc7-4315-8353-8f36275a0669-catalog-content\") pod \"redhat-marketplace-s2pcd\" (UID: \"96d94a46-8fc7-4315-8353-8f36275a0669\") " pod="openshift-marketplace/redhat-marketplace-s2pcd" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.315705 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/96d94a46-8fc7-4315-8353-8f36275a0669-utilities\") pod \"redhat-marketplace-s2pcd\" (UID: \"96d94a46-8fc7-4315-8353-8f36275a0669\") " pod="openshift-marketplace/redhat-marketplace-s2pcd" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.315793 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/96d94a46-8fc7-4315-8353-8f36275a0669-catalog-content\") pod \"redhat-marketplace-s2pcd\" (UID: \"96d94a46-8fc7-4315-8353-8f36275a0669\") " pod="openshift-marketplace/redhat-marketplace-s2pcd" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.335313 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gbm5m\" (UniqueName: \"kubernetes.io/projected/96d94a46-8fc7-4315-8353-8f36275a0669-kube-api-access-gbm5m\") pod \"redhat-marketplace-s2pcd\" (UID: \"96d94a46-8fc7-4315-8353-8f36275a0669\") " pod="openshift-marketplace/redhat-marketplace-s2pcd" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.404691 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s2pcd" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.411777 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="048287bf-12a1-4fef-8fc2-7fa4686d31cc" path="/var/lib/kubelet/pods/048287bf-12a1-4fef-8fc2-7fa4686d31cc/volumes" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.413179 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0a2e52eb-b82d-4869-ab9b-1c783d1c58fb" path="/var/lib/kubelet/pods/0a2e52eb-b82d-4869-ab9b-1c783d1c58fb/volumes" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.414394 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce937bef-231c-4353-8af2-f8e4517c68c4" path="/var/lib/kubelet/pods/ce937bef-231c-4353-8af2-f8e4517c68c4/volumes" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.416258 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-26z4f\" (UniqueName: \"kubernetes.io/projected/a313fc1d-b984-4d89-bfa1-2703e0e27a5b-kube-api-access-26z4f\") pod \"redhat-operators-bmgch\" (UID: \"a313fc1d-b984-4d89-bfa1-2703e0e27a5b\") " pod="openshift-marketplace/redhat-operators-bmgch" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.416304 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a313fc1d-b984-4d89-bfa1-2703e0e27a5b-catalog-content\") pod \"redhat-operators-bmgch\" (UID: \"a313fc1d-b984-4d89-bfa1-2703e0e27a5b\") " pod="openshift-marketplace/redhat-operators-bmgch" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.416345 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a313fc1d-b984-4d89-bfa1-2703e0e27a5b-utilities\") pod \"redhat-operators-bmgch\" (UID: \"a313fc1d-b984-4d89-bfa1-2703e0e27a5b\") " pod="openshift-marketplace/redhat-operators-bmgch" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.416653 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7393869-51bf-4974-97c7-33adfe1b44f5" path="/var/lib/kubelet/pods/e7393869-51bf-4974-97c7-33adfe1b44f5/volumes" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.417587 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec4988cf-fb0f-4df8-8f2d-748a5459bbcc" path="/var/lib/kubelet/pods/ec4988cf-fb0f-4df8-8f2d-748a5459bbcc/volumes" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.517528 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a313fc1d-b984-4d89-bfa1-2703e0e27a5b-utilities\") pod \"redhat-operators-bmgch\" (UID: \"a313fc1d-b984-4d89-bfa1-2703e0e27a5b\") " pod="openshift-marketplace/redhat-operators-bmgch" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.517604 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-26z4f\" (UniqueName: \"kubernetes.io/projected/a313fc1d-b984-4d89-bfa1-2703e0e27a5b-kube-api-access-26z4f\") pod \"redhat-operators-bmgch\" (UID: \"a313fc1d-b984-4d89-bfa1-2703e0e27a5b\") " pod="openshift-marketplace/redhat-operators-bmgch" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.517638 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/a313fc1d-b984-4d89-bfa1-2703e0e27a5b-catalog-content\") pod \"redhat-operators-bmgch\" (UID: \"a313fc1d-b984-4d89-bfa1-2703e0e27a5b\") " pod="openshift-marketplace/redhat-operators-bmgch" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.518010 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a313fc1d-b984-4d89-bfa1-2703e0e27a5b-catalog-content\") pod \"redhat-operators-bmgch\" (UID: \"a313fc1d-b984-4d89-bfa1-2703e0e27a5b\") " pod="openshift-marketplace/redhat-operators-bmgch" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.518978 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a313fc1d-b984-4d89-bfa1-2703e0e27a5b-utilities\") pod \"redhat-operators-bmgch\" (UID: \"a313fc1d-b984-4d89-bfa1-2703e0e27a5b\") " pod="openshift-marketplace/redhat-operators-bmgch" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.535340 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-26z4f\" (UniqueName: \"kubernetes.io/projected/a313fc1d-b984-4d89-bfa1-2703e0e27a5b-kube-api-access-26z4f\") pod \"redhat-operators-bmgch\" (UID: \"a313fc1d-b984-4d89-bfa1-2703e0e27a5b\") " pod="openshift-marketplace/redhat-operators-bmgch" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.582122 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-s2pcd"] Oct 10 16:36:27 crc kubenswrapper[4799]: W1010 16:36:27.586598 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod96d94a46_8fc7_4315_8353_8f36275a0669.slice/crio-67c71244d34c87ca56dd6e16a295602ac63c28ec9b611d9fcb51ada69e62308b WatchSource:0}: Error finding container 67c71244d34c87ca56dd6e16a295602ac63c28ec9b611d9fcb51ada69e62308b: Status 404 returned error can't find the container with id 67c71244d34c87ca56dd6e16a295602ac63c28ec9b611d9fcb51ada69e62308b Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.612825 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bmgch" Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.785514 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bmgch"] Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.969099 4799 generic.go:334] "Generic (PLEG): container finished" podID="96d94a46-8fc7-4315-8353-8f36275a0669" containerID="d4e83d9146b88d11fc356aabeb3698c6191e8c185036b2ff739a4c947f808161" exitCode=0 Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.969141 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s2pcd" event={"ID":"96d94a46-8fc7-4315-8353-8f36275a0669","Type":"ContainerDied","Data":"d4e83d9146b88d11fc356aabeb3698c6191e8c185036b2ff739a4c947f808161"} Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.969451 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s2pcd" event={"ID":"96d94a46-8fc7-4315-8353-8f36275a0669","Type":"ContainerStarted","Data":"67c71244d34c87ca56dd6e16a295602ac63c28ec9b611d9fcb51ada69e62308b"} Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.975022 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bmgch" event={"ID":"a313fc1d-b984-4d89-bfa1-2703e0e27a5b","Type":"ContainerStarted","Data":"0c6e7668c12d32e3044a2e61e1e053a94f418fb09e490df722677e0b6bf2c921"} Oct 10 16:36:27 crc kubenswrapper[4799]: I1010 16:36:27.975058 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bmgch" event={"ID":"a313fc1d-b984-4d89-bfa1-2703e0e27a5b","Type":"ContainerStarted","Data":"dda65f1a7d56f4bf1cee2d2e2d115be4af692735acaaab77c2666d2ce6206f5e"} Oct 10 16:36:28 crc kubenswrapper[4799]: I1010 16:36:28.987556 4799 generic.go:334] "Generic (PLEG): container finished" podID="a313fc1d-b984-4d89-bfa1-2703e0e27a5b" containerID="0c6e7668c12d32e3044a2e61e1e053a94f418fb09e490df722677e0b6bf2c921" exitCode=0 Oct 10 16:36:28 crc kubenswrapper[4799]: I1010 16:36:28.987820 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bmgch" event={"ID":"a313fc1d-b984-4d89-bfa1-2703e0e27a5b","Type":"ContainerDied","Data":"0c6e7668c12d32e3044a2e61e1e053a94f418fb09e490df722677e0b6bf2c921"} Oct 10 16:36:29 crc kubenswrapper[4799]: I1010 16:36:29.492884 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-x5rwq"] Oct 10 16:36:29 crc kubenswrapper[4799]: I1010 16:36:29.493974 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-x5rwq" Oct 10 16:36:29 crc kubenswrapper[4799]: I1010 16:36:29.495721 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 10 16:36:29 crc kubenswrapper[4799]: I1010 16:36:29.503458 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x5rwq"] Oct 10 16:36:29 crc kubenswrapper[4799]: I1010 16:36:29.644893 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f9d3340-b2a7-4571-a317-7f535f9b900a-catalog-content\") pod \"certified-operators-x5rwq\" (UID: \"0f9d3340-b2a7-4571-a317-7f535f9b900a\") " pod="openshift-marketplace/certified-operators-x5rwq" Oct 10 16:36:29 crc kubenswrapper[4799]: I1010 16:36:29.645090 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dx2gj\" (UniqueName: \"kubernetes.io/projected/0f9d3340-b2a7-4571-a317-7f535f9b900a-kube-api-access-dx2gj\") pod \"certified-operators-x5rwq\" (UID: \"0f9d3340-b2a7-4571-a317-7f535f9b900a\") " pod="openshift-marketplace/certified-operators-x5rwq" Oct 10 16:36:29 crc kubenswrapper[4799]: I1010 16:36:29.645143 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f9d3340-b2a7-4571-a317-7f535f9b900a-utilities\") pod \"certified-operators-x5rwq\" (UID: \"0f9d3340-b2a7-4571-a317-7f535f9b900a\") " pod="openshift-marketplace/certified-operators-x5rwq" Oct 10 16:36:29 crc kubenswrapper[4799]: I1010 16:36:29.706188 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vdv4v"] Oct 10 16:36:29 crc kubenswrapper[4799]: I1010 16:36:29.709549 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vdv4v" Oct 10 16:36:29 crc kubenswrapper[4799]: I1010 16:36:29.712920 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 10 16:36:29 crc kubenswrapper[4799]: I1010 16:36:29.722277 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vdv4v"] Oct 10 16:36:29 crc kubenswrapper[4799]: I1010 16:36:29.747847 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f9d3340-b2a7-4571-a317-7f535f9b900a-catalog-content\") pod \"certified-operators-x5rwq\" (UID: \"0f9d3340-b2a7-4571-a317-7f535f9b900a\") " pod="openshift-marketplace/certified-operators-x5rwq" Oct 10 16:36:29 crc kubenswrapper[4799]: I1010 16:36:29.748301 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f9d3340-b2a7-4571-a317-7f535f9b900a-catalog-content\") pod \"certified-operators-x5rwq\" (UID: \"0f9d3340-b2a7-4571-a317-7f535f9b900a\") " pod="openshift-marketplace/certified-operators-x5rwq" Oct 10 16:36:29 crc kubenswrapper[4799]: I1010 16:36:29.748574 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dx2gj\" (UniqueName: \"kubernetes.io/projected/0f9d3340-b2a7-4571-a317-7f535f9b900a-kube-api-access-dx2gj\") pod \"certified-operators-x5rwq\" (UID: \"0f9d3340-b2a7-4571-a317-7f535f9b900a\") " pod="openshift-marketplace/certified-operators-x5rwq" Oct 10 16:36:29 crc kubenswrapper[4799]: I1010 16:36:29.750231 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f9d3340-b2a7-4571-a317-7f535f9b900a-utilities\") pod \"certified-operators-x5rwq\" (UID: \"0f9d3340-b2a7-4571-a317-7f535f9b900a\") " pod="openshift-marketplace/certified-operators-x5rwq" Oct 10 16:36:29 crc kubenswrapper[4799]: I1010 16:36:29.750959 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f9d3340-b2a7-4571-a317-7f535f9b900a-utilities\") pod \"certified-operators-x5rwq\" (UID: \"0f9d3340-b2a7-4571-a317-7f535f9b900a\") " pod="openshift-marketplace/certified-operators-x5rwq" Oct 10 16:36:29 crc kubenswrapper[4799]: I1010 16:36:29.786496 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dx2gj\" (UniqueName: \"kubernetes.io/projected/0f9d3340-b2a7-4571-a317-7f535f9b900a-kube-api-access-dx2gj\") pod \"certified-operators-x5rwq\" (UID: \"0f9d3340-b2a7-4571-a317-7f535f9b900a\") " pod="openshift-marketplace/certified-operators-x5rwq" Oct 10 16:36:29 crc kubenswrapper[4799]: I1010 16:36:29.851810 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49348cb6-a0ca-44e9-81c9-370942d4e5b7-catalog-content\") pod \"community-operators-vdv4v\" (UID: \"49348cb6-a0ca-44e9-81c9-370942d4e5b7\") " pod="openshift-marketplace/community-operators-vdv4v" Oct 10 16:36:29 crc kubenswrapper[4799]: I1010 16:36:29.851860 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrbrx\" (UniqueName: \"kubernetes.io/projected/49348cb6-a0ca-44e9-81c9-370942d4e5b7-kube-api-access-mrbrx\") pod \"community-operators-vdv4v\" (UID: 
\"49348cb6-a0ca-44e9-81c9-370942d4e5b7\") " pod="openshift-marketplace/community-operators-vdv4v" Oct 10 16:36:29 crc kubenswrapper[4799]: I1010 16:36:29.851900 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49348cb6-a0ca-44e9-81c9-370942d4e5b7-utilities\") pod \"community-operators-vdv4v\" (UID: \"49348cb6-a0ca-44e9-81c9-370942d4e5b7\") " pod="openshift-marketplace/community-operators-vdv4v" Oct 10 16:36:29 crc kubenswrapper[4799]: I1010 16:36:29.854321 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x5rwq" Oct 10 16:36:29 crc kubenswrapper[4799]: I1010 16:36:29.953666 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49348cb6-a0ca-44e9-81c9-370942d4e5b7-catalog-content\") pod \"community-operators-vdv4v\" (UID: \"49348cb6-a0ca-44e9-81c9-370942d4e5b7\") " pod="openshift-marketplace/community-operators-vdv4v" Oct 10 16:36:29 crc kubenswrapper[4799]: I1010 16:36:29.953851 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mrbrx\" (UniqueName: \"kubernetes.io/projected/49348cb6-a0ca-44e9-81c9-370942d4e5b7-kube-api-access-mrbrx\") pod \"community-operators-vdv4v\" (UID: \"49348cb6-a0ca-44e9-81c9-370942d4e5b7\") " pod="openshift-marketplace/community-operators-vdv4v" Oct 10 16:36:29 crc kubenswrapper[4799]: I1010 16:36:29.953899 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49348cb6-a0ca-44e9-81c9-370942d4e5b7-utilities\") pod \"community-operators-vdv4v\" (UID: \"49348cb6-a0ca-44e9-81c9-370942d4e5b7\") " pod="openshift-marketplace/community-operators-vdv4v" Oct 10 16:36:29 crc kubenswrapper[4799]: I1010 16:36:29.954261 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49348cb6-a0ca-44e9-81c9-370942d4e5b7-utilities\") pod \"community-operators-vdv4v\" (UID: \"49348cb6-a0ca-44e9-81c9-370942d4e5b7\") " pod="openshift-marketplace/community-operators-vdv4v" Oct 10 16:36:29 crc kubenswrapper[4799]: I1010 16:36:29.954431 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49348cb6-a0ca-44e9-81c9-370942d4e5b7-catalog-content\") pod \"community-operators-vdv4v\" (UID: \"49348cb6-a0ca-44e9-81c9-370942d4e5b7\") " pod="openshift-marketplace/community-operators-vdv4v" Oct 10 16:36:29 crc kubenswrapper[4799]: I1010 16:36:29.972938 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrbrx\" (UniqueName: \"kubernetes.io/projected/49348cb6-a0ca-44e9-81c9-370942d4e5b7-kube-api-access-mrbrx\") pod \"community-operators-vdv4v\" (UID: \"49348cb6-a0ca-44e9-81c9-370942d4e5b7\") " pod="openshift-marketplace/community-operators-vdv4v" Oct 10 16:36:30 crc kubenswrapper[4799]: I1010 16:36:30.004812 4799 generic.go:334] "Generic (PLEG): container finished" podID="96d94a46-8fc7-4315-8353-8f36275a0669" containerID="54a9349667a9a7a5f5acca54d89e08e55e09dfd65735e9cb4fcd1205a111ede7" exitCode=0 Oct 10 16:36:30 crc kubenswrapper[4799]: I1010 16:36:30.004951 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s2pcd" 
event={"ID":"96d94a46-8fc7-4315-8353-8f36275a0669","Type":"ContainerDied","Data":"54a9349667a9a7a5f5acca54d89e08e55e09dfd65735e9cb4fcd1205a111ede7"} Oct 10 16:36:30 crc kubenswrapper[4799]: I1010 16:36:30.013189 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bmgch" event={"ID":"a313fc1d-b984-4d89-bfa1-2703e0e27a5b","Type":"ContainerStarted","Data":"d8288b01000b10c375aa55a727ab7779ae52130e67b2d75ca59ce62331643946"} Oct 10 16:36:30 crc kubenswrapper[4799]: I1010 16:36:30.057111 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vdv4v" Oct 10 16:36:30 crc kubenswrapper[4799]: I1010 16:36:30.057515 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x5rwq"] Oct 10 16:36:30 crc kubenswrapper[4799]: W1010 16:36:30.069731 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0f9d3340_b2a7_4571_a317_7f535f9b900a.slice/crio-e5572e5f19a7967308d170313402efd27e647d650997a51f2006fd8abc9be112 WatchSource:0}: Error finding container e5572e5f19a7967308d170313402efd27e647d650997a51f2006fd8abc9be112: Status 404 returned error can't find the container with id e5572e5f19a7967308d170313402efd27e647d650997a51f2006fd8abc9be112 Oct 10 16:36:30 crc kubenswrapper[4799]: I1010 16:36:30.222442 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vdv4v"] Oct 10 16:36:30 crc kubenswrapper[4799]: W1010 16:36:30.349177 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod49348cb6_a0ca_44e9_81c9_370942d4e5b7.slice/crio-7eb9061e5ad9f3a61be157ff6dd1e8c4421127e0f3c104dbfbea0dd37aa5919a WatchSource:0}: Error finding container 7eb9061e5ad9f3a61be157ff6dd1e8c4421127e0f3c104dbfbea0dd37aa5919a: Status 404 returned error can't find the container with id 7eb9061e5ad9f3a61be157ff6dd1e8c4421127e0f3c104dbfbea0dd37aa5919a Oct 10 16:36:31 crc kubenswrapper[4799]: I1010 16:36:31.019281 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s2pcd" event={"ID":"96d94a46-8fc7-4315-8353-8f36275a0669","Type":"ContainerStarted","Data":"1f18b5fa39468fe68cad36d76c18577af16e1753a86a1d602232afc3b09d1873"} Oct 10 16:36:31 crc kubenswrapper[4799]: I1010 16:36:31.021022 4799 generic.go:334] "Generic (PLEG): container finished" podID="a313fc1d-b984-4d89-bfa1-2703e0e27a5b" containerID="d8288b01000b10c375aa55a727ab7779ae52130e67b2d75ca59ce62331643946" exitCode=0 Oct 10 16:36:31 crc kubenswrapper[4799]: I1010 16:36:31.021090 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bmgch" event={"ID":"a313fc1d-b984-4d89-bfa1-2703e0e27a5b","Type":"ContainerDied","Data":"d8288b01000b10c375aa55a727ab7779ae52130e67b2d75ca59ce62331643946"} Oct 10 16:36:31 crc kubenswrapper[4799]: I1010 16:36:31.022425 4799 generic.go:334] "Generic (PLEG): container finished" podID="0f9d3340-b2a7-4571-a317-7f535f9b900a" containerID="e83d38507cc835e2393d5f5f33f746924ebfa4676172630efac2079f5b8674eb" exitCode=0 Oct 10 16:36:31 crc kubenswrapper[4799]: I1010 16:36:31.022475 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x5rwq" 
event={"ID":"0f9d3340-b2a7-4571-a317-7f535f9b900a","Type":"ContainerDied","Data":"e83d38507cc835e2393d5f5f33f746924ebfa4676172630efac2079f5b8674eb"} Oct 10 16:36:31 crc kubenswrapper[4799]: I1010 16:36:31.022492 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x5rwq" event={"ID":"0f9d3340-b2a7-4571-a317-7f535f9b900a","Type":"ContainerStarted","Data":"e5572e5f19a7967308d170313402efd27e647d650997a51f2006fd8abc9be112"} Oct 10 16:36:31 crc kubenswrapper[4799]: I1010 16:36:31.023716 4799 generic.go:334] "Generic (PLEG): container finished" podID="49348cb6-a0ca-44e9-81c9-370942d4e5b7" containerID="8c72cabef0bccd663e62c36c128830868838eadcc39c7f851b73f6d0e1f637ad" exitCode=0 Oct 10 16:36:31 crc kubenswrapper[4799]: I1010 16:36:31.023746 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vdv4v" event={"ID":"49348cb6-a0ca-44e9-81c9-370942d4e5b7","Type":"ContainerDied","Data":"8c72cabef0bccd663e62c36c128830868838eadcc39c7f851b73f6d0e1f637ad"} Oct 10 16:36:31 crc kubenswrapper[4799]: I1010 16:36:31.023782 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vdv4v" event={"ID":"49348cb6-a0ca-44e9-81c9-370942d4e5b7","Type":"ContainerStarted","Data":"7eb9061e5ad9f3a61be157ff6dd1e8c4421127e0f3c104dbfbea0dd37aa5919a"} Oct 10 16:36:31 crc kubenswrapper[4799]: I1010 16:36:31.036472 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-s2pcd" podStartSLOduration=1.611696061 podStartE2EDuration="4.036455339s" podCreationTimestamp="2025-10-10 16:36:27 +0000 UTC" firstStartedPulling="2025-10-10 16:36:27.970532695 +0000 UTC m=+281.478856820" lastFinishedPulling="2025-10-10 16:36:30.395291983 +0000 UTC m=+283.903616098" observedRunningTime="2025-10-10 16:36:31.034748166 +0000 UTC m=+284.543072281" watchObservedRunningTime="2025-10-10 16:36:31.036455339 +0000 UTC m=+284.544779454" Oct 10 16:36:32 crc kubenswrapper[4799]: I1010 16:36:32.042620 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bmgch" event={"ID":"a313fc1d-b984-4d89-bfa1-2703e0e27a5b","Type":"ContainerStarted","Data":"88e2a8ccecc02d7e2b340ceace47eaec3c7c8f5086cf5552482debeca587f98a"} Oct 10 16:36:32 crc kubenswrapper[4799]: I1010 16:36:32.071051 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-bmgch" podStartSLOduration=2.504374275 podStartE2EDuration="5.071037011s" podCreationTimestamp="2025-10-10 16:36:27 +0000 UTC" firstStartedPulling="2025-10-10 16:36:28.991869914 +0000 UTC m=+282.500194039" lastFinishedPulling="2025-10-10 16:36:31.55853266 +0000 UTC m=+285.066856775" observedRunningTime="2025-10-10 16:36:32.069523573 +0000 UTC m=+285.577847688" watchObservedRunningTime="2025-10-10 16:36:32.071037011 +0000 UTC m=+285.579361126" Oct 10 16:36:33 crc kubenswrapper[4799]: I1010 16:36:33.048394 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x5rwq" event={"ID":"0f9d3340-b2a7-4571-a317-7f535f9b900a","Type":"ContainerStarted","Data":"c7599865add37e644f9b406cb6c3bd9ade63a0afa8d6ae73166eacd4a887d4c3"} Oct 10 16:36:33 crc kubenswrapper[4799]: I1010 16:36:33.051198 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vdv4v" 
event={"ID":"49348cb6-a0ca-44e9-81c9-370942d4e5b7","Type":"ContainerStarted","Data":"2ac0dbd976177d28e98d2b18e48eb6c3dd52ad9146d394f017ada5f690bf7d97"} Oct 10 16:36:34 crc kubenswrapper[4799]: I1010 16:36:34.058239 4799 generic.go:334] "Generic (PLEG): container finished" podID="49348cb6-a0ca-44e9-81c9-370942d4e5b7" containerID="2ac0dbd976177d28e98d2b18e48eb6c3dd52ad9146d394f017ada5f690bf7d97" exitCode=0 Oct 10 16:36:34 crc kubenswrapper[4799]: I1010 16:36:34.058342 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vdv4v" event={"ID":"49348cb6-a0ca-44e9-81c9-370942d4e5b7","Type":"ContainerDied","Data":"2ac0dbd976177d28e98d2b18e48eb6c3dd52ad9146d394f017ada5f690bf7d97"} Oct 10 16:36:34 crc kubenswrapper[4799]: I1010 16:36:34.062318 4799 generic.go:334] "Generic (PLEG): container finished" podID="0f9d3340-b2a7-4571-a317-7f535f9b900a" containerID="c7599865add37e644f9b406cb6c3bd9ade63a0afa8d6ae73166eacd4a887d4c3" exitCode=0 Oct 10 16:36:34 crc kubenswrapper[4799]: I1010 16:36:34.062358 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x5rwq" event={"ID":"0f9d3340-b2a7-4571-a317-7f535f9b900a","Type":"ContainerDied","Data":"c7599865add37e644f9b406cb6c3bd9ade63a0afa8d6ae73166eacd4a887d4c3"} Oct 10 16:36:35 crc kubenswrapper[4799]: I1010 16:36:35.070096 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x5rwq" event={"ID":"0f9d3340-b2a7-4571-a317-7f535f9b900a","Type":"ContainerStarted","Data":"1c42073bc770f58726975e9504d336e63c172d733341a53aee75558acd928d41"} Oct 10 16:36:35 crc kubenswrapper[4799]: I1010 16:36:35.071873 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vdv4v" event={"ID":"49348cb6-a0ca-44e9-81c9-370942d4e5b7","Type":"ContainerStarted","Data":"017307e03e9b84aa53e1d09494898a5393495181f93263582950c5638c4e01b1"} Oct 10 16:36:35 crc kubenswrapper[4799]: I1010 16:36:35.086580 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-x5rwq" podStartSLOduration=2.656052102 podStartE2EDuration="6.086567057s" podCreationTimestamp="2025-10-10 16:36:29 +0000 UTC" firstStartedPulling="2025-10-10 16:36:31.02380635 +0000 UTC m=+284.532130485" lastFinishedPulling="2025-10-10 16:36:34.454321325 +0000 UTC m=+287.962645440" observedRunningTime="2025-10-10 16:36:35.085844549 +0000 UTC m=+288.594168704" watchObservedRunningTime="2025-10-10 16:36:35.086567057 +0000 UTC m=+288.594891162" Oct 10 16:36:37 crc kubenswrapper[4799]: I1010 16:36:37.410705 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-s2pcd" Oct 10 16:36:37 crc kubenswrapper[4799]: I1010 16:36:37.410912 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-s2pcd" Oct 10 16:36:37 crc kubenswrapper[4799]: I1010 16:36:37.470499 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-s2pcd" Oct 10 16:36:37 crc kubenswrapper[4799]: I1010 16:36:37.485049 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-vdv4v" podStartSLOduration=5.052820977 podStartE2EDuration="8.485029254s" podCreationTimestamp="2025-10-10 16:36:29 +0000 UTC" firstStartedPulling="2025-10-10 16:36:31.026463437 +0000 UTC m=+284.534787552" 
lastFinishedPulling="2025-10-10 16:36:34.458671714 +0000 UTC m=+287.966995829" observedRunningTime="2025-10-10 16:36:35.101447812 +0000 UTC m=+288.609771957" watchObservedRunningTime="2025-10-10 16:36:37.485029254 +0000 UTC m=+290.993353379" Oct 10 16:36:37 crc kubenswrapper[4799]: I1010 16:36:37.612987 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-bmgch" Oct 10 16:36:37 crc kubenswrapper[4799]: I1010 16:36:37.613033 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-bmgch" Oct 10 16:36:37 crc kubenswrapper[4799]: I1010 16:36:37.668293 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-bmgch" Oct 10 16:36:38 crc kubenswrapper[4799]: I1010 16:36:38.134974 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-s2pcd" Oct 10 16:36:38 crc kubenswrapper[4799]: I1010 16:36:38.150393 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-bmgch" Oct 10 16:36:39 crc kubenswrapper[4799]: I1010 16:36:39.855262 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-x5rwq" Oct 10 16:36:39 crc kubenswrapper[4799]: I1010 16:36:39.855334 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-x5rwq" Oct 10 16:36:39 crc kubenswrapper[4799]: I1010 16:36:39.897310 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-x5rwq" Oct 10 16:36:40 crc kubenswrapper[4799]: I1010 16:36:40.058845 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vdv4v" Oct 10 16:36:40 crc kubenswrapper[4799]: I1010 16:36:40.058901 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vdv4v" Oct 10 16:36:40 crc kubenswrapper[4799]: I1010 16:36:40.103979 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vdv4v" Oct 10 16:36:40 crc kubenswrapper[4799]: I1010 16:36:40.141334 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-x5rwq" Oct 10 16:36:40 crc kubenswrapper[4799]: I1010 16:36:40.160044 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vdv4v" Oct 10 16:37:45 crc kubenswrapper[4799]: I1010 16:37:45.248467 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 16:37:45 crc kubenswrapper[4799]: I1010 16:37:45.249011 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 16:38:15 crc kubenswrapper[4799]: I1010 16:38:15.249675 4799 patch_prober.go:28] interesting 
Oct 10 16:37:45 crc kubenswrapper[4799]: I1010 16:37:45.248467 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 10 16:37:45 crc kubenswrapper[4799]: I1010 16:37:45.249011 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 10 16:38:15 crc kubenswrapper[4799]: I1010 16:38:15.249675 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 10 16:38:15 crc kubenswrapper[4799]: I1010 16:38:15.250491 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 10 16:38:45 crc kubenswrapper[4799]: I1010 16:38:45.248356 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 10 16:38:45 crc kubenswrapper[4799]: I1010 16:38:45.249082 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 10 16:38:45 crc kubenswrapper[4799]: I1010 16:38:45.249161 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc"
Oct 10 16:38:45 crc kubenswrapper[4799]: I1010 16:38:45.250198 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a685c745539e6b5d6ae1c99d911448ed7e9748dc5640aa9ad19005d1a2df7456"} pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 10 16:38:45 crc kubenswrapper[4799]: I1010 16:38:45.250319 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" containerID="cri-o://a685c745539e6b5d6ae1c99d911448ed7e9748dc5640aa9ad19005d1a2df7456" gracePeriod=600
Oct 10 16:38:45 crc kubenswrapper[4799]: I1010 16:38:45.961849 4799 generic.go:334] "Generic (PLEG): container finished" podID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerID="a685c745539e6b5d6ae1c99d911448ed7e9748dc5640aa9ad19005d1a2df7456" exitCode=0
Oct 10 16:38:45 crc kubenswrapper[4799]: I1010 16:38:45.961939 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerDied","Data":"a685c745539e6b5d6ae1c99d911448ed7e9748dc5640aa9ad19005d1a2df7456"}
Oct 10 16:38:45 crc kubenswrapper[4799]: I1010 16:38:45.962244 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"6060cbfbf40b005d1ca61153e05b93d95432b8f5bf820a7b753f840c4cc943ae"}
Oct 10 16:38:45 crc kubenswrapper[4799]: I1010 16:38:45.962278 4799 scope.go:117] "RemoveContainer" containerID="0ad00545d7a2fff370e19a55a89365b8c9914cb6286dbf1892d7ad0f399288a5"
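
The liveness failures above land exactly 30 seconds apart (16:37:45, 16:38:15, 16:38:45), and on the third one the kubelet kills machine-config-daemon with a 600 s grace period and restarts it. That cadence is consistent with an HTTP probe configured with periodSeconds=30 and failureThreshold=3; a sketch of such a probe (inferred from the log, the actual machine-config-daemon manifest may differ):

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/util/intstr"
)

func main() {
	// Probe shape inferred from the failure cadence above.
	probe := corev1.Probe{
		ProbeHandler: corev1.ProbeHandler{
			HTTPGet: &corev1.HTTPGetAction{
				Path: "/health",
				Port: intstr.FromInt(8798),
				Host: "127.0.0.1",
			},
		},
		PeriodSeconds:    30, // failures logged 30 s apart
		FailureThreshold: 3,  // restart decision lands on the third miss
	}
	fmt.Printf("liveness: GET %s:%d%s\n", probe.HTTPGet.Host, probe.HTTPGet.Port.IntValue(), probe.HTTPGet.Path)
}
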
Oct 10 16:40:08 crc kubenswrapper[4799]: I1010 16:40:08.703292 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-cqvpm"]
Oct 10 16:40:08 crc kubenswrapper[4799]: I1010 16:40:08.704683 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-cqvpm"
Oct 10 16:40:08 crc kubenswrapper[4799]: I1010 16:40:08.718630 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-cqvpm"]
Oct 10 16:40:08 crc kubenswrapper[4799]: I1010 16:40:08.860330 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ab397657-b0ad-4620-80eb-2eb1d7b77735-registry-tls\") pod \"image-registry-66df7c8f76-cqvpm\" (UID: \"ab397657-b0ad-4620-80eb-2eb1d7b77735\") " pod="openshift-image-registry/image-registry-66df7c8f76-cqvpm"
Oct 10 16:40:08 crc kubenswrapper[4799]: I1010 16:40:08.860409 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ab397657-b0ad-4620-80eb-2eb1d7b77735-bound-sa-token\") pod \"image-registry-66df7c8f76-cqvpm\" (UID: \"ab397657-b0ad-4620-80eb-2eb1d7b77735\") " pod="openshift-image-registry/image-registry-66df7c8f76-cqvpm"
Oct 10 16:40:08 crc kubenswrapper[4799]: I1010 16:40:08.860498 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ab397657-b0ad-4620-80eb-2eb1d7b77735-ca-trust-extracted\") pod \"image-registry-66df7c8f76-cqvpm\" (UID: \"ab397657-b0ad-4620-80eb-2eb1d7b77735\") " pod="openshift-image-registry/image-registry-66df7c8f76-cqvpm"
Oct 10 16:40:08 crc kubenswrapper[4799]: I1010 16:40:08.860525 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-cqvpm\" (UID: \"ab397657-b0ad-4620-80eb-2eb1d7b77735\") " pod="openshift-image-registry/image-registry-66df7c8f76-cqvpm"
Oct 10 16:40:08 crc kubenswrapper[4799]: I1010 16:40:08.860573 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ab397657-b0ad-4620-80eb-2eb1d7b77735-registry-certificates\") pod \"image-registry-66df7c8f76-cqvpm\" (UID: \"ab397657-b0ad-4620-80eb-2eb1d7b77735\") " pod="openshift-image-registry/image-registry-66df7c8f76-cqvpm"
Oct 10 16:40:08 crc kubenswrapper[4799]: I1010 16:40:08.860597 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ab397657-b0ad-4620-80eb-2eb1d7b77735-installation-pull-secrets\") pod \"image-registry-66df7c8f76-cqvpm\" (UID: \"ab397657-b0ad-4620-80eb-2eb1d7b77735\") " pod="openshift-image-registry/image-registry-66df7c8f76-cqvpm"
Oct 10 16:40:08 crc kubenswrapper[4799]: I1010 16:40:08.860652 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ab397657-b0ad-4620-80eb-2eb1d7b77735-trusted-ca\") pod \"image-registry-66df7c8f76-cqvpm\" (UID: \"ab397657-b0ad-4620-80eb-2eb1d7b77735\") "
pod="openshift-image-registry/image-registry-66df7c8f76-cqvpm" Oct 10 16:40:08 crc kubenswrapper[4799]: I1010 16:40:08.860710 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8hqd\" (UniqueName: \"kubernetes.io/projected/ab397657-b0ad-4620-80eb-2eb1d7b77735-kube-api-access-k8hqd\") pod \"image-registry-66df7c8f76-cqvpm\" (UID: \"ab397657-b0ad-4620-80eb-2eb1d7b77735\") " pod="openshift-image-registry/image-registry-66df7c8f76-cqvpm" Oct 10 16:40:08 crc kubenswrapper[4799]: I1010 16:40:08.881822 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-cqvpm\" (UID: \"ab397657-b0ad-4620-80eb-2eb1d7b77735\") " pod="openshift-image-registry/image-registry-66df7c8f76-cqvpm" Oct 10 16:40:08 crc kubenswrapper[4799]: I1010 16:40:08.961514 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ab397657-b0ad-4620-80eb-2eb1d7b77735-ca-trust-extracted\") pod \"image-registry-66df7c8f76-cqvpm\" (UID: \"ab397657-b0ad-4620-80eb-2eb1d7b77735\") " pod="openshift-image-registry/image-registry-66df7c8f76-cqvpm" Oct 10 16:40:08 crc kubenswrapper[4799]: I1010 16:40:08.961594 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ab397657-b0ad-4620-80eb-2eb1d7b77735-registry-certificates\") pod \"image-registry-66df7c8f76-cqvpm\" (UID: \"ab397657-b0ad-4620-80eb-2eb1d7b77735\") " pod="openshift-image-registry/image-registry-66df7c8f76-cqvpm" Oct 10 16:40:08 crc kubenswrapper[4799]: I1010 16:40:08.961644 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ab397657-b0ad-4620-80eb-2eb1d7b77735-installation-pull-secrets\") pod \"image-registry-66df7c8f76-cqvpm\" (UID: \"ab397657-b0ad-4620-80eb-2eb1d7b77735\") " pod="openshift-image-registry/image-registry-66df7c8f76-cqvpm" Oct 10 16:40:08 crc kubenswrapper[4799]: I1010 16:40:08.961690 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ab397657-b0ad-4620-80eb-2eb1d7b77735-trusted-ca\") pod \"image-registry-66df7c8f76-cqvpm\" (UID: \"ab397657-b0ad-4620-80eb-2eb1d7b77735\") " pod="openshift-image-registry/image-registry-66df7c8f76-cqvpm" Oct 10 16:40:08 crc kubenswrapper[4799]: I1010 16:40:08.961794 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k8hqd\" (UniqueName: \"kubernetes.io/projected/ab397657-b0ad-4620-80eb-2eb1d7b77735-kube-api-access-k8hqd\") pod \"image-registry-66df7c8f76-cqvpm\" (UID: \"ab397657-b0ad-4620-80eb-2eb1d7b77735\") " pod="openshift-image-registry/image-registry-66df7c8f76-cqvpm" Oct 10 16:40:08 crc kubenswrapper[4799]: I1010 16:40:08.961846 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ab397657-b0ad-4620-80eb-2eb1d7b77735-registry-tls\") pod \"image-registry-66df7c8f76-cqvpm\" (UID: \"ab397657-b0ad-4620-80eb-2eb1d7b77735\") " pod="openshift-image-registry/image-registry-66df7c8f76-cqvpm" Oct 10 16:40:08 crc kubenswrapper[4799]: I1010 16:40:08.961879 4799 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ab397657-b0ad-4620-80eb-2eb1d7b77735-bound-sa-token\") pod \"image-registry-66df7c8f76-cqvpm\" (UID: \"ab397657-b0ad-4620-80eb-2eb1d7b77735\") " pod="openshift-image-registry/image-registry-66df7c8f76-cqvpm" Oct 10 16:40:08 crc kubenswrapper[4799]: I1010 16:40:08.962433 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ab397657-b0ad-4620-80eb-2eb1d7b77735-ca-trust-extracted\") pod \"image-registry-66df7c8f76-cqvpm\" (UID: \"ab397657-b0ad-4620-80eb-2eb1d7b77735\") " pod="openshift-image-registry/image-registry-66df7c8f76-cqvpm" Oct 10 16:40:08 crc kubenswrapper[4799]: I1010 16:40:08.963464 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ab397657-b0ad-4620-80eb-2eb1d7b77735-registry-certificates\") pod \"image-registry-66df7c8f76-cqvpm\" (UID: \"ab397657-b0ad-4620-80eb-2eb1d7b77735\") " pod="openshift-image-registry/image-registry-66df7c8f76-cqvpm" Oct 10 16:40:08 crc kubenswrapper[4799]: I1010 16:40:08.965264 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ab397657-b0ad-4620-80eb-2eb1d7b77735-trusted-ca\") pod \"image-registry-66df7c8f76-cqvpm\" (UID: \"ab397657-b0ad-4620-80eb-2eb1d7b77735\") " pod="openshift-image-registry/image-registry-66df7c8f76-cqvpm" Oct 10 16:40:08 crc kubenswrapper[4799]: I1010 16:40:08.972599 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ab397657-b0ad-4620-80eb-2eb1d7b77735-registry-tls\") pod \"image-registry-66df7c8f76-cqvpm\" (UID: \"ab397657-b0ad-4620-80eb-2eb1d7b77735\") " pod="openshift-image-registry/image-registry-66df7c8f76-cqvpm" Oct 10 16:40:08 crc kubenswrapper[4799]: I1010 16:40:08.974516 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ab397657-b0ad-4620-80eb-2eb1d7b77735-installation-pull-secrets\") pod \"image-registry-66df7c8f76-cqvpm\" (UID: \"ab397657-b0ad-4620-80eb-2eb1d7b77735\") " pod="openshift-image-registry/image-registry-66df7c8f76-cqvpm" Oct 10 16:40:08 crc kubenswrapper[4799]: I1010 16:40:08.991856 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ab397657-b0ad-4620-80eb-2eb1d7b77735-bound-sa-token\") pod \"image-registry-66df7c8f76-cqvpm\" (UID: \"ab397657-b0ad-4620-80eb-2eb1d7b77735\") " pod="openshift-image-registry/image-registry-66df7c8f76-cqvpm" Oct 10 16:40:08 crc kubenswrapper[4799]: I1010 16:40:08.994207 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k8hqd\" (UniqueName: \"kubernetes.io/projected/ab397657-b0ad-4620-80eb-2eb1d7b77735-kube-api-access-k8hqd\") pod \"image-registry-66df7c8f76-cqvpm\" (UID: \"ab397657-b0ad-4620-80eb-2eb1d7b77735\") " pod="openshift-image-registry/image-registry-66df7c8f76-cqvpm" Oct 10 16:40:09 crc kubenswrapper[4799]: I1010 16:40:09.023465 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-cqvpm" Oct 10 16:40:09 crc kubenswrapper[4799]: I1010 16:40:09.261646 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-cqvpm"] Oct 10 16:40:09 crc kubenswrapper[4799]: I1010 16:40:09.604710 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-cqvpm" event={"ID":"ab397657-b0ad-4620-80eb-2eb1d7b77735","Type":"ContainerStarted","Data":"eb3ecfd0fef478c2ba584caddae625e10a6ca9192dd3b0262e7794e326d4555d"} Oct 10 16:40:09 crc kubenswrapper[4799]: I1010 16:40:09.605076 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-cqvpm" event={"ID":"ab397657-b0ad-4620-80eb-2eb1d7b77735","Type":"ContainerStarted","Data":"4dc9c81e4994df8abebde4ee2bbcd3de5f8b47bcf715cc892606e89906cc12ca"} Oct 10 16:40:09 crc kubenswrapper[4799]: I1010 16:40:09.605142 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-cqvpm" Oct 10 16:40:09 crc kubenswrapper[4799]: I1010 16:40:09.631509 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-cqvpm" podStartSLOduration=1.63148712 podStartE2EDuration="1.63148712s" podCreationTimestamp="2025-10-10 16:40:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:40:09.626702264 +0000 UTC m=+503.135026469" watchObservedRunningTime="2025-10-10 16:40:09.63148712 +0000 UTC m=+503.139811245" Oct 10 16:40:29 crc kubenswrapper[4799]: I1010 16:40:29.029208 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-cqvpm" Oct 10 16:40:29 crc kubenswrapper[4799]: I1010 16:40:29.110166 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-89gcz"] Oct 10 16:40:45 crc kubenswrapper[4799]: I1010 16:40:45.248222 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 16:40:45 crc kubenswrapper[4799]: I1010 16:40:45.248800 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 16:40:54 crc kubenswrapper[4799]: I1010 16:40:54.169396 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" podUID="60ab14da-0f2e-48cc-873a-44eaf0662ed8" containerName="registry" containerID="cri-o://18d970602ee3f1139c249a21549627b3ee4aef7552b5efcc210e434fbb5e61e7" gracePeriod=30 Oct 10 16:40:54 crc kubenswrapper[4799]: E1010 16:40:54.262411 4799 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod60ab14da_0f2e_48cc_873a_44eaf0662ed8.slice/crio-18d970602ee3f1139c249a21549627b3ee4aef7552b5efcc210e434fbb5e61e7.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod60ab14da_0f2e_48cc_873a_44eaf0662ed8.slice/crio-conmon-18d970602ee3f1139c249a21549627b3ee4aef7552b5efcc210e434fbb5e61e7.scope\": RecentStats: unable to find data in memory cache]" Oct 10 16:40:54 crc kubenswrapper[4799]: I1010 16:40:54.572098 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:40:54 crc kubenswrapper[4799]: I1010 16:40:54.709642 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/60ab14da-0f2e-48cc-873a-44eaf0662ed8-registry-certificates\") pod \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " Oct 10 16:40:54 crc kubenswrapper[4799]: I1010 16:40:54.709716 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7bfnt\" (UniqueName: \"kubernetes.io/projected/60ab14da-0f2e-48cc-873a-44eaf0662ed8-kube-api-access-7bfnt\") pod \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " Oct 10 16:40:54 crc kubenswrapper[4799]: I1010 16:40:54.709824 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/60ab14da-0f2e-48cc-873a-44eaf0662ed8-registry-tls\") pod \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " Oct 10 16:40:54 crc kubenswrapper[4799]: I1010 16:40:54.709941 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/60ab14da-0f2e-48cc-873a-44eaf0662ed8-installation-pull-secrets\") pod \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " Oct 10 16:40:54 crc kubenswrapper[4799]: I1010 16:40:54.709989 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/60ab14da-0f2e-48cc-873a-44eaf0662ed8-trusted-ca\") pod \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " Oct 10 16:40:54 crc kubenswrapper[4799]: I1010 16:40:54.710231 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " Oct 10 16:40:54 crc kubenswrapper[4799]: I1010 16:40:54.710299 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/60ab14da-0f2e-48cc-873a-44eaf0662ed8-ca-trust-extracted\") pod \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\" (UID: \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " Oct 10 16:40:54 crc kubenswrapper[4799]: I1010 16:40:54.710353 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/60ab14da-0f2e-48cc-873a-44eaf0662ed8-bound-sa-token\") pod \"60ab14da-0f2e-48cc-873a-44eaf0662ed8\" (UID: 
\"60ab14da-0f2e-48cc-873a-44eaf0662ed8\") " Oct 10 16:40:54 crc kubenswrapper[4799]: I1010 16:40:54.711437 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/60ab14da-0f2e-48cc-873a-44eaf0662ed8-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "60ab14da-0f2e-48cc-873a-44eaf0662ed8" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:40:54 crc kubenswrapper[4799]: I1010 16:40:54.711489 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/60ab14da-0f2e-48cc-873a-44eaf0662ed8-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "60ab14da-0f2e-48cc-873a-44eaf0662ed8" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:40:54 crc kubenswrapper[4799]: I1010 16:40:54.721542 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60ab14da-0f2e-48cc-873a-44eaf0662ed8-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "60ab14da-0f2e-48cc-873a-44eaf0662ed8" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:40:54 crc kubenswrapper[4799]: I1010 16:40:54.721800 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60ab14da-0f2e-48cc-873a-44eaf0662ed8-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "60ab14da-0f2e-48cc-873a-44eaf0662ed8" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:40:54 crc kubenswrapper[4799]: I1010 16:40:54.722132 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60ab14da-0f2e-48cc-873a-44eaf0662ed8-kube-api-access-7bfnt" (OuterVolumeSpecName: "kube-api-access-7bfnt") pod "60ab14da-0f2e-48cc-873a-44eaf0662ed8" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8"). InnerVolumeSpecName "kube-api-access-7bfnt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:40:54 crc kubenswrapper[4799]: I1010 16:40:54.722397 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60ab14da-0f2e-48cc-873a-44eaf0662ed8-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "60ab14da-0f2e-48cc-873a-44eaf0662ed8" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:40:54 crc kubenswrapper[4799]: I1010 16:40:54.724563 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "60ab14da-0f2e-48cc-873a-44eaf0662ed8" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 10 16:40:54 crc kubenswrapper[4799]: I1010 16:40:54.745242 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/60ab14da-0f2e-48cc-873a-44eaf0662ed8-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "60ab14da-0f2e-48cc-873a-44eaf0662ed8" (UID: "60ab14da-0f2e-48cc-873a-44eaf0662ed8"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:40:54 crc kubenswrapper[4799]: I1010 16:40:54.811634 4799 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/60ab14da-0f2e-48cc-873a-44eaf0662ed8-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 10 16:40:54 crc kubenswrapper[4799]: I1010 16:40:54.811690 4799 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/60ab14da-0f2e-48cc-873a-44eaf0662ed8-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 10 16:40:54 crc kubenswrapper[4799]: I1010 16:40:54.811719 4799 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/60ab14da-0f2e-48cc-873a-44eaf0662ed8-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 10 16:40:54 crc kubenswrapper[4799]: I1010 16:40:54.811741 4799 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/60ab14da-0f2e-48cc-873a-44eaf0662ed8-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 10 16:40:54 crc kubenswrapper[4799]: I1010 16:40:54.811796 4799 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/60ab14da-0f2e-48cc-873a-44eaf0662ed8-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 10 16:40:54 crc kubenswrapper[4799]: I1010 16:40:54.811820 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7bfnt\" (UniqueName: \"kubernetes.io/projected/60ab14da-0f2e-48cc-873a-44eaf0662ed8-kube-api-access-7bfnt\") on node \"crc\" DevicePath \"\"" Oct 10 16:40:54 crc kubenswrapper[4799]: I1010 16:40:54.811841 4799 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/60ab14da-0f2e-48cc-873a-44eaf0662ed8-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 10 16:40:54 crc kubenswrapper[4799]: I1010 16:40:54.912509 4799 generic.go:334] "Generic (PLEG): container finished" podID="60ab14da-0f2e-48cc-873a-44eaf0662ed8" containerID="18d970602ee3f1139c249a21549627b3ee4aef7552b5efcc210e434fbb5e61e7" exitCode=0 Oct 10 16:40:54 crc kubenswrapper[4799]: I1010 16:40:54.912557 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" Oct 10 16:40:54 crc kubenswrapper[4799]: I1010 16:40:54.912574 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" event={"ID":"60ab14da-0f2e-48cc-873a-44eaf0662ed8","Type":"ContainerDied","Data":"18d970602ee3f1139c249a21549627b3ee4aef7552b5efcc210e434fbb5e61e7"} Oct 10 16:40:54 crc kubenswrapper[4799]: I1010 16:40:54.912664 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-89gcz" event={"ID":"60ab14da-0f2e-48cc-873a-44eaf0662ed8","Type":"ContainerDied","Data":"4e39e4bedf5e43eafe0e998497bf4f65e6e7724b2450001ac981e655e3f7b758"} Oct 10 16:40:54 crc kubenswrapper[4799]: I1010 16:40:54.912695 4799 scope.go:117] "RemoveContainer" containerID="18d970602ee3f1139c249a21549627b3ee4aef7552b5efcc210e434fbb5e61e7" Oct 10 16:40:54 crc kubenswrapper[4799]: I1010 16:40:54.933746 4799 scope.go:117] "RemoveContainer" containerID="18d970602ee3f1139c249a21549627b3ee4aef7552b5efcc210e434fbb5e61e7" Oct 10 16:40:54 crc kubenswrapper[4799]: E1010 16:40:54.941911 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18d970602ee3f1139c249a21549627b3ee4aef7552b5efcc210e434fbb5e61e7\": container with ID starting with 18d970602ee3f1139c249a21549627b3ee4aef7552b5efcc210e434fbb5e61e7 not found: ID does not exist" containerID="18d970602ee3f1139c249a21549627b3ee4aef7552b5efcc210e434fbb5e61e7" Oct 10 16:40:54 crc kubenswrapper[4799]: I1010 16:40:54.941964 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18d970602ee3f1139c249a21549627b3ee4aef7552b5efcc210e434fbb5e61e7"} err="failed to get container status \"18d970602ee3f1139c249a21549627b3ee4aef7552b5efcc210e434fbb5e61e7\": rpc error: code = NotFound desc = could not find container \"18d970602ee3f1139c249a21549627b3ee4aef7552b5efcc210e434fbb5e61e7\": container with ID starting with 18d970602ee3f1139c249a21549627b3ee4aef7552b5efcc210e434fbb5e61e7 not found: ID does not exist" Oct 10 16:40:54 crc kubenswrapper[4799]: I1010 16:40:54.953252 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-89gcz"] Oct 10 16:40:54 crc kubenswrapper[4799]: I1010 16:40:54.959054 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-89gcz"] Oct 10 16:40:55 crc kubenswrapper[4799]: I1010 16:40:55.417116 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60ab14da-0f2e-48cc-873a-44eaf0662ed8" path="/var/lib/kubelet/pods/60ab14da-0f2e-48cc-873a-44eaf0662ed8/volumes" Oct 10 16:41:15 crc kubenswrapper[4799]: I1010 16:41:15.249215 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 16:41:15 crc kubenswrapper[4799]: I1010 16:41:15.249977 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 16:41:45 crc 
kubenswrapper[4799]: I1010 16:41:45.248734 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 16:41:45 crc kubenswrapper[4799]: I1010 16:41:45.249354 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 16:41:45 crc kubenswrapper[4799]: I1010 16:41:45.249406 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 16:41:45 crc kubenswrapper[4799]: I1010 16:41:45.250054 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6060cbfbf40b005d1ca61153e05b93d95432b8f5bf820a7b753f840c4cc943ae"} pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 10 16:41:45 crc kubenswrapper[4799]: I1010 16:41:45.250124 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" containerID="cri-o://6060cbfbf40b005d1ca61153e05b93d95432b8f5bf820a7b753f840c4cc943ae" gracePeriod=600 Oct 10 16:41:46 crc kubenswrapper[4799]: I1010 16:41:46.252999 4799 generic.go:334] "Generic (PLEG): container finished" podID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerID="6060cbfbf40b005d1ca61153e05b93d95432b8f5bf820a7b753f840c4cc943ae" exitCode=0 Oct 10 16:41:46 crc kubenswrapper[4799]: I1010 16:41:46.253094 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerDied","Data":"6060cbfbf40b005d1ca61153e05b93d95432b8f5bf820a7b753f840c4cc943ae"} Oct 10 16:41:46 crc kubenswrapper[4799]: I1010 16:41:46.253577 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"ff017f427623f1c99da82aa1f76b3d32ffeae8d4ca8e7ce1e98dc285ba08fb9c"} Oct 10 16:41:46 crc kubenswrapper[4799]: I1010 16:41:46.253600 4799 scope.go:117] "RemoveContainer" containerID="a685c745539e6b5d6ae1c99d911448ed7e9748dc5640aa9ad19005d1a2df7456" Oct 10 16:43:12 crc kubenswrapper[4799]: I1010 16:43:12.393903 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-mcwfc"] Oct 10 16:43:12 crc kubenswrapper[4799]: I1010 16:43:12.395279 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="ovn-controller" containerID="cri-o://ff4fcf53aeed6c07f775152de0faa9fa0671848df06d37cbca6ec7097d0024d5" gracePeriod=30 Oct 10 16:43:12 crc kubenswrapper[4799]: I1010 16:43:12.396124 4799 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="sbdb" containerID="cri-o://08406e220de50ba85f882a05117b5df8c9445a38c026bb85c95fc9f98f2d2cfe" gracePeriod=30 Oct 10 16:43:12 crc kubenswrapper[4799]: I1010 16:43:12.396171 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="nbdb" containerID="cri-o://c7d0e536ad5143941dd18418b1ac7972a1136a841542b950f6891a386d43ca9c" gracePeriod=30 Oct 10 16:43:12 crc kubenswrapper[4799]: I1010 16:43:12.396207 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="northd" containerID="cri-o://cfe05183ad0b03415525e6aa2a8d52a5d63b8c273113c46326396df5e0c2bb12" gracePeriod=30 Oct 10 16:43:12 crc kubenswrapper[4799]: I1010 16:43:12.396241 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://cd261112ca7db4d0d76f6ab29a0347d64dccfff4db42ac9f55d6d7df1443ab23" gracePeriod=30 Oct 10 16:43:12 crc kubenswrapper[4799]: I1010 16:43:12.396276 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="kube-rbac-proxy-node" containerID="cri-o://8cbc87c392646ebf9c016f8c7b40bcec30e33a0a05ea4a896d1143c5f1086990" gracePeriod=30 Oct 10 16:43:12 crc kubenswrapper[4799]: I1010 16:43:12.396326 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="ovn-acl-logging" containerID="cri-o://6562d440ce1f1477fd09c15c34ab88e17e1fb2c2cae4b32a7bf8cbdd29f4d5a3" gracePeriod=30 Oct 10 16:43:12 crc kubenswrapper[4799]: I1010 16:43:12.461200 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="ovnkube-controller" containerID="cri-o://7dd9264bc068801f661655f2e77814e97ed7f61382f1e64dc3f9c3ffd546bdb3" gracePeriod=30 Oct 10 16:43:12 crc kubenswrapper[4799]: I1010 16:43:12.859138 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gg5hb_f000ac73-b5de-47c8-a0a7-84bd06475f62/kube-multus/2.log" Oct 10 16:43:12 crc kubenswrapper[4799]: I1010 16:43:12.860710 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gg5hb_f000ac73-b5de-47c8-a0a7-84bd06475f62/kube-multus/1.log" Oct 10 16:43:12 crc kubenswrapper[4799]: I1010 16:43:12.860812 4799 generic.go:334] "Generic (PLEG): container finished" podID="f000ac73-b5de-47c8-a0a7-84bd06475f62" containerID="8c9c3aaa13091c5803bad24fd1f3a1d3fe4da491900046f26bf4316a55987309" exitCode=2 Oct 10 16:43:12 crc kubenswrapper[4799]: I1010 16:43:12.860908 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gg5hb" event={"ID":"f000ac73-b5de-47c8-a0a7-84bd06475f62","Type":"ContainerDied","Data":"8c9c3aaa13091c5803bad24fd1f3a1d3fe4da491900046f26bf4316a55987309"} Oct 10 16:43:12 crc kubenswrapper[4799]: I1010 16:43:12.860987 4799 scope.go:117] "RemoveContainer" 
containerID="9fa19f17c5ed052d9c266f2da2d4e8338037b397bc2fb5e859f733c6b8c1b69e" Oct 10 16:43:12 crc kubenswrapper[4799]: I1010 16:43:12.863242 4799 scope.go:117] "RemoveContainer" containerID="8c9c3aaa13091c5803bad24fd1f3a1d3fe4da491900046f26bf4316a55987309" Oct 10 16:43:12 crc kubenswrapper[4799]: I1010 16:43:12.864334 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mcwfc_abe7f2d9-ec99-4724-a01f-cc7096377e07/ovnkube-controller/3.log" Oct 10 16:43:12 crc kubenswrapper[4799]: E1010 16:43:12.864550 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-gg5hb_openshift-multus(f000ac73-b5de-47c8-a0a7-84bd06475f62)\"" pod="openshift-multus/multus-gg5hb" podUID="f000ac73-b5de-47c8-a0a7-84bd06475f62" Oct 10 16:43:12 crc kubenswrapper[4799]: I1010 16:43:12.870850 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mcwfc_abe7f2d9-ec99-4724-a01f-cc7096377e07/ovn-acl-logging/0.log" Oct 10 16:43:12 crc kubenswrapper[4799]: I1010 16:43:12.871593 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mcwfc_abe7f2d9-ec99-4724-a01f-cc7096377e07/ovn-controller/0.log" Oct 10 16:43:12 crc kubenswrapper[4799]: I1010 16:43:12.872162 4799 generic.go:334] "Generic (PLEG): container finished" podID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerID="7dd9264bc068801f661655f2e77814e97ed7f61382f1e64dc3f9c3ffd546bdb3" exitCode=0 Oct 10 16:43:12 crc kubenswrapper[4799]: I1010 16:43:12.872224 4799 generic.go:334] "Generic (PLEG): container finished" podID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerID="08406e220de50ba85f882a05117b5df8c9445a38c026bb85c95fc9f98f2d2cfe" exitCode=0 Oct 10 16:43:12 crc kubenswrapper[4799]: I1010 16:43:12.872260 4799 generic.go:334] "Generic (PLEG): container finished" podID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerID="c7d0e536ad5143941dd18418b1ac7972a1136a841542b950f6891a386d43ca9c" exitCode=0 Oct 10 16:43:12 crc kubenswrapper[4799]: I1010 16:43:12.872285 4799 generic.go:334] "Generic (PLEG): container finished" podID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerID="cfe05183ad0b03415525e6aa2a8d52a5d63b8c273113c46326396df5e0c2bb12" exitCode=0 Oct 10 16:43:12 crc kubenswrapper[4799]: I1010 16:43:12.872303 4799 generic.go:334] "Generic (PLEG): container finished" podID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerID="cd261112ca7db4d0d76f6ab29a0347d64dccfff4db42ac9f55d6d7df1443ab23" exitCode=0 Oct 10 16:43:12 crc kubenswrapper[4799]: I1010 16:43:12.872321 4799 generic.go:334] "Generic (PLEG): container finished" podID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerID="8cbc87c392646ebf9c016f8c7b40bcec30e33a0a05ea4a896d1143c5f1086990" exitCode=0 Oct 10 16:43:12 crc kubenswrapper[4799]: I1010 16:43:12.872338 4799 generic.go:334] "Generic (PLEG): container finished" podID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerID="6562d440ce1f1477fd09c15c34ab88e17e1fb2c2cae4b32a7bf8cbdd29f4d5a3" exitCode=143 Oct 10 16:43:12 crc kubenswrapper[4799]: I1010 16:43:12.872356 4799 generic.go:334] "Generic (PLEG): container finished" podID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerID="ff4fcf53aeed6c07f775152de0faa9fa0671848df06d37cbca6ec7097d0024d5" exitCode=143 Oct 10 16:43:12 crc kubenswrapper[4799]: I1010 16:43:12.872227 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" event={"ID":"abe7f2d9-ec99-4724-a01f-cc7096377e07","Type":"ContainerDied","Data":"7dd9264bc068801f661655f2e77814e97ed7f61382f1e64dc3f9c3ffd546bdb3"} Oct 10 16:43:12 crc kubenswrapper[4799]: I1010 16:43:12.872426 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" event={"ID":"abe7f2d9-ec99-4724-a01f-cc7096377e07","Type":"ContainerDied","Data":"08406e220de50ba85f882a05117b5df8c9445a38c026bb85c95fc9f98f2d2cfe"} Oct 10 16:43:12 crc kubenswrapper[4799]: I1010 16:43:12.872466 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" event={"ID":"abe7f2d9-ec99-4724-a01f-cc7096377e07","Type":"ContainerDied","Data":"c7d0e536ad5143941dd18418b1ac7972a1136a841542b950f6891a386d43ca9c"} Oct 10 16:43:12 crc kubenswrapper[4799]: I1010 16:43:12.872496 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" event={"ID":"abe7f2d9-ec99-4724-a01f-cc7096377e07","Type":"ContainerDied","Data":"cfe05183ad0b03415525e6aa2a8d52a5d63b8c273113c46326396df5e0c2bb12"} Oct 10 16:43:12 crc kubenswrapper[4799]: I1010 16:43:12.872521 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" event={"ID":"abe7f2d9-ec99-4724-a01f-cc7096377e07","Type":"ContainerDied","Data":"cd261112ca7db4d0d76f6ab29a0347d64dccfff4db42ac9f55d6d7df1443ab23"} Oct 10 16:43:12 crc kubenswrapper[4799]: I1010 16:43:12.872548 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" event={"ID":"abe7f2d9-ec99-4724-a01f-cc7096377e07","Type":"ContainerDied","Data":"8cbc87c392646ebf9c016f8c7b40bcec30e33a0a05ea4a896d1143c5f1086990"} Oct 10 16:43:12 crc kubenswrapper[4799]: I1010 16:43:12.872574 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" event={"ID":"abe7f2d9-ec99-4724-a01f-cc7096377e07","Type":"ContainerDied","Data":"6562d440ce1f1477fd09c15c34ab88e17e1fb2c2cae4b32a7bf8cbdd29f4d5a3"} Oct 10 16:43:12 crc kubenswrapper[4799]: I1010 16:43:12.872600 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" event={"ID":"abe7f2d9-ec99-4724-a01f-cc7096377e07","Type":"ContainerDied","Data":"ff4fcf53aeed6c07f775152de0faa9fa0671848df06d37cbca6ec7097d0024d5"} Oct 10 16:43:12 crc kubenswrapper[4799]: I1010 16:43:12.890611 4799 scope.go:117] "RemoveContainer" containerID="df22025d59e852d7ca86c7739f0dd141f6b388604bcf9ffaabfa48433290db84" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.117326 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mcwfc_abe7f2d9-ec99-4724-a01f-cc7096377e07/ovn-acl-logging/0.log" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.118245 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mcwfc_abe7f2d9-ec99-4724-a01f-cc7096377e07/ovn-controller/0.log" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.118939 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.202850 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-pwvtr"] Oct 10 16:43:13 crc kubenswrapper[4799]: E1010 16:43:13.203220 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="ovnkube-controller" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.203251 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="ovnkube-controller" Oct 10 16:43:13 crc kubenswrapper[4799]: E1010 16:43:13.203273 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="kube-rbac-proxy-node" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.203289 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="kube-rbac-proxy-node" Oct 10 16:43:13 crc kubenswrapper[4799]: E1010 16:43:13.203315 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="ovnkube-controller" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.203334 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="ovnkube-controller" Oct 10 16:43:13 crc kubenswrapper[4799]: E1010 16:43:13.203360 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="northd" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.203376 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="northd" Oct 10 16:43:13 crc kubenswrapper[4799]: E1010 16:43:13.203401 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60ab14da-0f2e-48cc-873a-44eaf0662ed8" containerName="registry" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.203419 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="60ab14da-0f2e-48cc-873a-44eaf0662ed8" containerName="registry" Oct 10 16:43:13 crc kubenswrapper[4799]: E1010 16:43:13.203443 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="ovnkube-controller" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.203459 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="ovnkube-controller" Oct 10 16:43:13 crc kubenswrapper[4799]: E1010 16:43:13.203479 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="kubecfg-setup" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.203496 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="kubecfg-setup" Oct 10 16:43:13 crc kubenswrapper[4799]: E1010 16:43:13.203519 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="ovn-controller" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.203535 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="ovn-controller" Oct 10 16:43:13 crc kubenswrapper[4799]: E1010 16:43:13.203553 4799 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="kube-rbac-proxy-ovn-metrics" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.203568 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="kube-rbac-proxy-ovn-metrics" Oct 10 16:43:13 crc kubenswrapper[4799]: E1010 16:43:13.203596 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="sbdb" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.203611 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="sbdb" Oct 10 16:43:13 crc kubenswrapper[4799]: E1010 16:43:13.203638 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="nbdb" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.203654 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="nbdb" Oct 10 16:43:13 crc kubenswrapper[4799]: E1010 16:43:13.203684 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="ovn-acl-logging" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.203700 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="ovn-acl-logging" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.203983 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="ovnkube-controller" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.204013 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="ovnkube-controller" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.204034 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="ovn-controller" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.204058 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="kube-rbac-proxy-ovn-metrics" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.204078 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="kube-rbac-proxy-node" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.204100 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="ovnkube-controller" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.204116 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="northd" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.204132 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="nbdb" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.204154 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="sbdb" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.204181 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="60ab14da-0f2e-48cc-873a-44eaf0662ed8" containerName="registry" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.204202 4799 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="ovn-acl-logging" Oct 10 16:43:13 crc kubenswrapper[4799]: E1010 16:43:13.204402 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="ovnkube-controller" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.204423 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="ovnkube-controller" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.204681 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="ovnkube-controller" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.204710 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="ovnkube-controller" Oct 10 16:43:13 crc kubenswrapper[4799]: E1010 16:43:13.205052 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="ovnkube-controller" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.205075 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" containerName="ovnkube-controller" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.208482 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.248669 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-cni-netd\") pod \"abe7f2d9-ec99-4724-a01f-cc7096377e07\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.248721 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-run-systemd\") pod \"abe7f2d9-ec99-4724-a01f-cc7096377e07\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.248746 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-run-ovn-kubernetes\") pod \"abe7f2d9-ec99-4724-a01f-cc7096377e07\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.248819 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "abe7f2d9-ec99-4724-a01f-cc7096377e07" (UID: "abe7f2d9-ec99-4724-a01f-cc7096377e07"). InnerVolumeSpecName "host-cni-netd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.248931 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-cni-bin\") pod \"abe7f2d9-ec99-4724-a01f-cc7096377e07\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.248966 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "abe7f2d9-ec99-4724-a01f-cc7096377e07" (UID: "abe7f2d9-ec99-4724-a01f-cc7096377e07"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.248997 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "abe7f2d9-ec99-4724-a01f-cc7096377e07" (UID: "abe7f2d9-ec99-4724-a01f-cc7096377e07"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.249325 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-run-ovn\") pod \"abe7f2d9-ec99-4724-a01f-cc7096377e07\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.249346 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-kubelet\") pod \"abe7f2d9-ec99-4724-a01f-cc7096377e07\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.249369 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-slash\") pod \"abe7f2d9-ec99-4724-a01f-cc7096377e07\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.249390 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-var-lib-cni-networks-ovn-kubernetes\") pod \"abe7f2d9-ec99-4724-a01f-cc7096377e07\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.249423 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/abe7f2d9-ec99-4724-a01f-cc7096377e07-ovn-node-metrics-cert\") pod \"abe7f2d9-ec99-4724-a01f-cc7096377e07\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.249446 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7pr44\" (UniqueName: \"kubernetes.io/projected/abe7f2d9-ec99-4724-a01f-cc7096377e07-kube-api-access-7pr44\") pod \"abe7f2d9-ec99-4724-a01f-cc7096377e07\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.249465 4799 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-etc-openvswitch\") pod \"abe7f2d9-ec99-4724-a01f-cc7096377e07\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.249488 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-node-log\") pod \"abe7f2d9-ec99-4724-a01f-cc7096377e07\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.249509 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-log-socket\") pod \"abe7f2d9-ec99-4724-a01f-cc7096377e07\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.249529 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-systemd-units\") pod \"abe7f2d9-ec99-4724-a01f-cc7096377e07\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.249561 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/abe7f2d9-ec99-4724-a01f-cc7096377e07-ovnkube-config\") pod \"abe7f2d9-ec99-4724-a01f-cc7096377e07\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.249593 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/abe7f2d9-ec99-4724-a01f-cc7096377e07-ovnkube-script-lib\") pod \"abe7f2d9-ec99-4724-a01f-cc7096377e07\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.249624 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/abe7f2d9-ec99-4724-a01f-cc7096377e07-env-overrides\") pod \"abe7f2d9-ec99-4724-a01f-cc7096377e07\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.249648 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-run-netns\") pod \"abe7f2d9-ec99-4724-a01f-cc7096377e07\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.249673 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-var-lib-openvswitch\") pod \"abe7f2d9-ec99-4724-a01f-cc7096377e07\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.249695 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-run-openvswitch\") pod \"abe7f2d9-ec99-4724-a01f-cc7096377e07\" (UID: \"abe7f2d9-ec99-4724-a01f-cc7096377e07\") " Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.249720 4799 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "abe7f2d9-ec99-4724-a01f-cc7096377e07" (UID: "abe7f2d9-ec99-4724-a01f-cc7096377e07"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.249827 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-run-ovn\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr"
Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.249845 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "abe7f2d9-ec99-4724-a01f-cc7096377e07" (UID: "abe7f2d9-ec99-4724-a01f-cc7096377e07"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.249858 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xgqm\" (UniqueName: \"kubernetes.io/projected/81da9e6e-35ef-437d-8515-7fc38686b854-kube-api-access-2xgqm\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr"
Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.249882 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-run-systemd\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr"
Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.249893 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-slash" (OuterVolumeSpecName: "host-slash") pod "abe7f2d9-ec99-4724-a01f-cc7096377e07" (UID: "abe7f2d9-ec99-4724-a01f-cc7096377e07"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.249909 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-host-slash\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr"
Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.249935 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr"
Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.249957 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-host-cni-netd\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr"
Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.249980 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/81da9e6e-35ef-437d-8515-7fc38686b854-ovn-node-metrics-cert\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr"
Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.250004 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-host-run-ovn-kubernetes\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr"
Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.250005 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "abe7f2d9-ec99-4724-a01f-cc7096377e07" (UID: "abe7f2d9-ec99-4724-a01f-cc7096377e07"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.250024 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/81da9e6e-35ef-437d-8515-7fc38686b854-env-overrides\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr"
Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.250046 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "abe7f2d9-ec99-4724-a01f-cc7096377e07" (UID: "abe7f2d9-ec99-4724-a01f-cc7096377e07"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.250049 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-host-kubelet\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr"
Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.250089 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "abe7f2d9-ec99-4724-a01f-cc7096377e07" (UID: "abe7f2d9-ec99-4724-a01f-cc7096377e07"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.250500 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abe7f2d9-ec99-4724-a01f-cc7096377e07-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "abe7f2d9-ec99-4724-a01f-cc7096377e07" (UID: "abe7f2d9-ec99-4724-a01f-cc7096377e07"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.250534 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "abe7f2d9-ec99-4724-a01f-cc7096377e07" (UID: "abe7f2d9-ec99-4724-a01f-cc7096377e07"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.250556 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-node-log" (OuterVolumeSpecName: "node-log") pod "abe7f2d9-ec99-4724-a01f-cc7096377e07" (UID: "abe7f2d9-ec99-4724-a01f-cc7096377e07"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.250577 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-log-socket" (OuterVolumeSpecName: "log-socket") pod "abe7f2d9-ec99-4724-a01f-cc7096377e07" (UID: "abe7f2d9-ec99-4724-a01f-cc7096377e07"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.250598 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "abe7f2d9-ec99-4724-a01f-cc7096377e07" (UID: "abe7f2d9-ec99-4724-a01f-cc7096377e07"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.250957 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abe7f2d9-ec99-4724-a01f-cc7096377e07-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "abe7f2d9-ec99-4724-a01f-cc7096377e07" (UID: "abe7f2d9-ec99-4724-a01f-cc7096377e07"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.251273 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abe7f2d9-ec99-4724-a01f-cc7096377e07-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "abe7f2d9-ec99-4724-a01f-cc7096377e07" (UID: "abe7f2d9-ec99-4724-a01f-cc7096377e07"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.251307 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "abe7f2d9-ec99-4724-a01f-cc7096377e07" (UID: "abe7f2d9-ec99-4724-a01f-cc7096377e07"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.251441 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-etc-openvswitch\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.251500 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-host-cni-bin\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.251535 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-systemd-units\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.251563 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-host-run-netns\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.251605 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-var-lib-openvswitch\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.251648 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-node-log\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.251675 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/81da9e6e-35ef-437d-8515-7fc38686b854-ovnkube-config\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.251705 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-run-openvswitch\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.251735 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-log-socket\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.251803 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/81da9e6e-35ef-437d-8515-7fc38686b854-ovnkube-script-lib\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.251871 4799 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-run-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.251895 4799 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-cni-netd\") on node \"crc\" DevicePath \"\"" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.251915 4799 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.251936 4799 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-cni-bin\") on node \"crc\" DevicePath \"\"" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.251956 4799 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.251976 4799 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-kubelet\") on node \"crc\" DevicePath \"\"" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.251995 4799 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-slash\") on node \"crc\" DevicePath \"\"" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.252017 4799 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.252037 4799 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.252067 4799 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-node-log\") on node \"crc\" DevicePath \"\"" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.252083 4799 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-log-socket\") on node \"crc\" DevicePath \"\"" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.252098 4799 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-systemd-units\") on node \"crc\" DevicePath \"\"" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.252113 4799 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/abe7f2d9-ec99-4724-a01f-cc7096377e07-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.252128 4799 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/abe7f2d9-ec99-4724-a01f-cc7096377e07-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.252157 4799 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/abe7f2d9-ec99-4724-a01f-cc7096377e07-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.252173 4799 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-host-run-netns\") on node \"crc\" DevicePath \"\"" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.252188 4799 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.255340 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abe7f2d9-ec99-4724-a01f-cc7096377e07-kube-api-access-7pr44" (OuterVolumeSpecName: "kube-api-access-7pr44") pod "abe7f2d9-ec99-4724-a01f-cc7096377e07" (UID: "abe7f2d9-ec99-4724-a01f-cc7096377e07"). InnerVolumeSpecName "kube-api-access-7pr44". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.255622 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abe7f2d9-ec99-4724-a01f-cc7096377e07-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "abe7f2d9-ec99-4724-a01f-cc7096377e07" (UID: "abe7f2d9-ec99-4724-a01f-cc7096377e07"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.264357 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "abe7f2d9-ec99-4724-a01f-cc7096377e07" (UID: "abe7f2d9-ec99-4724-a01f-cc7096377e07"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.353369 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-var-lib-openvswitch\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.353418 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-node-log\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.353438 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/81da9e6e-35ef-437d-8515-7fc38686b854-ovnkube-config\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.353457 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-run-openvswitch\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.353473 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-log-socket\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.353493 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/81da9e6e-35ef-437d-8515-7fc38686b854-ovnkube-script-lib\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.353515 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-run-ovn\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.353532 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xgqm\" (UniqueName: \"kubernetes.io/projected/81da9e6e-35ef-437d-8515-7fc38686b854-kube-api-access-2xgqm\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.353546 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-run-systemd\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.353566 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-host-slash\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.353585 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-host-cni-netd\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.353602 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.353622 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/81da9e6e-35ef-437d-8515-7fc38686b854-ovn-node-metrics-cert\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.353638 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/81da9e6e-35ef-437d-8515-7fc38686b854-env-overrides\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.353656 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-host-run-ovn-kubernetes\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.353672 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-host-kubelet\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.353699 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-etc-openvswitch\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.353716 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-host-cni-bin\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.353733 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-systemd-units\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.353749 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-host-run-netns\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.353804 4799 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/abe7f2d9-ec99-4724-a01f-cc7096377e07-run-systemd\") on node \"crc\" DevicePath \"\"" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.353816 4799 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/abe7f2d9-ec99-4724-a01f-cc7096377e07-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.353826 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7pr44\" (UniqueName: \"kubernetes.io/projected/abe7f2d9-ec99-4724-a01f-cc7096377e07-kube-api-access-7pr44\") on node \"crc\" DevicePath \"\"" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.353869 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-host-run-netns\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.354057 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-var-lib-openvswitch\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.354077 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-node-log\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.354097 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.354184 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-host-cni-netd\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.354282 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-run-systemd\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.354332 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-host-slash\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.354383 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-run-openvswitch\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.354861 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-run-ovn\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.354912 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-log-socket\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.354945 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-host-kubelet\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.355224 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/81da9e6e-35ef-437d-8515-7fc38686b854-ovnkube-script-lib\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.355298 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-host-cni-bin\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.355347 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" 
(UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-etc-openvswitch\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.355397 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-host-run-ovn-kubernetes\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.355442 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/81da9e6e-35ef-437d-8515-7fc38686b854-systemd-units\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.355446 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/81da9e6e-35ef-437d-8515-7fc38686b854-env-overrides\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.355621 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/81da9e6e-35ef-437d-8515-7fc38686b854-ovnkube-config\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.356652 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/81da9e6e-35ef-437d-8515-7fc38686b854-ovn-node-metrics-cert\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.388393 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2xgqm\" (UniqueName: \"kubernetes.io/projected/81da9e6e-35ef-437d-8515-7fc38686b854-kube-api-access-2xgqm\") pod \"ovnkube-node-pwvtr\" (UID: \"81da9e6e-35ef-437d-8515-7fc38686b854\") " pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.524326 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:13 crc kubenswrapper[4799]: W1010 16:43:13.562420 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod81da9e6e_35ef_437d_8515_7fc38686b854.slice/crio-08a47ff846096de7c20b6088682a37135e29367f8877f691a0f26bac5121e0e8 WatchSource:0}: Error finding container 08a47ff846096de7c20b6088682a37135e29367f8877f691a0f26bac5121e0e8: Status 404 returned error can't find the container with id 08a47ff846096de7c20b6088682a37135e29367f8877f691a0f26bac5121e0e8 Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.885515 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gg5hb_f000ac73-b5de-47c8-a0a7-84bd06475f62/kube-multus/2.log" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.888434 4799 generic.go:334] "Generic (PLEG): container finished" podID="81da9e6e-35ef-437d-8515-7fc38686b854" containerID="8ba44a3790d46081cc173e27ff0d4cb2cf31ebd4356b9e1853cdc3cb086f34cb" exitCode=0 Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.888530 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" event={"ID":"81da9e6e-35ef-437d-8515-7fc38686b854","Type":"ContainerDied","Data":"8ba44a3790d46081cc173e27ff0d4cb2cf31ebd4356b9e1853cdc3cb086f34cb"} Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.888569 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" event={"ID":"81da9e6e-35ef-437d-8515-7fc38686b854","Type":"ContainerStarted","Data":"08a47ff846096de7c20b6088682a37135e29367f8877f691a0f26bac5121e0e8"} Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.897859 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mcwfc_abe7f2d9-ec99-4724-a01f-cc7096377e07/ovn-acl-logging/0.log" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.898789 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mcwfc_abe7f2d9-ec99-4724-a01f-cc7096377e07/ovn-controller/0.log" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.899466 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" event={"ID":"abe7f2d9-ec99-4724-a01f-cc7096377e07","Type":"ContainerDied","Data":"5b6b940562569201c6b4876710f0f0e2e4a2e13e9f3db42bf255fa61f31e09e1"} Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.899571 4799 scope.go:117] "RemoveContainer" containerID="7dd9264bc068801f661655f2e77814e97ed7f61382f1e64dc3f9c3ffd546bdb3" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.899604 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-mcwfc" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.935823 4799 scope.go:117] "RemoveContainer" containerID="08406e220de50ba85f882a05117b5df8c9445a38c026bb85c95fc9f98f2d2cfe" Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.978641 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-mcwfc"] Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.983125 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-mcwfc"] Oct 10 16:43:13 crc kubenswrapper[4799]: I1010 16:43:13.985108 4799 scope.go:117] "RemoveContainer" containerID="c7d0e536ad5143941dd18418b1ac7972a1136a841542b950f6891a386d43ca9c" Oct 10 16:43:14 crc kubenswrapper[4799]: I1010 16:43:14.026883 4799 scope.go:117] "RemoveContainer" containerID="cfe05183ad0b03415525e6aa2a8d52a5d63b8c273113c46326396df5e0c2bb12" Oct 10 16:43:14 crc kubenswrapper[4799]: I1010 16:43:14.070636 4799 scope.go:117] "RemoveContainer" containerID="cd261112ca7db4d0d76f6ab29a0347d64dccfff4db42ac9f55d6d7df1443ab23" Oct 10 16:43:14 crc kubenswrapper[4799]: I1010 16:43:14.086002 4799 scope.go:117] "RemoveContainer" containerID="8cbc87c392646ebf9c016f8c7b40bcec30e33a0a05ea4a896d1143c5f1086990" Oct 10 16:43:14 crc kubenswrapper[4799]: I1010 16:43:14.104831 4799 scope.go:117] "RemoveContainer" containerID="6562d440ce1f1477fd09c15c34ab88e17e1fb2c2cae4b32a7bf8cbdd29f4d5a3" Oct 10 16:43:14 crc kubenswrapper[4799]: I1010 16:43:14.127228 4799 scope.go:117] "RemoveContainer" containerID="ff4fcf53aeed6c07f775152de0faa9fa0671848df06d37cbca6ec7097d0024d5" Oct 10 16:43:14 crc kubenswrapper[4799]: I1010 16:43:14.153584 4799 scope.go:117] "RemoveContainer" containerID="d2ce36def99eaf908452410a523cd14eb31a5a4dc3ee38d5983ea95d5ee75f83" Oct 10 16:43:14 crc kubenswrapper[4799]: I1010 16:43:14.911627 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" event={"ID":"81da9e6e-35ef-437d-8515-7fc38686b854","Type":"ContainerStarted","Data":"83df080ec50a5a258ee421d3f132c99cc3a4e9fba98f82b08a4eba19af5f24db"} Oct 10 16:43:14 crc kubenswrapper[4799]: I1010 16:43:14.913097 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" event={"ID":"81da9e6e-35ef-437d-8515-7fc38686b854","Type":"ContainerStarted","Data":"4bf23858099c5d7687bb75e25a711450616d6ed34966a479a6eab913ea698d83"} Oct 10 16:43:14 crc kubenswrapper[4799]: I1010 16:43:14.913168 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" event={"ID":"81da9e6e-35ef-437d-8515-7fc38686b854","Type":"ContainerStarted","Data":"9ae65cd153ed05ad05aa02e7e1284c93332e4839466a7ed8c86b892507325325"} Oct 10 16:43:14 crc kubenswrapper[4799]: I1010 16:43:14.913227 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" event={"ID":"81da9e6e-35ef-437d-8515-7fc38686b854","Type":"ContainerStarted","Data":"ef543b2b5884fb8378c0a9b1ce01946fc92ae4a87d8621096b12aea9af8ab070"} Oct 10 16:43:14 crc kubenswrapper[4799]: I1010 16:43:14.913284 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" event={"ID":"81da9e6e-35ef-437d-8515-7fc38686b854","Type":"ContainerStarted","Data":"dd6c7b3c62aa3235fb92c1e15d0b1a47ee9e9a69b0355f8c1f573676f1b0e7fb"} Oct 10 16:43:14 crc kubenswrapper[4799]: I1010 16:43:14.913361 4799 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" event={"ID":"81da9e6e-35ef-437d-8515-7fc38686b854","Type":"ContainerStarted","Data":"d9d66aa9fc9b8af0eef50ab8bace9954c0400128dfee10375577ebbd6221ec11"} Oct 10 16:43:15 crc kubenswrapper[4799]: I1010 16:43:15.416525 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="abe7f2d9-ec99-4724-a01f-cc7096377e07" path="/var/lib/kubelet/pods/abe7f2d9-ec99-4724-a01f-cc7096377e07/volumes" Oct 10 16:43:17 crc kubenswrapper[4799]: I1010 16:43:17.755421 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-sj59m"] Oct 10 16:43:17 crc kubenswrapper[4799]: I1010 16:43:17.757307 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-sj59m" Oct 10 16:43:17 crc kubenswrapper[4799]: I1010 16:43:17.759570 4799 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-qqh4q" Oct 10 16:43:17 crc kubenswrapper[4799]: I1010 16:43:17.760272 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Oct 10 16:43:17 crc kubenswrapper[4799]: I1010 16:43:17.760283 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Oct 10 16:43:17 crc kubenswrapper[4799]: I1010 16:43:17.760953 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Oct 10 16:43:17 crc kubenswrapper[4799]: I1010 16:43:17.919013 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/f2688c5c-df78-4127-9133-4221ea43363d-crc-storage\") pod \"crc-storage-crc-sj59m\" (UID: \"f2688c5c-df78-4127-9133-4221ea43363d\") " pod="crc-storage/crc-storage-crc-sj59m" Oct 10 16:43:17 crc kubenswrapper[4799]: I1010 16:43:17.919298 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/f2688c5c-df78-4127-9133-4221ea43363d-node-mnt\") pod \"crc-storage-crc-sj59m\" (UID: \"f2688c5c-df78-4127-9133-4221ea43363d\") " pod="crc-storage/crc-storage-crc-sj59m" Oct 10 16:43:17 crc kubenswrapper[4799]: I1010 16:43:17.919450 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4242\" (UniqueName: \"kubernetes.io/projected/f2688c5c-df78-4127-9133-4221ea43363d-kube-api-access-t4242\") pod \"crc-storage-crc-sj59m\" (UID: \"f2688c5c-df78-4127-9133-4221ea43363d\") " pod="crc-storage/crc-storage-crc-sj59m" Oct 10 16:43:17 crc kubenswrapper[4799]: I1010 16:43:17.940747 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" event={"ID":"81da9e6e-35ef-437d-8515-7fc38686b854","Type":"ContainerStarted","Data":"7d9415481dae693cbf92be05745d8de7b3487026f055246b3d3250cc23350cf8"} Oct 10 16:43:18 crc kubenswrapper[4799]: I1010 16:43:18.021266 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4242\" (UniqueName: \"kubernetes.io/projected/f2688c5c-df78-4127-9133-4221ea43363d-kube-api-access-t4242\") pod \"crc-storage-crc-sj59m\" (UID: \"f2688c5c-df78-4127-9133-4221ea43363d\") " pod="crc-storage/crc-storage-crc-sj59m" Oct 10 16:43:18 crc kubenswrapper[4799]: I1010 16:43:18.021391 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: 
\"kubernetes.io/configmap/f2688c5c-df78-4127-9133-4221ea43363d-crc-storage\") pod \"crc-storage-crc-sj59m\" (UID: \"f2688c5c-df78-4127-9133-4221ea43363d\") " pod="crc-storage/crc-storage-crc-sj59m" Oct 10 16:43:18 crc kubenswrapper[4799]: I1010 16:43:18.021482 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/f2688c5c-df78-4127-9133-4221ea43363d-node-mnt\") pod \"crc-storage-crc-sj59m\" (UID: \"f2688c5c-df78-4127-9133-4221ea43363d\") " pod="crc-storage/crc-storage-crc-sj59m" Oct 10 16:43:18 crc kubenswrapper[4799]: I1010 16:43:18.021875 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/f2688c5c-df78-4127-9133-4221ea43363d-node-mnt\") pod \"crc-storage-crc-sj59m\" (UID: \"f2688c5c-df78-4127-9133-4221ea43363d\") " pod="crc-storage/crc-storage-crc-sj59m" Oct 10 16:43:18 crc kubenswrapper[4799]: I1010 16:43:18.023196 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/f2688c5c-df78-4127-9133-4221ea43363d-crc-storage\") pod \"crc-storage-crc-sj59m\" (UID: \"f2688c5c-df78-4127-9133-4221ea43363d\") " pod="crc-storage/crc-storage-crc-sj59m" Oct 10 16:43:18 crc kubenswrapper[4799]: I1010 16:43:18.056934 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t4242\" (UniqueName: \"kubernetes.io/projected/f2688c5c-df78-4127-9133-4221ea43363d-kube-api-access-t4242\") pod \"crc-storage-crc-sj59m\" (UID: \"f2688c5c-df78-4127-9133-4221ea43363d\") " pod="crc-storage/crc-storage-crc-sj59m" Oct 10 16:43:18 crc kubenswrapper[4799]: I1010 16:43:18.082686 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-sj59m" Oct 10 16:43:18 crc kubenswrapper[4799]: E1010 16:43:18.126467 4799 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-sj59m_crc-storage_f2688c5c-df78-4127-9133-4221ea43363d_0(c5e9f6d5eb37ca8a16b43404d87ac25129c5754ccefe2eaec089d66cc18f7be4): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 10 16:43:18 crc kubenswrapper[4799]: E1010 16:43:18.126583 4799 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-sj59m_crc-storage_f2688c5c-df78-4127-9133-4221ea43363d_0(c5e9f6d5eb37ca8a16b43404d87ac25129c5754ccefe2eaec089d66cc18f7be4): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-sj59m" Oct 10 16:43:18 crc kubenswrapper[4799]: E1010 16:43:18.126636 4799 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-sj59m_crc-storage_f2688c5c-df78-4127-9133-4221ea43363d_0(c5e9f6d5eb37ca8a16b43404d87ac25129c5754ccefe2eaec089d66cc18f7be4): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="crc-storage/crc-storage-crc-sj59m" Oct 10 16:43:18 crc kubenswrapper[4799]: E1010 16:43:18.126790 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-sj59m_crc-storage(f2688c5c-df78-4127-9133-4221ea43363d)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-sj59m_crc-storage(f2688c5c-df78-4127-9133-4221ea43363d)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-sj59m_crc-storage_f2688c5c-df78-4127-9133-4221ea43363d_0(c5e9f6d5eb37ca8a16b43404d87ac25129c5754ccefe2eaec089d66cc18f7be4): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-sj59m" podUID="f2688c5c-df78-4127-9133-4221ea43363d" Oct 10 16:43:19 crc kubenswrapper[4799]: I1010 16:43:19.966893 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" event={"ID":"81da9e6e-35ef-437d-8515-7fc38686b854","Type":"ContainerStarted","Data":"caa8b1567d0fc12c6e176ca4b5a9aef0114da6a648eb5c59a001c87571365671"} Oct 10 16:43:19 crc kubenswrapper[4799]: I1010 16:43:19.967624 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:19 crc kubenswrapper[4799]: I1010 16:43:19.967655 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:19 crc kubenswrapper[4799]: I1010 16:43:19.967672 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:20 crc kubenswrapper[4799]: I1010 16:43:20.004965 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" podStartSLOduration=7.004939821 podStartE2EDuration="7.004939821s" podCreationTimestamp="2025-10-10 16:43:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:43:20.001574417 +0000 UTC m=+693.509898592" watchObservedRunningTime="2025-10-10 16:43:20.004939821 +0000 UTC m=+693.513263976" Oct 10 16:43:20 crc kubenswrapper[4799]: I1010 16:43:20.004999 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:20 crc kubenswrapper[4799]: I1010 16:43:20.011843 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:20 crc kubenswrapper[4799]: I1010 16:43:20.100217 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-sj59m"] Oct 10 16:43:20 crc kubenswrapper[4799]: I1010 16:43:20.100406 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-sj59m" Oct 10 16:43:20 crc kubenswrapper[4799]: I1010 16:43:20.101013 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-sj59m" Oct 10 16:43:20 crc kubenswrapper[4799]: E1010 16:43:20.128147 4799 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-sj59m_crc-storage_f2688c5c-df78-4127-9133-4221ea43363d_0(36150173f064a0518662188ae15583c3b58bf41569fd200ada3a98450546b266): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" Oct 10 16:43:20 crc kubenswrapper[4799]: E1010 16:43:20.128208 4799 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-sj59m_crc-storage_f2688c5c-df78-4127-9133-4221ea43363d_0(36150173f064a0518662188ae15583c3b58bf41569fd200ada3a98450546b266): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-sj59m" Oct 10 16:43:20 crc kubenswrapper[4799]: E1010 16:43:20.128226 4799 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-sj59m_crc-storage_f2688c5c-df78-4127-9133-4221ea43363d_0(36150173f064a0518662188ae15583c3b58bf41569fd200ada3a98450546b266): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-sj59m" Oct 10 16:43:20 crc kubenswrapper[4799]: E1010 16:43:20.128271 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-sj59m_crc-storage(f2688c5c-df78-4127-9133-4221ea43363d)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-sj59m_crc-storage(f2688c5c-df78-4127-9133-4221ea43363d)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-sj59m_crc-storage_f2688c5c-df78-4127-9133-4221ea43363d_0(36150173f064a0518662188ae15583c3b58bf41569fd200ada3a98450546b266): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-sj59m" podUID="f2688c5c-df78-4127-9133-4221ea43363d" Oct 10 16:43:24 crc kubenswrapper[4799]: I1010 16:43:24.402234 4799 scope.go:117] "RemoveContainer" containerID="8c9c3aaa13091c5803bad24fd1f3a1d3fe4da491900046f26bf4316a55987309" Oct 10 16:43:24 crc kubenswrapper[4799]: E1010 16:43:24.402875 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-gg5hb_openshift-multus(f000ac73-b5de-47c8-a0a7-84bd06475f62)\"" pod="openshift-multus/multus-gg5hb" podUID="f000ac73-b5de-47c8-a0a7-84bd06475f62" Oct 10 16:43:34 crc kubenswrapper[4799]: I1010 16:43:34.402019 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-sj59m" Oct 10 16:43:34 crc kubenswrapper[4799]: I1010 16:43:34.403476 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-sj59m" Oct 10 16:43:34 crc kubenswrapper[4799]: E1010 16:43:34.439264 4799 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-sj59m_crc-storage_f2688c5c-df78-4127-9133-4221ea43363d_0(b10badc35973180b2734c5bd958542b4df1e33740dce562039be2bfe20dc25e0): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Oct 10 16:43:34 crc kubenswrapper[4799]: E1010 16:43:34.439361 4799 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-sj59m_crc-storage_f2688c5c-df78-4127-9133-4221ea43363d_0(b10badc35973180b2734c5bd958542b4df1e33740dce562039be2bfe20dc25e0): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-sj59m" Oct 10 16:43:34 crc kubenswrapper[4799]: E1010 16:43:34.439395 4799 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-sj59m_crc-storage_f2688c5c-df78-4127-9133-4221ea43363d_0(b10badc35973180b2734c5bd958542b4df1e33740dce562039be2bfe20dc25e0): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-sj59m" Oct 10 16:43:34 crc kubenswrapper[4799]: E1010 16:43:34.439470 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-sj59m_crc-storage(f2688c5c-df78-4127-9133-4221ea43363d)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-sj59m_crc-storage(f2688c5c-df78-4127-9133-4221ea43363d)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-sj59m_crc-storage_f2688c5c-df78-4127-9133-4221ea43363d_0(b10badc35973180b2734c5bd958542b4df1e33740dce562039be2bfe20dc25e0): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-sj59m" podUID="f2688c5c-df78-4127-9133-4221ea43363d" Oct 10 16:43:39 crc kubenswrapper[4799]: I1010 16:43:39.402717 4799 scope.go:117] "RemoveContainer" containerID="8c9c3aaa13091c5803bad24fd1f3a1d3fe4da491900046f26bf4316a55987309" Oct 10 16:43:40 crc kubenswrapper[4799]: I1010 16:43:40.099576 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gg5hb_f000ac73-b5de-47c8-a0a7-84bd06475f62/kube-multus/2.log" Oct 10 16:43:40 crc kubenswrapper[4799]: I1010 16:43:40.099982 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gg5hb" event={"ID":"f000ac73-b5de-47c8-a0a7-84bd06475f62","Type":"ContainerStarted","Data":"c7abd5da7510473333e527f8f417093a53d4a4c02c118cbb8b2fabda7c98d1ca"} Oct 10 16:43:43 crc kubenswrapper[4799]: I1010 16:43:43.555814 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-pwvtr" Oct 10 16:43:45 crc kubenswrapper[4799]: I1010 16:43:45.248943 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 16:43:45 crc kubenswrapper[4799]: I1010 16:43:45.249223 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 16:43:45 crc kubenswrapper[4799]: I1010 16:43:45.402135 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-sj59m" Oct 10 16:43:45 crc kubenswrapper[4799]: I1010 16:43:45.403122 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-sj59m" Oct 10 16:43:45 crc kubenswrapper[4799]: I1010 16:43:45.631628 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-sj59m"] Oct 10 16:43:45 crc kubenswrapper[4799]: I1010 16:43:45.643399 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 10 16:43:46 crc kubenswrapper[4799]: I1010 16:43:46.138526 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-sj59m" event={"ID":"f2688c5c-df78-4127-9133-4221ea43363d","Type":"ContainerStarted","Data":"4424e12028da8ae781387ec3693e947546a1eb9d6dd8df6511e69d98f8164e08"} Oct 10 16:43:48 crc kubenswrapper[4799]: I1010 16:43:48.153258 4799 generic.go:334] "Generic (PLEG): container finished" podID="f2688c5c-df78-4127-9133-4221ea43363d" containerID="a021416239a2534be17767ac1ddffe083f1460b6ab99aa56ad5dc721ac8328b8" exitCode=0 Oct 10 16:43:48 crc kubenswrapper[4799]: I1010 16:43:48.153378 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-sj59m" event={"ID":"f2688c5c-df78-4127-9133-4221ea43363d","Type":"ContainerDied","Data":"a021416239a2534be17767ac1ddffe083f1460b6ab99aa56ad5dc721ac8328b8"} Oct 10 16:43:49 crc kubenswrapper[4799]: I1010 16:43:49.457165 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-sj59m" Oct 10 16:43:49 crc kubenswrapper[4799]: I1010 16:43:49.579585 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/f2688c5c-df78-4127-9133-4221ea43363d-node-mnt\") pod \"f2688c5c-df78-4127-9133-4221ea43363d\" (UID: \"f2688c5c-df78-4127-9133-4221ea43363d\") " Oct 10 16:43:49 crc kubenswrapper[4799]: I1010 16:43:49.579692 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t4242\" (UniqueName: \"kubernetes.io/projected/f2688c5c-df78-4127-9133-4221ea43363d-kube-api-access-t4242\") pod \"f2688c5c-df78-4127-9133-4221ea43363d\" (UID: \"f2688c5c-df78-4127-9133-4221ea43363d\") " Oct 10 16:43:49 crc kubenswrapper[4799]: I1010 16:43:49.579772 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/f2688c5c-df78-4127-9133-4221ea43363d-crc-storage\") pod \"f2688c5c-df78-4127-9133-4221ea43363d\" (UID: \"f2688c5c-df78-4127-9133-4221ea43363d\") " Oct 10 16:43:49 crc kubenswrapper[4799]: I1010 16:43:49.579826 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f2688c5c-df78-4127-9133-4221ea43363d-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "f2688c5c-df78-4127-9133-4221ea43363d" (UID: "f2688c5c-df78-4127-9133-4221ea43363d"). InnerVolumeSpecName "node-mnt". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 16:43:49 crc kubenswrapper[4799]: I1010 16:43:49.580058 4799 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/f2688c5c-df78-4127-9133-4221ea43363d-node-mnt\") on node \"crc\" DevicePath \"\"" Oct 10 16:43:49 crc kubenswrapper[4799]: I1010 16:43:49.586118 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2688c5c-df78-4127-9133-4221ea43363d-kube-api-access-t4242" (OuterVolumeSpecName: "kube-api-access-t4242") pod "f2688c5c-df78-4127-9133-4221ea43363d" (UID: "f2688c5c-df78-4127-9133-4221ea43363d"). InnerVolumeSpecName "kube-api-access-t4242". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:43:49 crc kubenswrapper[4799]: I1010 16:43:49.596003 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2688c5c-df78-4127-9133-4221ea43363d-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "f2688c5c-df78-4127-9133-4221ea43363d" (UID: "f2688c5c-df78-4127-9133-4221ea43363d"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:43:49 crc kubenswrapper[4799]: I1010 16:43:49.681644 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t4242\" (UniqueName: \"kubernetes.io/projected/f2688c5c-df78-4127-9133-4221ea43363d-kube-api-access-t4242\") on node \"crc\" DevicePath \"\"" Oct 10 16:43:49 crc kubenswrapper[4799]: I1010 16:43:49.681690 4799 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/f2688c5c-df78-4127-9133-4221ea43363d-crc-storage\") on node \"crc\" DevicePath \"\"" Oct 10 16:43:50 crc kubenswrapper[4799]: I1010 16:43:50.170192 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-sj59m" event={"ID":"f2688c5c-df78-4127-9133-4221ea43363d","Type":"ContainerDied","Data":"4424e12028da8ae781387ec3693e947546a1eb9d6dd8df6511e69d98f8164e08"} Oct 10 16:43:50 crc kubenswrapper[4799]: I1010 16:43:50.170233 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4424e12028da8ae781387ec3693e947546a1eb9d6dd8df6511e69d98f8164e08" Oct 10 16:43:50 crc kubenswrapper[4799]: I1010 16:43:50.170265 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-sj59m" Oct 10 16:43:57 crc kubenswrapper[4799]: I1010 16:43:57.684459 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf"] Oct 10 16:43:57 crc kubenswrapper[4799]: E1010 16:43:57.686375 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2688c5c-df78-4127-9133-4221ea43363d" containerName="storage" Oct 10 16:43:57 crc kubenswrapper[4799]: I1010 16:43:57.686407 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2688c5c-df78-4127-9133-4221ea43363d" containerName="storage" Oct 10 16:43:57 crc kubenswrapper[4799]: I1010 16:43:57.686852 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2688c5c-df78-4127-9133-4221ea43363d" containerName="storage" Oct 10 16:43:57 crc kubenswrapper[4799]: I1010 16:43:57.690688 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf" Oct 10 16:43:57 crc kubenswrapper[4799]: I1010 16:43:57.695309 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 10 16:43:57 crc kubenswrapper[4799]: I1010 16:43:57.703367 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf"] Oct 10 16:43:57 crc kubenswrapper[4799]: I1010 16:43:57.798065 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d37f944b-0659-443a-baf7-fb1f2fc07fc2-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf\" (UID: \"d37f944b-0659-443a-baf7-fb1f2fc07fc2\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf" Oct 10 16:43:57 crc kubenswrapper[4799]: I1010 16:43:57.798132 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2gfx\" (UniqueName: \"kubernetes.io/projected/d37f944b-0659-443a-baf7-fb1f2fc07fc2-kube-api-access-j2gfx\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf\" (UID: \"d37f944b-0659-443a-baf7-fb1f2fc07fc2\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf" Oct 10 16:43:57 crc kubenswrapper[4799]: I1010 16:43:57.798244 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d37f944b-0659-443a-baf7-fb1f2fc07fc2-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf\" (UID: \"d37f944b-0659-443a-baf7-fb1f2fc07fc2\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf" Oct 10 16:43:57 crc kubenswrapper[4799]: I1010 16:43:57.899545 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d37f944b-0659-443a-baf7-fb1f2fc07fc2-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf\" (UID: \"d37f944b-0659-443a-baf7-fb1f2fc07fc2\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf" Oct 10 16:43:57 crc kubenswrapper[4799]: I1010 16:43:57.899625 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2gfx\" (UniqueName: \"kubernetes.io/projected/d37f944b-0659-443a-baf7-fb1f2fc07fc2-kube-api-access-j2gfx\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf\" (UID: \"d37f944b-0659-443a-baf7-fb1f2fc07fc2\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf" Oct 10 16:43:57 crc kubenswrapper[4799]: I1010 16:43:57.899727 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d37f944b-0659-443a-baf7-fb1f2fc07fc2-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf\" (UID: \"d37f944b-0659-443a-baf7-fb1f2fc07fc2\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf" Oct 10 16:43:57 crc kubenswrapper[4799]: I1010 16:43:57.900642 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/d37f944b-0659-443a-baf7-fb1f2fc07fc2-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf\" (UID: \"d37f944b-0659-443a-baf7-fb1f2fc07fc2\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf" Oct 10 16:43:57 crc kubenswrapper[4799]: I1010 16:43:57.901221 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d37f944b-0659-443a-baf7-fb1f2fc07fc2-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf\" (UID: \"d37f944b-0659-443a-baf7-fb1f2fc07fc2\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf" Oct 10 16:43:57 crc kubenswrapper[4799]: I1010 16:43:57.925175 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2gfx\" (UniqueName: \"kubernetes.io/projected/d37f944b-0659-443a-baf7-fb1f2fc07fc2-kube-api-access-j2gfx\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf\" (UID: \"d37f944b-0659-443a-baf7-fb1f2fc07fc2\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf" Oct 10 16:43:58 crc kubenswrapper[4799]: I1010 16:43:58.024842 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf" Oct 10 16:43:58 crc kubenswrapper[4799]: I1010 16:43:58.291467 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf"] Oct 10 16:43:58 crc kubenswrapper[4799]: W1010 16:43:58.298180 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd37f944b_0659_443a_baf7_fb1f2fc07fc2.slice/crio-602ca09348b11886f994680437257949751fd15a6cc3366f2ab03b4ea48a4807 WatchSource:0}: Error finding container 602ca09348b11886f994680437257949751fd15a6cc3366f2ab03b4ea48a4807: Status 404 returned error can't find the container with id 602ca09348b11886f994680437257949751fd15a6cc3366f2ab03b4ea48a4807 Oct 10 16:43:59 crc kubenswrapper[4799]: I1010 16:43:59.231331 4799 generic.go:334] "Generic (PLEG): container finished" podID="d37f944b-0659-443a-baf7-fb1f2fc07fc2" containerID="eb80b3552f5924e67aaf6a46b8b1694f10db762332158a2adc5548f406c82386" exitCode=0 Oct 10 16:43:59 crc kubenswrapper[4799]: I1010 16:43:59.231377 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf" event={"ID":"d37f944b-0659-443a-baf7-fb1f2fc07fc2","Type":"ContainerDied","Data":"eb80b3552f5924e67aaf6a46b8b1694f10db762332158a2adc5548f406c82386"} Oct 10 16:43:59 crc kubenswrapper[4799]: I1010 16:43:59.231406 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf" event={"ID":"d37f944b-0659-443a-baf7-fb1f2fc07fc2","Type":"ContainerStarted","Data":"602ca09348b11886f994680437257949751fd15a6cc3366f2ab03b4ea48a4807"} Oct 10 16:44:01 crc kubenswrapper[4799]: I1010 16:44:01.251885 4799 generic.go:334] "Generic (PLEG): container finished" podID="d37f944b-0659-443a-baf7-fb1f2fc07fc2" containerID="6b9ff055a6b920228187cdee991b0e4258abdcffe028b14119c0124b1e06b2f7" exitCode=0 Oct 10 16:44:01 crc kubenswrapper[4799]: I1010 16:44:01.251972 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf" event={"ID":"d37f944b-0659-443a-baf7-fb1f2fc07fc2","Type":"ContainerDied","Data":"6b9ff055a6b920228187cdee991b0e4258abdcffe028b14119c0124b1e06b2f7"} Oct 10 16:44:02 crc kubenswrapper[4799]: I1010 16:44:02.264174 4799 generic.go:334] "Generic (PLEG): container finished" podID="d37f944b-0659-443a-baf7-fb1f2fc07fc2" containerID="a605567f22ae407fa6261eee21e107dcb69bf79360f8ba4ab9a3177725b8c51e" exitCode=0 Oct 10 16:44:02 crc kubenswrapper[4799]: I1010 16:44:02.264232 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf" event={"ID":"d37f944b-0659-443a-baf7-fb1f2fc07fc2","Type":"ContainerDied","Data":"a605567f22ae407fa6261eee21e107dcb69bf79360f8ba4ab9a3177725b8c51e"} Oct 10 16:44:03 crc kubenswrapper[4799]: I1010 16:44:03.591592 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf" Oct 10 16:44:03 crc kubenswrapper[4799]: I1010 16:44:03.693256 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d37f944b-0659-443a-baf7-fb1f2fc07fc2-bundle\") pod \"d37f944b-0659-443a-baf7-fb1f2fc07fc2\" (UID: \"d37f944b-0659-443a-baf7-fb1f2fc07fc2\") " Oct 10 16:44:03 crc kubenswrapper[4799]: I1010 16:44:03.693396 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j2gfx\" (UniqueName: \"kubernetes.io/projected/d37f944b-0659-443a-baf7-fb1f2fc07fc2-kube-api-access-j2gfx\") pod \"d37f944b-0659-443a-baf7-fb1f2fc07fc2\" (UID: \"d37f944b-0659-443a-baf7-fb1f2fc07fc2\") " Oct 10 16:44:03 crc kubenswrapper[4799]: I1010 16:44:03.693475 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d37f944b-0659-443a-baf7-fb1f2fc07fc2-util\") pod \"d37f944b-0659-443a-baf7-fb1f2fc07fc2\" (UID: \"d37f944b-0659-443a-baf7-fb1f2fc07fc2\") " Oct 10 16:44:03 crc kubenswrapper[4799]: I1010 16:44:03.694079 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d37f944b-0659-443a-baf7-fb1f2fc07fc2-bundle" (OuterVolumeSpecName: "bundle") pod "d37f944b-0659-443a-baf7-fb1f2fc07fc2" (UID: "d37f944b-0659-443a-baf7-fb1f2fc07fc2"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:44:03 crc kubenswrapper[4799]: I1010 16:44:03.701182 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d37f944b-0659-443a-baf7-fb1f2fc07fc2-kube-api-access-j2gfx" (OuterVolumeSpecName: "kube-api-access-j2gfx") pod "d37f944b-0659-443a-baf7-fb1f2fc07fc2" (UID: "d37f944b-0659-443a-baf7-fb1f2fc07fc2"). InnerVolumeSpecName "kube-api-access-j2gfx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:44:03 crc kubenswrapper[4799]: I1010 16:44:03.795697 4799 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d37f944b-0659-443a-baf7-fb1f2fc07fc2-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:44:03 crc kubenswrapper[4799]: I1010 16:44:03.795778 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j2gfx\" (UniqueName: \"kubernetes.io/projected/d37f944b-0659-443a-baf7-fb1f2fc07fc2-kube-api-access-j2gfx\") on node \"crc\" DevicePath \"\"" Oct 10 16:44:04 crc kubenswrapper[4799]: I1010 16:44:04.077397 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d37f944b-0659-443a-baf7-fb1f2fc07fc2-util" (OuterVolumeSpecName: "util") pod "d37f944b-0659-443a-baf7-fb1f2fc07fc2" (UID: "d37f944b-0659-443a-baf7-fb1f2fc07fc2"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:44:04 crc kubenswrapper[4799]: I1010 16:44:04.098844 4799 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d37f944b-0659-443a-baf7-fb1f2fc07fc2-util\") on node \"crc\" DevicePath \"\"" Oct 10 16:44:04 crc kubenswrapper[4799]: I1010 16:44:04.281628 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf" event={"ID":"d37f944b-0659-443a-baf7-fb1f2fc07fc2","Type":"ContainerDied","Data":"602ca09348b11886f994680437257949751fd15a6cc3366f2ab03b4ea48a4807"} Oct 10 16:44:04 crc kubenswrapper[4799]: I1010 16:44:04.281687 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="602ca09348b11886f994680437257949751fd15a6cc3366f2ab03b4ea48a4807" Oct 10 16:44:04 crc kubenswrapper[4799]: I1010 16:44:04.281728 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf" Oct 10 16:44:05 crc kubenswrapper[4799]: I1010 16:44:05.370484 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-8pflw"] Oct 10 16:44:05 crc kubenswrapper[4799]: E1010 16:44:05.370743 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d37f944b-0659-443a-baf7-fb1f2fc07fc2" containerName="extract" Oct 10 16:44:05 crc kubenswrapper[4799]: I1010 16:44:05.370779 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="d37f944b-0659-443a-baf7-fb1f2fc07fc2" containerName="extract" Oct 10 16:44:05 crc kubenswrapper[4799]: E1010 16:44:05.370795 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d37f944b-0659-443a-baf7-fb1f2fc07fc2" containerName="util" Oct 10 16:44:05 crc kubenswrapper[4799]: I1010 16:44:05.370804 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="d37f944b-0659-443a-baf7-fb1f2fc07fc2" containerName="util" Oct 10 16:44:05 crc kubenswrapper[4799]: E1010 16:44:05.370818 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d37f944b-0659-443a-baf7-fb1f2fc07fc2" containerName="pull" Oct 10 16:44:05 crc kubenswrapper[4799]: I1010 16:44:05.370827 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="d37f944b-0659-443a-baf7-fb1f2fc07fc2" containerName="pull" Oct 10 16:44:05 crc kubenswrapper[4799]: I1010 16:44:05.370940 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="d37f944b-0659-443a-baf7-fb1f2fc07fc2" containerName="extract" Oct 10 16:44:05 crc kubenswrapper[4799]: I1010 16:44:05.371401 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-8pflw" Oct 10 16:44:05 crc kubenswrapper[4799]: I1010 16:44:05.373273 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Oct 10 16:44:05 crc kubenswrapper[4799]: I1010 16:44:05.373334 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-fncmr" Oct 10 16:44:05 crc kubenswrapper[4799]: I1010 16:44:05.373599 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Oct 10 16:44:05 crc kubenswrapper[4799]: I1010 16:44:05.380996 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-8pflw"] Oct 10 16:44:05 crc kubenswrapper[4799]: I1010 16:44:05.516036 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j74hz\" (UniqueName: \"kubernetes.io/projected/5a084a6f-f89a-48f4-b61a-0111ac1b72ba-kube-api-access-j74hz\") pod \"nmstate-operator-858ddd8f98-8pflw\" (UID: \"5a084a6f-f89a-48f4-b61a-0111ac1b72ba\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-8pflw" Oct 10 16:44:05 crc kubenswrapper[4799]: I1010 16:44:05.616846 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j74hz\" (UniqueName: \"kubernetes.io/projected/5a084a6f-f89a-48f4-b61a-0111ac1b72ba-kube-api-access-j74hz\") pod \"nmstate-operator-858ddd8f98-8pflw\" (UID: \"5a084a6f-f89a-48f4-b61a-0111ac1b72ba\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-8pflw" Oct 10 16:44:05 crc kubenswrapper[4799]: I1010 16:44:05.638961 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j74hz\" 
(UniqueName: \"kubernetes.io/projected/5a084a6f-f89a-48f4-b61a-0111ac1b72ba-kube-api-access-j74hz\") pod \"nmstate-operator-858ddd8f98-8pflw\" (UID: \"5a084a6f-f89a-48f4-b61a-0111ac1b72ba\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-8pflw" Oct 10 16:44:05 crc kubenswrapper[4799]: I1010 16:44:05.686949 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-8pflw" Oct 10 16:44:05 crc kubenswrapper[4799]: I1010 16:44:05.950186 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-8pflw"] Oct 10 16:44:05 crc kubenswrapper[4799]: W1010 16:44:05.953461 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5a084a6f_f89a_48f4_b61a_0111ac1b72ba.slice/crio-a0db6c93ed78d58f20171f79330520332aa57c0c4fb0163031879c4866b3f8ff WatchSource:0}: Error finding container a0db6c93ed78d58f20171f79330520332aa57c0c4fb0163031879c4866b3f8ff: Status 404 returned error can't find the container with id a0db6c93ed78d58f20171f79330520332aa57c0c4fb0163031879c4866b3f8ff Oct 10 16:44:06 crc kubenswrapper[4799]: I1010 16:44:06.294195 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-8pflw" event={"ID":"5a084a6f-f89a-48f4-b61a-0111ac1b72ba","Type":"ContainerStarted","Data":"a0db6c93ed78d58f20171f79330520332aa57c0c4fb0163031879c4866b3f8ff"} Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.098993 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-fvpc8"] Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.101026 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-fvpc8" podUID="dd0fbd79-76a9-4a87-a67b-20e782993376" containerName="controller-manager" containerID="cri-o://2a546856fc823e003adba004f8dcb743f630836d1b927980045531a4243ea0a6" gracePeriod=30 Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.192642 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-w2jsl"] Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.193067 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w2jsl" podUID="f8bab52f-8a27-495e-80d2-9794b984939e" containerName="route-controller-manager" containerID="cri-o://a2313b9847c5d6b5a564afa24e9c95dadc1bf76e172a36672539400197d808c6" gracePeriod=30 Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.307396 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-8pflw" event={"ID":"5a084a6f-f89a-48f4-b61a-0111ac1b72ba","Type":"ContainerStarted","Data":"537e1e436c44338843343556766c85badc791cedff8ee245982e1be4c0d0549d"} Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.314347 4799 generic.go:334] "Generic (PLEG): container finished" podID="dd0fbd79-76a9-4a87-a67b-20e782993376" containerID="2a546856fc823e003adba004f8dcb743f630836d1b927980045531a4243ea0a6" exitCode=0 Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.314383 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-fvpc8" 
event={"ID":"dd0fbd79-76a9-4a87-a67b-20e782993376","Type":"ContainerDied","Data":"2a546856fc823e003adba004f8dcb743f630836d1b927980045531a4243ea0a6"} Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.452710 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-fvpc8" Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.469051 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-858ddd8f98-8pflw" podStartSLOduration=1.479306823 podStartE2EDuration="3.469033277s" podCreationTimestamp="2025-10-10 16:44:05 +0000 UTC" firstStartedPulling="2025-10-10 16:44:05.956636135 +0000 UTC m=+739.464960260" lastFinishedPulling="2025-10-10 16:44:07.946362599 +0000 UTC m=+741.454686714" observedRunningTime="2025-10-10 16:44:08.328077474 +0000 UTC m=+741.836401589" watchObservedRunningTime="2025-10-10 16:44:08.469033277 +0000 UTC m=+741.977357392" Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.519546 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w2jsl" Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.554576 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd0fbd79-76a9-4a87-a67b-20e782993376-config\") pod \"dd0fbd79-76a9-4a87-a67b-20e782993376\" (UID: \"dd0fbd79-76a9-4a87-a67b-20e782993376\") " Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.554625 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2bsdq\" (UniqueName: \"kubernetes.io/projected/dd0fbd79-76a9-4a87-a67b-20e782993376-kube-api-access-2bsdq\") pod \"dd0fbd79-76a9-4a87-a67b-20e782993376\" (UID: \"dd0fbd79-76a9-4a87-a67b-20e782993376\") " Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.554677 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dd0fbd79-76a9-4a87-a67b-20e782993376-serving-cert\") pod \"dd0fbd79-76a9-4a87-a67b-20e782993376\" (UID: \"dd0fbd79-76a9-4a87-a67b-20e782993376\") " Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.554769 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dd0fbd79-76a9-4a87-a67b-20e782993376-client-ca\") pod \"dd0fbd79-76a9-4a87-a67b-20e782993376\" (UID: \"dd0fbd79-76a9-4a87-a67b-20e782993376\") " Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.554790 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/dd0fbd79-76a9-4a87-a67b-20e782993376-proxy-ca-bundles\") pod \"dd0fbd79-76a9-4a87-a67b-20e782993376\" (UID: \"dd0fbd79-76a9-4a87-a67b-20e782993376\") " Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.555620 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd0fbd79-76a9-4a87-a67b-20e782993376-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "dd0fbd79-76a9-4a87-a67b-20e782993376" (UID: "dd0fbd79-76a9-4a87-a67b-20e782993376"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.555745 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd0fbd79-76a9-4a87-a67b-20e782993376-config" (OuterVolumeSpecName: "config") pod "dd0fbd79-76a9-4a87-a67b-20e782993376" (UID: "dd0fbd79-76a9-4a87-a67b-20e782993376"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.556071 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd0fbd79-76a9-4a87-a67b-20e782993376-client-ca" (OuterVolumeSpecName: "client-ca") pod "dd0fbd79-76a9-4a87-a67b-20e782993376" (UID: "dd0fbd79-76a9-4a87-a67b-20e782993376"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.560027 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd0fbd79-76a9-4a87-a67b-20e782993376-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "dd0fbd79-76a9-4a87-a67b-20e782993376" (UID: "dd0fbd79-76a9-4a87-a67b-20e782993376"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.560027 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd0fbd79-76a9-4a87-a67b-20e782993376-kube-api-access-2bsdq" (OuterVolumeSpecName: "kube-api-access-2bsdq") pod "dd0fbd79-76a9-4a87-a67b-20e782993376" (UID: "dd0fbd79-76a9-4a87-a67b-20e782993376"). InnerVolumeSpecName "kube-api-access-2bsdq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.656769 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f8bab52f-8a27-495e-80d2-9794b984939e-serving-cert\") pod \"f8bab52f-8a27-495e-80d2-9794b984939e\" (UID: \"f8bab52f-8a27-495e-80d2-9794b984939e\") " Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.656829 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f8bab52f-8a27-495e-80d2-9794b984939e-client-ca\") pod \"f8bab52f-8a27-495e-80d2-9794b984939e\" (UID: \"f8bab52f-8a27-495e-80d2-9794b984939e\") " Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.656856 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8bab52f-8a27-495e-80d2-9794b984939e-config\") pod \"f8bab52f-8a27-495e-80d2-9794b984939e\" (UID: \"f8bab52f-8a27-495e-80d2-9794b984939e\") " Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.656908 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-55fpk\" (UniqueName: \"kubernetes.io/projected/f8bab52f-8a27-495e-80d2-9794b984939e-kube-api-access-55fpk\") pod \"f8bab52f-8a27-495e-80d2-9794b984939e\" (UID: \"f8bab52f-8a27-495e-80d2-9794b984939e\") " Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.657117 4799 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dd0fbd79-76a9-4a87-a67b-20e782993376-client-ca\") on node \"crc\" DevicePath \"\"" Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.657129 4799 reconciler_common.go:293] "Volume 
detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/dd0fbd79-76a9-4a87-a67b-20e782993376-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.657137 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd0fbd79-76a9-4a87-a67b-20e782993376-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.657146 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2bsdq\" (UniqueName: \"kubernetes.io/projected/dd0fbd79-76a9-4a87-a67b-20e782993376-kube-api-access-2bsdq\") on node \"crc\" DevicePath \"\"" Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.657154 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dd0fbd79-76a9-4a87-a67b-20e782993376-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.658330 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8bab52f-8a27-495e-80d2-9794b984939e-config" (OuterVolumeSpecName: "config") pod "f8bab52f-8a27-495e-80d2-9794b984939e" (UID: "f8bab52f-8a27-495e-80d2-9794b984939e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.658682 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8bab52f-8a27-495e-80d2-9794b984939e-client-ca" (OuterVolumeSpecName: "client-ca") pod "f8bab52f-8a27-495e-80d2-9794b984939e" (UID: "f8bab52f-8a27-495e-80d2-9794b984939e"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.660669 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8bab52f-8a27-495e-80d2-9794b984939e-kube-api-access-55fpk" (OuterVolumeSpecName: "kube-api-access-55fpk") pod "f8bab52f-8a27-495e-80d2-9794b984939e" (UID: "f8bab52f-8a27-495e-80d2-9794b984939e"). InnerVolumeSpecName "kube-api-access-55fpk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.660728 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8bab52f-8a27-495e-80d2-9794b984939e-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "f8bab52f-8a27-495e-80d2-9794b984939e" (UID: "f8bab52f-8a27-495e-80d2-9794b984939e"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.759044 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f8bab52f-8a27-495e-80d2-9794b984939e-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.759113 4799 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f8bab52f-8a27-495e-80d2-9794b984939e-client-ca\") on node \"crc\" DevicePath \"\"" Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.759138 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8bab52f-8a27-495e-80d2-9794b984939e-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:44:08 crc kubenswrapper[4799]: I1010 16:44:08.759162 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-55fpk\" (UniqueName: \"kubernetes.io/projected/f8bab52f-8a27-495e-80d2-9794b984939e-kube-api-access-55fpk\") on node \"crc\" DevicePath \"\"" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.322807 4799 generic.go:334] "Generic (PLEG): container finished" podID="f8bab52f-8a27-495e-80d2-9794b984939e" containerID="a2313b9847c5d6b5a564afa24e9c95dadc1bf76e172a36672539400197d808c6" exitCode=0 Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.322868 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w2jsl" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.322955 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w2jsl" event={"ID":"f8bab52f-8a27-495e-80d2-9794b984939e","Type":"ContainerDied","Data":"a2313b9847c5d6b5a564afa24e9c95dadc1bf76e172a36672539400197d808c6"} Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.323050 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w2jsl" event={"ID":"f8bab52f-8a27-495e-80d2-9794b984939e","Type":"ContainerDied","Data":"5bc503abd11d6908f63fe6a908a7c1e428f4244e55b441e9b569cee187281024"} Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.323084 4799 scope.go:117] "RemoveContainer" containerID="a2313b9847c5d6b5a564afa24e9c95dadc1bf76e172a36672539400197d808c6" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.325264 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-fvpc8" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.325743 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-fvpc8" event={"ID":"dd0fbd79-76a9-4a87-a67b-20e782993376","Type":"ContainerDied","Data":"fe566607a90d72c2ee93b3e2245e25a3a5ae8c510ba26555174f5f61387257f4"} Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.358741 4799 scope.go:117] "RemoveContainer" containerID="a2313b9847c5d6b5a564afa24e9c95dadc1bf76e172a36672539400197d808c6" Oct 10 16:44:09 crc kubenswrapper[4799]: E1010 16:44:09.359612 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2313b9847c5d6b5a564afa24e9c95dadc1bf76e172a36672539400197d808c6\": container with ID starting with a2313b9847c5d6b5a564afa24e9c95dadc1bf76e172a36672539400197d808c6 not found: ID does not exist" containerID="a2313b9847c5d6b5a564afa24e9c95dadc1bf76e172a36672539400197d808c6" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.359654 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2313b9847c5d6b5a564afa24e9c95dadc1bf76e172a36672539400197d808c6"} err="failed to get container status \"a2313b9847c5d6b5a564afa24e9c95dadc1bf76e172a36672539400197d808c6\": rpc error: code = NotFound desc = could not find container \"a2313b9847c5d6b5a564afa24e9c95dadc1bf76e172a36672539400197d808c6\": container with ID starting with a2313b9847c5d6b5a564afa24e9c95dadc1bf76e172a36672539400197d808c6 not found: ID does not exist" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.359681 4799 scope.go:117] "RemoveContainer" containerID="2a546856fc823e003adba004f8dcb743f630836d1b927980045531a4243ea0a6" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.364768 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-w2jsl"] Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.370099 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-w2jsl"] Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.380736 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-fvpc8"] Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.386591 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-fvpc8"] Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.413852 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd0fbd79-76a9-4a87-a67b-20e782993376" path="/var/lib/kubelet/pods/dd0fbd79-76a9-4a87-a67b-20e782993376/volumes" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.418853 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8bab52f-8a27-495e-80d2-9794b984939e" path="/var/lib/kubelet/pods/f8bab52f-8a27-495e-80d2-9794b984939e/volumes" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.419452 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-8szkw"] Oct 10 16:44:09 crc kubenswrapper[4799]: E1010 16:44:09.419801 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd0fbd79-76a9-4a87-a67b-20e782993376" containerName="controller-manager" Oct 10 16:44:09 crc kubenswrapper[4799]: 
Oct 10 16:44:09 crc kubenswrapper[4799]: E1010 16:44:09.419837 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8bab52f-8a27-495e-80d2-9794b984939e" containerName="route-controller-manager"
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.419846 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8bab52f-8a27-495e-80d2-9794b984939e" containerName="route-controller-manager"
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.420543 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd0fbd79-76a9-4a87-a67b-20e782993376" containerName="controller-manager"
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.420688 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8bab52f-8a27-495e-80d2-9794b984939e" containerName="route-controller-manager"
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.423557 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-8szkw"
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.425021 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-5j22z"]
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.436728 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-5j22z"
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.439360 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-8szkw"]
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.439496 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook"
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.441615 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-5j22z"]
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.449427 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-44gb8"]
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.450256 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-44gb8"
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.522872 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-stz8n"]
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.523551 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-stz8n"
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.529176 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert"
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.529520 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-q5495"
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.535317 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf"
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.555745 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-855d595dcb-r2pw5"]
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.556497 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-855d595dcb-r2pw5"
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.558593 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.558715 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7c59c6bb75-l5crw"]
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.559380 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7c59c6bb75-l5crw"
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.561709 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.561874 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.561992 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.562137 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.562591 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.562717 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.565739 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.569682 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.570257 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.570829 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/03e8f0e3-1119-4f24-9625-4f799ca6d87f-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-5j22z\" (UID: \"03e8f0e3-1119-4f24-9625-4f799ca6d87f\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-5j22z"
\"kubernetes.io/secret/03e8f0e3-1119-4f24-9625-4f799ca6d87f-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-5j22z\" (UID: \"03e8f0e3-1119-4f24-9625-4f799ca6d87f\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-5j22z" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.570866 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.570891 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdl4t\" (UniqueName: \"kubernetes.io/projected/1cea25d9-ee17-4a2d-8cfc-74b0cad9e91a-kube-api-access-gdl4t\") pod \"nmstate-handler-44gb8\" (UID: \"1cea25d9-ee17-4a2d-8cfc-74b0cad9e91a\") " pod="openshift-nmstate/nmstate-handler-44gb8" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.570917 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rx27w\" (UniqueName: \"kubernetes.io/projected/cf339ce7-7475-4cea-a474-df2df7c47cbc-kube-api-access-rx27w\") pod \"nmstate-metrics-fdff9cb8d-8szkw\" (UID: \"cf339ce7-7475-4cea-a474-df2df7c47cbc\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-8szkw" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.570984 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/1cea25d9-ee17-4a2d-8cfc-74b0cad9e91a-nmstate-lock\") pod \"nmstate-handler-44gb8\" (UID: \"1cea25d9-ee17-4a2d-8cfc-74b0cad9e91a\") " pod="openshift-nmstate/nmstate-handler-44gb8" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.571043 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2vgc7\" (UniqueName: \"kubernetes.io/projected/03e8f0e3-1119-4f24-9625-4f799ca6d87f-kube-api-access-2vgc7\") pod \"nmstate-webhook-6cdbc54649-5j22z\" (UID: \"03e8f0e3-1119-4f24-9625-4f799ca6d87f\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-5j22z" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.571099 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/1cea25d9-ee17-4a2d-8cfc-74b0cad9e91a-ovs-socket\") pod \"nmstate-handler-44gb8\" (UID: \"1cea25d9-ee17-4a2d-8cfc-74b0cad9e91a\") " pod="openshift-nmstate/nmstate-handler-44gb8" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.571140 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/1cea25d9-ee17-4a2d-8cfc-74b0cad9e91a-dbus-socket\") pod \"nmstate-handler-44gb8\" (UID: \"1cea25d9-ee17-4a2d-8cfc-74b0cad9e91a\") " pod="openshift-nmstate/nmstate-handler-44gb8" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.571936 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.573685 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7c59c6bb75-l5crw"] Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.577100 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.579562 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-route-controller-manager/route-controller-manager-855d595dcb-r2pw5"] Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.586962 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-stz8n"] Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.672731 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7a4ed768-740f-41cc-a806-5d8dd36a294a-serving-cert\") pod \"controller-manager-7c59c6bb75-l5crw\" (UID: \"7a4ed768-740f-41cc-a806-5d8dd36a294a\") " pod="openshift-controller-manager/controller-manager-7c59c6bb75-l5crw" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.672803 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kljp6\" (UniqueName: \"kubernetes.io/projected/65a0d7cd-721a-497c-9d14-22823486b5e3-kube-api-access-kljp6\") pod \"route-controller-manager-855d595dcb-r2pw5\" (UID: \"65a0d7cd-721a-497c-9d14-22823486b5e3\") " pod="openshift-route-controller-manager/route-controller-manager-855d595dcb-r2pw5" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.672827 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a4ed768-740f-41cc-a806-5d8dd36a294a-config\") pod \"controller-manager-7c59c6bb75-l5crw\" (UID: \"7a4ed768-740f-41cc-a806-5d8dd36a294a\") " pod="openshift-controller-manager/controller-manager-7c59c6bb75-l5crw" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.672863 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/1cea25d9-ee17-4a2d-8cfc-74b0cad9e91a-nmstate-lock\") pod \"nmstate-handler-44gb8\" (UID: \"1cea25d9-ee17-4a2d-8cfc-74b0cad9e91a\") " pod="openshift-nmstate/nmstate-handler-44gb8" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.672881 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/65a0d7cd-721a-497c-9d14-22823486b5e3-client-ca\") pod \"route-controller-manager-855d595dcb-r2pw5\" (UID: \"65a0d7cd-721a-497c-9d14-22823486b5e3\") " pod="openshift-route-controller-manager/route-controller-manager-855d595dcb-r2pw5" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.672901 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m22rl\" (UniqueName: \"kubernetes.io/projected/9a1584c9-53d3-448d-a84d-123526cfc076-kube-api-access-m22rl\") pod \"nmstate-console-plugin-6b874cbd85-stz8n\" (UID: \"9a1584c9-53d3-448d-a84d-123526cfc076\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-stz8n" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.672928 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2vgc7\" (UniqueName: \"kubernetes.io/projected/03e8f0e3-1119-4f24-9625-4f799ca6d87f-kube-api-access-2vgc7\") pod \"nmstate-webhook-6cdbc54649-5j22z\" (UID: \"03e8f0e3-1119-4f24-9625-4f799ca6d87f\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-5j22z" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.672946 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/65a0d7cd-721a-497c-9d14-22823486b5e3-serving-cert\") pod \"route-controller-manager-855d595dcb-r2pw5\" (UID: \"65a0d7cd-721a-497c-9d14-22823486b5e3\") " pod="openshift-route-controller-manager/route-controller-manager-855d595dcb-r2pw5" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.672966 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7a4ed768-740f-41cc-a806-5d8dd36a294a-client-ca\") pod \"controller-manager-7c59c6bb75-l5crw\" (UID: \"7a4ed768-740f-41cc-a806-5d8dd36a294a\") " pod="openshift-controller-manager/controller-manager-7c59c6bb75-l5crw" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.672983 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/9a1584c9-53d3-448d-a84d-123526cfc076-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-stz8n\" (UID: \"9a1584c9-53d3-448d-a84d-123526cfc076\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-stz8n" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.673002 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/1cea25d9-ee17-4a2d-8cfc-74b0cad9e91a-ovs-socket\") pod \"nmstate-handler-44gb8\" (UID: \"1cea25d9-ee17-4a2d-8cfc-74b0cad9e91a\") " pod="openshift-nmstate/nmstate-handler-44gb8" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.673020 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65a0d7cd-721a-497c-9d14-22823486b5e3-config\") pod \"route-controller-manager-855d595dcb-r2pw5\" (UID: \"65a0d7cd-721a-497c-9d14-22823486b5e3\") " pod="openshift-route-controller-manager/route-controller-manager-855d595dcb-r2pw5" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.673039 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bh55j\" (UniqueName: \"kubernetes.io/projected/7a4ed768-740f-41cc-a806-5d8dd36a294a-kube-api-access-bh55j\") pod \"controller-manager-7c59c6bb75-l5crw\" (UID: \"7a4ed768-740f-41cc-a806-5d8dd36a294a\") " pod="openshift-controller-manager/controller-manager-7c59c6bb75-l5crw" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.673054 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/9a1584c9-53d3-448d-a84d-123526cfc076-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-stz8n\" (UID: \"9a1584c9-53d3-448d-a84d-123526cfc076\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-stz8n" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.673071 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7a4ed768-740f-41cc-a806-5d8dd36a294a-proxy-ca-bundles\") pod \"controller-manager-7c59c6bb75-l5crw\" (UID: \"7a4ed768-740f-41cc-a806-5d8dd36a294a\") " pod="openshift-controller-manager/controller-manager-7c59c6bb75-l5crw" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.673091 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/1cea25d9-ee17-4a2d-8cfc-74b0cad9e91a-dbus-socket\") pod 
\"nmstate-handler-44gb8\" (UID: \"1cea25d9-ee17-4a2d-8cfc-74b0cad9e91a\") " pod="openshift-nmstate/nmstate-handler-44gb8" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.673175 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/1cea25d9-ee17-4a2d-8cfc-74b0cad9e91a-ovs-socket\") pod \"nmstate-handler-44gb8\" (UID: \"1cea25d9-ee17-4a2d-8cfc-74b0cad9e91a\") " pod="openshift-nmstate/nmstate-handler-44gb8" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.673202 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/03e8f0e3-1119-4f24-9625-4f799ca6d87f-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-5j22z\" (UID: \"03e8f0e3-1119-4f24-9625-4f799ca6d87f\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-5j22z" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.673274 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdl4t\" (UniqueName: \"kubernetes.io/projected/1cea25d9-ee17-4a2d-8cfc-74b0cad9e91a-kube-api-access-gdl4t\") pod \"nmstate-handler-44gb8\" (UID: \"1cea25d9-ee17-4a2d-8cfc-74b0cad9e91a\") " pod="openshift-nmstate/nmstate-handler-44gb8" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.673303 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rx27w\" (UniqueName: \"kubernetes.io/projected/cf339ce7-7475-4cea-a474-df2df7c47cbc-kube-api-access-rx27w\") pod \"nmstate-metrics-fdff9cb8d-8szkw\" (UID: \"cf339ce7-7475-4cea-a474-df2df7c47cbc\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-8szkw" Oct 10 16:44:09 crc kubenswrapper[4799]: E1010 16:44:09.673321 4799 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.673337 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/1cea25d9-ee17-4a2d-8cfc-74b0cad9e91a-dbus-socket\") pod \"nmstate-handler-44gb8\" (UID: \"1cea25d9-ee17-4a2d-8cfc-74b0cad9e91a\") " pod="openshift-nmstate/nmstate-handler-44gb8" Oct 10 16:44:09 crc kubenswrapper[4799]: E1010 16:44:09.673384 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/03e8f0e3-1119-4f24-9625-4f799ca6d87f-tls-key-pair podName:03e8f0e3-1119-4f24-9625-4f799ca6d87f nodeName:}" failed. No retries permitted until 2025-10-10 16:44:10.173360789 +0000 UTC m=+743.681684984 (durationBeforeRetry 500ms). 
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.673301 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/1cea25d9-ee17-4a2d-8cfc-74b0cad9e91a-nmstate-lock\") pod \"nmstate-handler-44gb8\" (UID: \"1cea25d9-ee17-4a2d-8cfc-74b0cad9e91a\") " pod="openshift-nmstate/nmstate-handler-44gb8"
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.695223 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rx27w\" (UniqueName: \"kubernetes.io/projected/cf339ce7-7475-4cea-a474-df2df7c47cbc-kube-api-access-rx27w\") pod \"nmstate-metrics-fdff9cb8d-8szkw\" (UID: \"cf339ce7-7475-4cea-a474-df2df7c47cbc\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-8szkw"
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.699451 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdl4t\" (UniqueName: \"kubernetes.io/projected/1cea25d9-ee17-4a2d-8cfc-74b0cad9e91a-kube-api-access-gdl4t\") pod \"nmstate-handler-44gb8\" (UID: \"1cea25d9-ee17-4a2d-8cfc-74b0cad9e91a\") " pod="openshift-nmstate/nmstate-handler-44gb8"
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.722414 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2vgc7\" (UniqueName: \"kubernetes.io/projected/03e8f0e3-1119-4f24-9625-4f799ca6d87f-kube-api-access-2vgc7\") pod \"nmstate-webhook-6cdbc54649-5j22z\" (UID: \"03e8f0e3-1119-4f24-9625-4f799ca6d87f\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-5j22z"
Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.756575 4799 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-8szkw" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.775068 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7a4ed768-740f-41cc-a806-5d8dd36a294a-serving-cert\") pod \"controller-manager-7c59c6bb75-l5crw\" (UID: \"7a4ed768-740f-41cc-a806-5d8dd36a294a\") " pod="openshift-controller-manager/controller-manager-7c59c6bb75-l5crw" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.775114 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kljp6\" (UniqueName: \"kubernetes.io/projected/65a0d7cd-721a-497c-9d14-22823486b5e3-kube-api-access-kljp6\") pod \"route-controller-manager-855d595dcb-r2pw5\" (UID: \"65a0d7cd-721a-497c-9d14-22823486b5e3\") " pod="openshift-route-controller-manager/route-controller-manager-855d595dcb-r2pw5" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.775132 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a4ed768-740f-41cc-a806-5d8dd36a294a-config\") pod \"controller-manager-7c59c6bb75-l5crw\" (UID: \"7a4ed768-740f-41cc-a806-5d8dd36a294a\") " pod="openshift-controller-manager/controller-manager-7c59c6bb75-l5crw" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.775160 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/65a0d7cd-721a-497c-9d14-22823486b5e3-client-ca\") pod \"route-controller-manager-855d595dcb-r2pw5\" (UID: \"65a0d7cd-721a-497c-9d14-22823486b5e3\") " pod="openshift-route-controller-manager/route-controller-manager-855d595dcb-r2pw5" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.775186 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m22rl\" (UniqueName: \"kubernetes.io/projected/9a1584c9-53d3-448d-a84d-123526cfc076-kube-api-access-m22rl\") pod \"nmstate-console-plugin-6b874cbd85-stz8n\" (UID: \"9a1584c9-53d3-448d-a84d-123526cfc076\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-stz8n" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.775209 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65a0d7cd-721a-497c-9d14-22823486b5e3-serving-cert\") pod \"route-controller-manager-855d595dcb-r2pw5\" (UID: \"65a0d7cd-721a-497c-9d14-22823486b5e3\") " pod="openshift-route-controller-manager/route-controller-manager-855d595dcb-r2pw5" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.775228 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7a4ed768-740f-41cc-a806-5d8dd36a294a-client-ca\") pod \"controller-manager-7c59c6bb75-l5crw\" (UID: \"7a4ed768-740f-41cc-a806-5d8dd36a294a\") " pod="openshift-controller-manager/controller-manager-7c59c6bb75-l5crw" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.775250 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/9a1584c9-53d3-448d-a84d-123526cfc076-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-stz8n\" (UID: \"9a1584c9-53d3-448d-a84d-123526cfc076\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-stz8n" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.775277 4799 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65a0d7cd-721a-497c-9d14-22823486b5e3-config\") pod \"route-controller-manager-855d595dcb-r2pw5\" (UID: \"65a0d7cd-721a-497c-9d14-22823486b5e3\") " pod="openshift-route-controller-manager/route-controller-manager-855d595dcb-r2pw5" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.775300 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bh55j\" (UniqueName: \"kubernetes.io/projected/7a4ed768-740f-41cc-a806-5d8dd36a294a-kube-api-access-bh55j\") pod \"controller-manager-7c59c6bb75-l5crw\" (UID: \"7a4ed768-740f-41cc-a806-5d8dd36a294a\") " pod="openshift-controller-manager/controller-manager-7c59c6bb75-l5crw" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.775323 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/9a1584c9-53d3-448d-a84d-123526cfc076-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-stz8n\" (UID: \"9a1584c9-53d3-448d-a84d-123526cfc076\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-stz8n" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.775340 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7a4ed768-740f-41cc-a806-5d8dd36a294a-proxy-ca-bundles\") pod \"controller-manager-7c59c6bb75-l5crw\" (UID: \"7a4ed768-740f-41cc-a806-5d8dd36a294a\") " pod="openshift-controller-manager/controller-manager-7c59c6bb75-l5crw" Oct 10 16:44:09 crc kubenswrapper[4799]: E1010 16:44:09.776306 4799 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.776356 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7a4ed768-740f-41cc-a806-5d8dd36a294a-client-ca\") pod \"controller-manager-7c59c6bb75-l5crw\" (UID: \"7a4ed768-740f-41cc-a806-5d8dd36a294a\") " pod="openshift-controller-manager/controller-manager-7c59c6bb75-l5crw" Oct 10 16:44:09 crc kubenswrapper[4799]: E1010 16:44:09.776368 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9a1584c9-53d3-448d-a84d-123526cfc076-plugin-serving-cert podName:9a1584c9-53d3-448d-a84d-123526cfc076 nodeName:}" failed. No retries permitted until 2025-10-10 16:44:10.276355656 +0000 UTC m=+743.784679771 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/9a1584c9-53d3-448d-a84d-123526cfc076-plugin-serving-cert") pod "nmstate-console-plugin-6b874cbd85-stz8n" (UID: "9a1584c9-53d3-448d-a84d-123526cfc076") : secret "plugin-serving-cert" not found Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.776803 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7a4ed768-740f-41cc-a806-5d8dd36a294a-proxy-ca-bundles\") pod \"controller-manager-7c59c6bb75-l5crw\" (UID: \"7a4ed768-740f-41cc-a806-5d8dd36a294a\") " pod="openshift-controller-manager/controller-manager-7c59c6bb75-l5crw" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.776816 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/65a0d7cd-721a-497c-9d14-22823486b5e3-client-ca\") pod \"route-controller-manager-855d595dcb-r2pw5\" (UID: \"65a0d7cd-721a-497c-9d14-22823486b5e3\") " pod="openshift-route-controller-manager/route-controller-manager-855d595dcb-r2pw5" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.776878 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65a0d7cd-721a-497c-9d14-22823486b5e3-config\") pod \"route-controller-manager-855d595dcb-r2pw5\" (UID: \"65a0d7cd-721a-497c-9d14-22823486b5e3\") " pod="openshift-route-controller-manager/route-controller-manager-855d595dcb-r2pw5" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.777134 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/9a1584c9-53d3-448d-a84d-123526cfc076-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-stz8n\" (UID: \"9a1584c9-53d3-448d-a84d-123526cfc076\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-stz8n" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.777574 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a4ed768-740f-41cc-a806-5d8dd36a294a-config\") pod \"controller-manager-7c59c6bb75-l5crw\" (UID: \"7a4ed768-740f-41cc-a806-5d8dd36a294a\") " pod="openshift-controller-manager/controller-manager-7c59c6bb75-l5crw" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.779220 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65a0d7cd-721a-497c-9d14-22823486b5e3-serving-cert\") pod \"route-controller-manager-855d595dcb-r2pw5\" (UID: \"65a0d7cd-721a-497c-9d14-22823486b5e3\") " pod="openshift-route-controller-manager/route-controller-manager-855d595dcb-r2pw5" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.779821 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7a4ed768-740f-41cc-a806-5d8dd36a294a-serving-cert\") pod \"controller-manager-7c59c6bb75-l5crw\" (UID: \"7a4ed768-740f-41cc-a806-5d8dd36a294a\") " pod="openshift-controller-manager/controller-manager-7c59c6bb75-l5crw" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.790035 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-44gb8" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.794548 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kljp6\" (UniqueName: \"kubernetes.io/projected/65a0d7cd-721a-497c-9d14-22823486b5e3-kube-api-access-kljp6\") pod \"route-controller-manager-855d595dcb-r2pw5\" (UID: \"65a0d7cd-721a-497c-9d14-22823486b5e3\") " pod="openshift-route-controller-manager/route-controller-manager-855d595dcb-r2pw5" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.796474 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m22rl\" (UniqueName: \"kubernetes.io/projected/9a1584c9-53d3-448d-a84d-123526cfc076-kube-api-access-m22rl\") pod \"nmstate-console-plugin-6b874cbd85-stz8n\" (UID: \"9a1584c9-53d3-448d-a84d-123526cfc076\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-stz8n" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.798634 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bh55j\" (UniqueName: \"kubernetes.io/projected/7a4ed768-740f-41cc-a806-5d8dd36a294a-kube-api-access-bh55j\") pod \"controller-manager-7c59c6bb75-l5crw\" (UID: \"7a4ed768-740f-41cc-a806-5d8dd36a294a\") " pod="openshift-controller-manager/controller-manager-7c59c6bb75-l5crw" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.815420 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7c59c6bb75-l5crw"] Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.815842 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7c59c6bb75-l5crw" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.827331 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-855d595dcb-r2pw5"] Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.827718 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-855d595dcb-r2pw5" Oct 10 16:44:09 crc kubenswrapper[4799]: I1010 16:44:09.970789 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-8szkw"] Oct 10 16:44:10 crc kubenswrapper[4799]: I1010 16:44:10.147702 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-855d595dcb-r2pw5"] Oct 10 16:44:10 crc kubenswrapper[4799]: W1010 16:44:10.156393 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod65a0d7cd_721a_497c_9d14_22823486b5e3.slice/crio-f9c7a15466e196c923db88d8d5f7cb238d9db8c4a1d519006b806b9e07afcb6b WatchSource:0}: Error finding container f9c7a15466e196c923db88d8d5f7cb238d9db8c4a1d519006b806b9e07afcb6b: Status 404 returned error can't find the container with id f9c7a15466e196c923db88d8d5f7cb238d9db8c4a1d519006b806b9e07afcb6b Oct 10 16:44:10 crc kubenswrapper[4799]: I1010 16:44:10.184466 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/03e8f0e3-1119-4f24-9625-4f799ca6d87f-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-5j22z\" (UID: \"03e8f0e3-1119-4f24-9625-4f799ca6d87f\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-5j22z" Oct 10 16:44:10 crc kubenswrapper[4799]: I1010 16:44:10.194476 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/03e8f0e3-1119-4f24-9625-4f799ca6d87f-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-5j22z\" (UID: \"03e8f0e3-1119-4f24-9625-4f799ca6d87f\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-5j22z" Oct 10 16:44:10 crc kubenswrapper[4799]: I1010 16:44:10.285388 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/9a1584c9-53d3-448d-a84d-123526cfc076-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-stz8n\" (UID: \"9a1584c9-53d3-448d-a84d-123526cfc076\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-stz8n" Oct 10 16:44:10 crc kubenswrapper[4799]: I1010 16:44:10.288442 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/9a1584c9-53d3-448d-a84d-123526cfc076-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-stz8n\" (UID: \"9a1584c9-53d3-448d-a84d-123526cfc076\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-stz8n" Oct 10 16:44:10 crc kubenswrapper[4799]: I1010 16:44:10.302546 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7c59c6bb75-l5crw"] Oct 10 16:44:10 crc kubenswrapper[4799]: I1010 16:44:10.338520 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7c59c6bb75-l5crw" event={"ID":"7a4ed768-740f-41cc-a806-5d8dd36a294a","Type":"ContainerStarted","Data":"80c10c82e2d50060283458383c9ccefd99e0027882353bf8721aac4c5accc1f7"} Oct 10 16:44:10 crc kubenswrapper[4799]: I1010 16:44:10.339972 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-8szkw" event={"ID":"cf339ce7-7475-4cea-a474-df2df7c47cbc","Type":"ContainerStarted","Data":"b236b79e5ccd9be50404e745b754c0dd20a1211edfff507a840e28e23bcb5a61"} Oct 10 16:44:10 crc kubenswrapper[4799]: 
I1010 16:44:10.343163 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-855d595dcb-r2pw5" event={"ID":"65a0d7cd-721a-497c-9d14-22823486b5e3","Type":"ContainerStarted","Data":"d2eb3e3c1777ff3be43ce8780b9366dca334d79ef732f1ab2cf0458002f9ac5e"} Oct 10 16:44:10 crc kubenswrapper[4799]: I1010 16:44:10.343191 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-855d595dcb-r2pw5" event={"ID":"65a0d7cd-721a-497c-9d14-22823486b5e3","Type":"ContainerStarted","Data":"f9c7a15466e196c923db88d8d5f7cb238d9db8c4a1d519006b806b9e07afcb6b"} Oct 10 16:44:10 crc kubenswrapper[4799]: I1010 16:44:10.343293 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-855d595dcb-r2pw5" podUID="65a0d7cd-721a-497c-9d14-22823486b5e3" containerName="route-controller-manager" containerID="cri-o://d2eb3e3c1777ff3be43ce8780b9366dca334d79ef732f1ab2cf0458002f9ac5e" gracePeriod=30 Oct 10 16:44:10 crc kubenswrapper[4799]: I1010 16:44:10.343733 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-855d595dcb-r2pw5" Oct 10 16:44:10 crc kubenswrapper[4799]: I1010 16:44:10.344718 4799 patch_prober.go:28] interesting pod/route-controller-manager-855d595dcb-r2pw5 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.46:8443/healthz\": dial tcp 10.217.0.46:8443: connect: connection refused" start-of-body= Oct 10 16:44:10 crc kubenswrapper[4799]: I1010 16:44:10.344789 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-855d595dcb-r2pw5" podUID="65a0d7cd-721a-497c-9d14-22823486b5e3" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.46:8443/healthz\": dial tcp 10.217.0.46:8443: connect: connection refused" Oct 10 16:44:10 crc kubenswrapper[4799]: I1010 16:44:10.358548 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-44gb8" event={"ID":"1cea25d9-ee17-4a2d-8cfc-74b0cad9e91a","Type":"ContainerStarted","Data":"a6b9906a9ed964e258474a11cb4f803895ee13bbc8d1b72100bd4bd948b26577"} Oct 10 16:44:10 crc kubenswrapper[4799]: I1010 16:44:10.364221 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-855d595dcb-r2pw5" podStartSLOduration=2.36420705 podStartE2EDuration="2.36420705s" podCreationTimestamp="2025-10-10 16:44:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:44:10.362113579 +0000 UTC m=+743.870437714" watchObservedRunningTime="2025-10-10 16:44:10.36420705 +0000 UTC m=+743.872531165" Oct 10 16:44:10 crc kubenswrapper[4799]: I1010 16:44:10.380290 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-5j22z" Oct 10 16:44:10 crc kubenswrapper[4799]: I1010 16:44:10.441646 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-stz8n" Oct 10 16:44:10 crc kubenswrapper[4799]: I1010 16:44:10.611964 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-stz8n"] Oct 10 16:44:10 crc kubenswrapper[4799]: I1010 16:44:10.737814 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-855d595dcb-r2pw5_65a0d7cd-721a-497c-9d14-22823486b5e3/route-controller-manager/0.log" Oct 10 16:44:10 crc kubenswrapper[4799]: I1010 16:44:10.738156 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-855d595dcb-r2pw5" Oct 10 16:44:10 crc kubenswrapper[4799]: I1010 16:44:10.877795 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-5j22z"] Oct 10 16:44:10 crc kubenswrapper[4799]: W1010 16:44:10.879726 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod03e8f0e3_1119_4f24_9625_4f799ca6d87f.slice/crio-ef1282a23cd1b11cf8f05dc3b49c73407ac673134df41c066c90a086b063f884 WatchSource:0}: Error finding container ef1282a23cd1b11cf8f05dc3b49c73407ac673134df41c066c90a086b063f884: Status 404 returned error can't find the container with id ef1282a23cd1b11cf8f05dc3b49c73407ac673134df41c066c90a086b063f884 Oct 10 16:44:10 crc kubenswrapper[4799]: I1010 16:44:10.892723 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kljp6\" (UniqueName: \"kubernetes.io/projected/65a0d7cd-721a-497c-9d14-22823486b5e3-kube-api-access-kljp6\") pod \"65a0d7cd-721a-497c-9d14-22823486b5e3\" (UID: \"65a0d7cd-721a-497c-9d14-22823486b5e3\") " Oct 10 16:44:10 crc kubenswrapper[4799]: I1010 16:44:10.892784 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65a0d7cd-721a-497c-9d14-22823486b5e3-serving-cert\") pod \"65a0d7cd-721a-497c-9d14-22823486b5e3\" (UID: \"65a0d7cd-721a-497c-9d14-22823486b5e3\") " Oct 10 16:44:10 crc kubenswrapper[4799]: I1010 16:44:10.892822 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65a0d7cd-721a-497c-9d14-22823486b5e3-config\") pod \"65a0d7cd-721a-497c-9d14-22823486b5e3\" (UID: \"65a0d7cd-721a-497c-9d14-22823486b5e3\") " Oct 10 16:44:10 crc kubenswrapper[4799]: I1010 16:44:10.892902 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/65a0d7cd-721a-497c-9d14-22823486b5e3-client-ca\") pod \"65a0d7cd-721a-497c-9d14-22823486b5e3\" (UID: \"65a0d7cd-721a-497c-9d14-22823486b5e3\") " Oct 10 16:44:10 crc kubenswrapper[4799]: I1010 16:44:10.894035 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65a0d7cd-721a-497c-9d14-22823486b5e3-client-ca" (OuterVolumeSpecName: "client-ca") pod "65a0d7cd-721a-497c-9d14-22823486b5e3" (UID: "65a0d7cd-721a-497c-9d14-22823486b5e3"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:44:10 crc kubenswrapper[4799]: I1010 16:44:10.897088 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65a0d7cd-721a-497c-9d14-22823486b5e3-config" (OuterVolumeSpecName: "config") pod "65a0d7cd-721a-497c-9d14-22823486b5e3" (UID: "65a0d7cd-721a-497c-9d14-22823486b5e3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:44:10 crc kubenswrapper[4799]: I1010 16:44:10.898987 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65a0d7cd-721a-497c-9d14-22823486b5e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "65a0d7cd-721a-497c-9d14-22823486b5e3" (UID: "65a0d7cd-721a-497c-9d14-22823486b5e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:44:10 crc kubenswrapper[4799]: I1010 16:44:10.899128 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65a0d7cd-721a-497c-9d14-22823486b5e3-kube-api-access-kljp6" (OuterVolumeSpecName: "kube-api-access-kljp6") pod "65a0d7cd-721a-497c-9d14-22823486b5e3" (UID: "65a0d7cd-721a-497c-9d14-22823486b5e3"). InnerVolumeSpecName "kube-api-access-kljp6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:44:10 crc kubenswrapper[4799]: I1010 16:44:10.994505 4799 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/65a0d7cd-721a-497c-9d14-22823486b5e3-client-ca\") on node \"crc\" DevicePath \"\"" Oct 10 16:44:10 crc kubenswrapper[4799]: I1010 16:44:10.994546 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kljp6\" (UniqueName: \"kubernetes.io/projected/65a0d7cd-721a-497c-9d14-22823486b5e3-kube-api-access-kljp6\") on node \"crc\" DevicePath \"\"" Oct 10 16:44:10 crc kubenswrapper[4799]: I1010 16:44:10.994559 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65a0d7cd-721a-497c-9d14-22823486b5e3-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:44:10 crc kubenswrapper[4799]: I1010 16:44:10.994570 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65a0d7cd-721a-497c-9d14-22823486b5e3-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.031474 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-7bd76bbbc5-wc5qw"] Oct 10 16:44:11 crc kubenswrapper[4799]: E1010 16:44:11.032295 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65a0d7cd-721a-497c-9d14-22823486b5e3" containerName="route-controller-manager" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.032494 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="65a0d7cd-721a-497c-9d14-22823486b5e3" containerName="route-controller-manager" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.032921 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="65a0d7cd-721a-497c-9d14-22823486b5e3" containerName="route-controller-manager" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.033919 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-7bd76bbbc5-wc5qw" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.045480 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7bd76bbbc5-wc5qw"] Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.197483 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkxmz\" (UniqueName: \"kubernetes.io/projected/401bd6b3-1d04-40e1-9e24-63b15caaa94b-kube-api-access-zkxmz\") pod \"console-7bd76bbbc5-wc5qw\" (UID: \"401bd6b3-1d04-40e1-9e24-63b15caaa94b\") " pod="openshift-console/console-7bd76bbbc5-wc5qw" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.197536 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/401bd6b3-1d04-40e1-9e24-63b15caaa94b-console-serving-cert\") pod \"console-7bd76bbbc5-wc5qw\" (UID: \"401bd6b3-1d04-40e1-9e24-63b15caaa94b\") " pod="openshift-console/console-7bd76bbbc5-wc5qw" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.197576 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/401bd6b3-1d04-40e1-9e24-63b15caaa94b-service-ca\") pod \"console-7bd76bbbc5-wc5qw\" (UID: \"401bd6b3-1d04-40e1-9e24-63b15caaa94b\") " pod="openshift-console/console-7bd76bbbc5-wc5qw" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.197639 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/401bd6b3-1d04-40e1-9e24-63b15caaa94b-console-config\") pod \"console-7bd76bbbc5-wc5qw\" (UID: \"401bd6b3-1d04-40e1-9e24-63b15caaa94b\") " pod="openshift-console/console-7bd76bbbc5-wc5qw" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.197715 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/401bd6b3-1d04-40e1-9e24-63b15caaa94b-console-oauth-config\") pod \"console-7bd76bbbc5-wc5qw\" (UID: \"401bd6b3-1d04-40e1-9e24-63b15caaa94b\") " pod="openshift-console/console-7bd76bbbc5-wc5qw" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.197743 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/401bd6b3-1d04-40e1-9e24-63b15caaa94b-trusted-ca-bundle\") pod \"console-7bd76bbbc5-wc5qw\" (UID: \"401bd6b3-1d04-40e1-9e24-63b15caaa94b\") " pod="openshift-console/console-7bd76bbbc5-wc5qw" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.197846 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/401bd6b3-1d04-40e1-9e24-63b15caaa94b-oauth-serving-cert\") pod \"console-7bd76bbbc5-wc5qw\" (UID: \"401bd6b3-1d04-40e1-9e24-63b15caaa94b\") " pod="openshift-console/console-7bd76bbbc5-wc5qw" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.299677 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/401bd6b3-1d04-40e1-9e24-63b15caaa94b-console-oauth-config\") pod \"console-7bd76bbbc5-wc5qw\" (UID: \"401bd6b3-1d04-40e1-9e24-63b15caaa94b\") " pod="openshift-console/console-7bd76bbbc5-wc5qw" Oct 10 16:44:11 crc 
kubenswrapper[4799]: I1010 16:44:11.299997 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/401bd6b3-1d04-40e1-9e24-63b15caaa94b-trusted-ca-bundle\") pod \"console-7bd76bbbc5-wc5qw\" (UID: \"401bd6b3-1d04-40e1-9e24-63b15caaa94b\") " pod="openshift-console/console-7bd76bbbc5-wc5qw" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.300133 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/401bd6b3-1d04-40e1-9e24-63b15caaa94b-oauth-serving-cert\") pod \"console-7bd76bbbc5-wc5qw\" (UID: \"401bd6b3-1d04-40e1-9e24-63b15caaa94b\") " pod="openshift-console/console-7bd76bbbc5-wc5qw" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.300239 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkxmz\" (UniqueName: \"kubernetes.io/projected/401bd6b3-1d04-40e1-9e24-63b15caaa94b-kube-api-access-zkxmz\") pod \"console-7bd76bbbc5-wc5qw\" (UID: \"401bd6b3-1d04-40e1-9e24-63b15caaa94b\") " pod="openshift-console/console-7bd76bbbc5-wc5qw" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.300353 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/401bd6b3-1d04-40e1-9e24-63b15caaa94b-console-serving-cert\") pod \"console-7bd76bbbc5-wc5qw\" (UID: \"401bd6b3-1d04-40e1-9e24-63b15caaa94b\") " pod="openshift-console/console-7bd76bbbc5-wc5qw" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.300476 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/401bd6b3-1d04-40e1-9e24-63b15caaa94b-service-ca\") pod \"console-7bd76bbbc5-wc5qw\" (UID: \"401bd6b3-1d04-40e1-9e24-63b15caaa94b\") " pod="openshift-console/console-7bd76bbbc5-wc5qw" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.300600 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/401bd6b3-1d04-40e1-9e24-63b15caaa94b-console-config\") pod \"console-7bd76bbbc5-wc5qw\" (UID: \"401bd6b3-1d04-40e1-9e24-63b15caaa94b\") " pod="openshift-console/console-7bd76bbbc5-wc5qw" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.301469 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/401bd6b3-1d04-40e1-9e24-63b15caaa94b-trusted-ca-bundle\") pod \"console-7bd76bbbc5-wc5qw\" (UID: \"401bd6b3-1d04-40e1-9e24-63b15caaa94b\") " pod="openshift-console/console-7bd76bbbc5-wc5qw" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.301610 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/401bd6b3-1d04-40e1-9e24-63b15caaa94b-service-ca\") pod \"console-7bd76bbbc5-wc5qw\" (UID: \"401bd6b3-1d04-40e1-9e24-63b15caaa94b\") " pod="openshift-console/console-7bd76bbbc5-wc5qw" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.301699 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/401bd6b3-1d04-40e1-9e24-63b15caaa94b-oauth-serving-cert\") pod \"console-7bd76bbbc5-wc5qw\" (UID: \"401bd6b3-1d04-40e1-9e24-63b15caaa94b\") " pod="openshift-console/console-7bd76bbbc5-wc5qw" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.301884 
4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/401bd6b3-1d04-40e1-9e24-63b15caaa94b-console-config\") pod \"console-7bd76bbbc5-wc5qw\" (UID: \"401bd6b3-1d04-40e1-9e24-63b15caaa94b\") " pod="openshift-console/console-7bd76bbbc5-wc5qw" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.304032 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/401bd6b3-1d04-40e1-9e24-63b15caaa94b-console-oauth-config\") pod \"console-7bd76bbbc5-wc5qw\" (UID: \"401bd6b3-1d04-40e1-9e24-63b15caaa94b\") " pod="openshift-console/console-7bd76bbbc5-wc5qw" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.313904 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/401bd6b3-1d04-40e1-9e24-63b15caaa94b-console-serving-cert\") pod \"console-7bd76bbbc5-wc5qw\" (UID: \"401bd6b3-1d04-40e1-9e24-63b15caaa94b\") " pod="openshift-console/console-7bd76bbbc5-wc5qw" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.326736 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkxmz\" (UniqueName: \"kubernetes.io/projected/401bd6b3-1d04-40e1-9e24-63b15caaa94b-kube-api-access-zkxmz\") pod \"console-7bd76bbbc5-wc5qw\" (UID: \"401bd6b3-1d04-40e1-9e24-63b15caaa94b\") " pod="openshift-console/console-7bd76bbbc5-wc5qw" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.350256 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-7bd76bbbc5-wc5qw" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.370698 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7c59c6bb75-l5crw" event={"ID":"7a4ed768-740f-41cc-a806-5d8dd36a294a","Type":"ContainerStarted","Data":"a43797bc8268b63970e19c66f1eaaf02559d412bce4d727bb0bd5bffd05b992f"} Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.370864 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-7c59c6bb75-l5crw" podUID="7a4ed768-740f-41cc-a806-5d8dd36a294a" containerName="controller-manager" containerID="cri-o://a43797bc8268b63970e19c66f1eaaf02559d412bce4d727bb0bd5bffd05b992f" gracePeriod=30 Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.373974 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-855d595dcb-r2pw5_65a0d7cd-721a-497c-9d14-22823486b5e3/route-controller-manager/0.log" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.374297 4799 generic.go:334] "Generic (PLEG): container finished" podID="65a0d7cd-721a-497c-9d14-22823486b5e3" containerID="d2eb3e3c1777ff3be43ce8780b9366dca334d79ef732f1ab2cf0458002f9ac5e" exitCode=2 Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.374418 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-855d595dcb-r2pw5" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.374432 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-855d595dcb-r2pw5" event={"ID":"65a0d7cd-721a-497c-9d14-22823486b5e3","Type":"ContainerDied","Data":"d2eb3e3c1777ff3be43ce8780b9366dca334d79ef732f1ab2cf0458002f9ac5e"} Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.374873 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-855d595dcb-r2pw5" event={"ID":"65a0d7cd-721a-497c-9d14-22823486b5e3","Type":"ContainerDied","Data":"f9c7a15466e196c923db88d8d5f7cb238d9db8c4a1d519006b806b9e07afcb6b"} Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.374899 4799 scope.go:117] "RemoveContainer" containerID="d2eb3e3c1777ff3be43ce8780b9366dca334d79ef732f1ab2cf0458002f9ac5e" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.381082 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-stz8n" event={"ID":"9a1584c9-53d3-448d-a84d-123526cfc076","Type":"ContainerStarted","Data":"0fd29caa0105caeb9d2fc9d3844fd908372dd9bacf39330741c1aff9fb6bdbd1"} Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.384182 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-5j22z" event={"ID":"03e8f0e3-1119-4f24-9625-4f799ca6d87f","Type":"ContainerStarted","Data":"ef1282a23cd1b11cf8f05dc3b49c73407ac673134df41c066c90a086b063f884"} Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.394447 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7c59c6bb75-l5crw" podStartSLOduration=3.394420463 podStartE2EDuration="3.394420463s" podCreationTimestamp="2025-10-10 16:44:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:44:11.387175935 +0000 UTC m=+744.895500060" watchObservedRunningTime="2025-10-10 16:44:11.394420463 +0000 UTC m=+744.902744588" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.431829 4799 scope.go:117] "RemoveContainer" containerID="d2eb3e3c1777ff3be43ce8780b9366dca334d79ef732f1ab2cf0458002f9ac5e" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.434951 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-855d595dcb-r2pw5"] Oct 10 16:44:11 crc kubenswrapper[4799]: E1010 16:44:11.436379 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d2eb3e3c1777ff3be43ce8780b9366dca334d79ef732f1ab2cf0458002f9ac5e\": container with ID starting with d2eb3e3c1777ff3be43ce8780b9366dca334d79ef732f1ab2cf0458002f9ac5e not found: ID does not exist" containerID="d2eb3e3c1777ff3be43ce8780b9366dca334d79ef732f1ab2cf0458002f9ac5e" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.436419 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2eb3e3c1777ff3be43ce8780b9366dca334d79ef732f1ab2cf0458002f9ac5e"} err="failed to get container status \"d2eb3e3c1777ff3be43ce8780b9366dca334d79ef732f1ab2cf0458002f9ac5e\": rpc error: code = NotFound desc = could not find container \"d2eb3e3c1777ff3be43ce8780b9366dca334d79ef732f1ab2cf0458002f9ac5e\": 
Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.442620 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-855d595dcb-r2pw5"]
Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.559688 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5b5b675445-dn979"]
Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.560907 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5b5b675445-dn979"
Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.564463 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.566972 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.567002 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.567181 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.567397 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.572736 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.573888 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5b5b675445-dn979"]
Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.715176 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f5aea425-57a0-413b-824f-19d043788362-serving-cert\") pod \"route-controller-manager-5b5b675445-dn979\" (UID: \"f5aea425-57a0-413b-824f-19d043788362\") " pod="openshift-route-controller-manager/route-controller-manager-5b5b675445-dn979"
Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.715247 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f5aea425-57a0-413b-824f-19d043788362-client-ca\") pod \"route-controller-manager-5b5b675445-dn979\" (UID: \"f5aea425-57a0-413b-824f-19d043788362\") " pod="openshift-route-controller-manager/route-controller-manager-5b5b675445-dn979"
Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.715277 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8t5w\" (UniqueName: \"kubernetes.io/projected/f5aea425-57a0-413b-824f-19d043788362-kube-api-access-r8t5w\") pod \"route-controller-manager-5b5b675445-dn979\" (UID: \"f5aea425-57a0-413b-824f-19d043788362\") " pod="openshift-route-controller-manager/route-controller-manager-5b5b675445-dn979"
Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.715305 4799
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5aea425-57a0-413b-824f-19d043788362-config\") pod \"route-controller-manager-5b5b675445-dn979\" (UID: \"f5aea425-57a0-413b-824f-19d043788362\") " pod="openshift-route-controller-manager/route-controller-manager-5b5b675445-dn979" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.778355 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7bd76bbbc5-wc5qw"] Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.797512 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7c59c6bb75-l5crw" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.820444 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f5aea425-57a0-413b-824f-19d043788362-client-ca\") pod \"route-controller-manager-5b5b675445-dn979\" (UID: \"f5aea425-57a0-413b-824f-19d043788362\") " pod="openshift-route-controller-manager/route-controller-manager-5b5b675445-dn979" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.820489 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8t5w\" (UniqueName: \"kubernetes.io/projected/f5aea425-57a0-413b-824f-19d043788362-kube-api-access-r8t5w\") pod \"route-controller-manager-5b5b675445-dn979\" (UID: \"f5aea425-57a0-413b-824f-19d043788362\") " pod="openshift-route-controller-manager/route-controller-manager-5b5b675445-dn979" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.820518 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5aea425-57a0-413b-824f-19d043788362-config\") pod \"route-controller-manager-5b5b675445-dn979\" (UID: \"f5aea425-57a0-413b-824f-19d043788362\") " pod="openshift-route-controller-manager/route-controller-manager-5b5b675445-dn979" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.820583 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f5aea425-57a0-413b-824f-19d043788362-serving-cert\") pod \"route-controller-manager-5b5b675445-dn979\" (UID: \"f5aea425-57a0-413b-824f-19d043788362\") " pod="openshift-route-controller-manager/route-controller-manager-5b5b675445-dn979" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.821986 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f5aea425-57a0-413b-824f-19d043788362-client-ca\") pod \"route-controller-manager-5b5b675445-dn979\" (UID: \"f5aea425-57a0-413b-824f-19d043788362\") " pod="openshift-route-controller-manager/route-controller-manager-5b5b675445-dn979" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.823315 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5aea425-57a0-413b-824f-19d043788362-config\") pod \"route-controller-manager-5b5b675445-dn979\" (UID: \"f5aea425-57a0-413b-824f-19d043788362\") " pod="openshift-route-controller-manager/route-controller-manager-5b5b675445-dn979" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.827695 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/f5aea425-57a0-413b-824f-19d043788362-serving-cert\") pod \"route-controller-manager-5b5b675445-dn979\" (UID: \"f5aea425-57a0-413b-824f-19d043788362\") " pod="openshift-route-controller-manager/route-controller-manager-5b5b675445-dn979" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.852317 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r8t5w\" (UniqueName: \"kubernetes.io/projected/f5aea425-57a0-413b-824f-19d043788362-kube-api-access-r8t5w\") pod \"route-controller-manager-5b5b675445-dn979\" (UID: \"f5aea425-57a0-413b-824f-19d043788362\") " pod="openshift-route-controller-manager/route-controller-manager-5b5b675445-dn979" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.885911 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5b5b675445-dn979" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.921262 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a4ed768-740f-41cc-a806-5d8dd36a294a-config\") pod \"7a4ed768-740f-41cc-a806-5d8dd36a294a\" (UID: \"7a4ed768-740f-41cc-a806-5d8dd36a294a\") " Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.921330 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7a4ed768-740f-41cc-a806-5d8dd36a294a-serving-cert\") pod \"7a4ed768-740f-41cc-a806-5d8dd36a294a\" (UID: \"7a4ed768-740f-41cc-a806-5d8dd36a294a\") " Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.921374 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7a4ed768-740f-41cc-a806-5d8dd36a294a-proxy-ca-bundles\") pod \"7a4ed768-740f-41cc-a806-5d8dd36a294a\" (UID: \"7a4ed768-740f-41cc-a806-5d8dd36a294a\") " Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.921425 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bh55j\" (UniqueName: \"kubernetes.io/projected/7a4ed768-740f-41cc-a806-5d8dd36a294a-kube-api-access-bh55j\") pod \"7a4ed768-740f-41cc-a806-5d8dd36a294a\" (UID: \"7a4ed768-740f-41cc-a806-5d8dd36a294a\") " Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.921441 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7a4ed768-740f-41cc-a806-5d8dd36a294a-client-ca\") pod \"7a4ed768-740f-41cc-a806-5d8dd36a294a\" (UID: \"7a4ed768-740f-41cc-a806-5d8dd36a294a\") " Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.922482 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a4ed768-740f-41cc-a806-5d8dd36a294a-client-ca" (OuterVolumeSpecName: "client-ca") pod "7a4ed768-740f-41cc-a806-5d8dd36a294a" (UID: "7a4ed768-740f-41cc-a806-5d8dd36a294a"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.923205 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a4ed768-740f-41cc-a806-5d8dd36a294a-config" (OuterVolumeSpecName: "config") pod "7a4ed768-740f-41cc-a806-5d8dd36a294a" (UID: "7a4ed768-740f-41cc-a806-5d8dd36a294a"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.924053 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a4ed768-740f-41cc-a806-5d8dd36a294a-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7a4ed768-740f-41cc-a806-5d8dd36a294a" (UID: "7a4ed768-740f-41cc-a806-5d8dd36a294a"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.926898 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a4ed768-740f-41cc-a806-5d8dd36a294a-kube-api-access-bh55j" (OuterVolumeSpecName: "kube-api-access-bh55j") pod "7a4ed768-740f-41cc-a806-5d8dd36a294a" (UID: "7a4ed768-740f-41cc-a806-5d8dd36a294a"). InnerVolumeSpecName "kube-api-access-bh55j". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:44:11 crc kubenswrapper[4799]: I1010 16:44:11.939937 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a4ed768-740f-41cc-a806-5d8dd36a294a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7a4ed768-740f-41cc-a806-5d8dd36a294a" (UID: "7a4ed768-740f-41cc-a806-5d8dd36a294a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:44:12 crc kubenswrapper[4799]: I1010 16:44:12.026658 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bh55j\" (UniqueName: \"kubernetes.io/projected/7a4ed768-740f-41cc-a806-5d8dd36a294a-kube-api-access-bh55j\") on node \"crc\" DevicePath \"\"" Oct 10 16:44:12 crc kubenswrapper[4799]: I1010 16:44:12.026684 4799 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7a4ed768-740f-41cc-a806-5d8dd36a294a-client-ca\") on node \"crc\" DevicePath \"\"" Oct 10 16:44:12 crc kubenswrapper[4799]: I1010 16:44:12.026695 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a4ed768-740f-41cc-a806-5d8dd36a294a-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:44:12 crc kubenswrapper[4799]: I1010 16:44:12.026703 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7a4ed768-740f-41cc-a806-5d8dd36a294a-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:44:12 crc kubenswrapper[4799]: I1010 16:44:12.026711 4799 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7a4ed768-740f-41cc-a806-5d8dd36a294a-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 10 16:44:12 crc kubenswrapper[4799]: I1010 16:44:12.147705 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5b5b675445-dn979"] Oct 10 16:44:12 crc kubenswrapper[4799]: I1010 16:44:12.395619 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7bd76bbbc5-wc5qw" event={"ID":"401bd6b3-1d04-40e1-9e24-63b15caaa94b","Type":"ContainerStarted","Data":"ce76f0ded30ca8e02764fa95cfb5f459f8bf3c0c910ad383b59a785eb3a5df08"} Oct 10 16:44:12 crc kubenswrapper[4799]: I1010 16:44:12.395661 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7bd76bbbc5-wc5qw" 
event={"ID":"401bd6b3-1d04-40e1-9e24-63b15caaa94b","Type":"ContainerStarted","Data":"faa67cc08005fb9101c512a6f2d5eef92dad7def634320f59e42db675095edeb"} Oct 10 16:44:12 crc kubenswrapper[4799]: I1010 16:44:12.396894 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5b5b675445-dn979" event={"ID":"f5aea425-57a0-413b-824f-19d043788362","Type":"ContainerStarted","Data":"9f290e74a27da3a45e7ea5d42a27fb56a798adf289f98f51c2dea3b2560340e7"} Oct 10 16:44:12 crc kubenswrapper[4799]: I1010 16:44:12.396967 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5b5b675445-dn979" event={"ID":"f5aea425-57a0-413b-824f-19d043788362","Type":"ContainerStarted","Data":"534eaa841c756a6260fde9cb3e3f1b0abec1d4805dce37717a4fda640114fba5"} Oct 10 16:44:12 crc kubenswrapper[4799]: I1010 16:44:12.398445 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5b5b675445-dn979" Oct 10 16:44:12 crc kubenswrapper[4799]: I1010 16:44:12.400739 4799 patch_prober.go:28] interesting pod/route-controller-manager-5b5b675445-dn979 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.49:8443/healthz\": dial tcp 10.217.0.49:8443: connect: connection refused" start-of-body= Oct 10 16:44:12 crc kubenswrapper[4799]: I1010 16:44:12.400801 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-5b5b675445-dn979" podUID="f5aea425-57a0-413b-824f-19d043788362" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.49:8443/healthz\": dial tcp 10.217.0.49:8443: connect: connection refused" Oct 10 16:44:12 crc kubenswrapper[4799]: I1010 16:44:12.405072 4799 generic.go:334] "Generic (PLEG): container finished" podID="7a4ed768-740f-41cc-a806-5d8dd36a294a" containerID="a43797bc8268b63970e19c66f1eaaf02559d412bce4d727bb0bd5bffd05b992f" exitCode=0 Oct 10 16:44:12 crc kubenswrapper[4799]: I1010 16:44:12.405110 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7c59c6bb75-l5crw" event={"ID":"7a4ed768-740f-41cc-a806-5d8dd36a294a","Type":"ContainerDied","Data":"a43797bc8268b63970e19c66f1eaaf02559d412bce4d727bb0bd5bffd05b992f"} Oct 10 16:44:12 crc kubenswrapper[4799]: I1010 16:44:12.405131 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7c59c6bb75-l5crw" event={"ID":"7a4ed768-740f-41cc-a806-5d8dd36a294a","Type":"ContainerDied","Data":"80c10c82e2d50060283458383c9ccefd99e0027882353bf8721aac4c5accc1f7"} Oct 10 16:44:12 crc kubenswrapper[4799]: I1010 16:44:12.405147 4799 scope.go:117] "RemoveContainer" containerID="a43797bc8268b63970e19c66f1eaaf02559d412bce4d727bb0bd5bffd05b992f" Oct 10 16:44:12 crc kubenswrapper[4799]: I1010 16:44:12.405174 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7c59c6bb75-l5crw" Oct 10 16:44:12 crc kubenswrapper[4799]: I1010 16:44:12.411815 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-7bd76bbbc5-wc5qw" podStartSLOduration=1.411793999 podStartE2EDuration="1.411793999s" podCreationTimestamp="2025-10-10 16:44:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:44:12.410700222 +0000 UTC m=+745.919024357" watchObservedRunningTime="2025-10-10 16:44:12.411793999 +0000 UTC m=+745.920118124" Oct 10 16:44:12 crc kubenswrapper[4799]: I1010 16:44:12.432089 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5b5b675445-dn979" podStartSLOduration=3.4320683880000002 podStartE2EDuration="3.432068388s" podCreationTimestamp="2025-10-10 16:44:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:44:12.430566481 +0000 UTC m=+745.938890616" watchObservedRunningTime="2025-10-10 16:44:12.432068388 +0000 UTC m=+745.940392523" Oct 10 16:44:12 crc kubenswrapper[4799]: I1010 16:44:12.435226 4799 scope.go:117] "RemoveContainer" containerID="a43797bc8268b63970e19c66f1eaaf02559d412bce4d727bb0bd5bffd05b992f" Oct 10 16:44:12 crc kubenswrapper[4799]: E1010 16:44:12.436469 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a43797bc8268b63970e19c66f1eaaf02559d412bce4d727bb0bd5bffd05b992f\": container with ID starting with a43797bc8268b63970e19c66f1eaaf02559d412bce4d727bb0bd5bffd05b992f not found: ID does not exist" containerID="a43797bc8268b63970e19c66f1eaaf02559d412bce4d727bb0bd5bffd05b992f" Oct 10 16:44:12 crc kubenswrapper[4799]: I1010 16:44:12.436521 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a43797bc8268b63970e19c66f1eaaf02559d412bce4d727bb0bd5bffd05b992f"} err="failed to get container status \"a43797bc8268b63970e19c66f1eaaf02559d412bce4d727bb0bd5bffd05b992f\": rpc error: code = NotFound desc = could not find container \"a43797bc8268b63970e19c66f1eaaf02559d412bce4d727bb0bd5bffd05b992f\": container with ID starting with a43797bc8268b63970e19c66f1eaaf02559d412bce4d727bb0bd5bffd05b992f not found: ID does not exist" Oct 10 16:44:12 crc kubenswrapper[4799]: I1010 16:44:12.447476 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7c59c6bb75-l5crw"] Oct 10 16:44:12 crc kubenswrapper[4799]: I1010 16:44:12.470962 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-7c59c6bb75-l5crw"] Oct 10 16:44:13 crc kubenswrapper[4799]: I1010 16:44:13.409458 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65a0d7cd-721a-497c-9d14-22823486b5e3" path="/var/lib/kubelet/pods/65a0d7cd-721a-497c-9d14-22823486b5e3/volumes" Oct 10 16:44:13 crc kubenswrapper[4799]: I1010 16:44:13.410481 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a4ed768-740f-41cc-a806-5d8dd36a294a" path="/var/lib/kubelet/pods/7a4ed768-740f-41cc-a806-5d8dd36a294a/volumes" Oct 10 16:44:13 crc kubenswrapper[4799]: I1010 16:44:13.416388 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-route-controller-manager/route-controller-manager-5b5b675445-dn979" Oct 10 16:44:13 crc kubenswrapper[4799]: I1010 16:44:13.559963 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-546b464d68-rd8fr"] Oct 10 16:44:13 crc kubenswrapper[4799]: E1010 16:44:13.560228 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a4ed768-740f-41cc-a806-5d8dd36a294a" containerName="controller-manager" Oct 10 16:44:13 crc kubenswrapper[4799]: I1010 16:44:13.560246 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a4ed768-740f-41cc-a806-5d8dd36a294a" containerName="controller-manager" Oct 10 16:44:13 crc kubenswrapper[4799]: I1010 16:44:13.560356 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a4ed768-740f-41cc-a806-5d8dd36a294a" containerName="controller-manager" Oct 10 16:44:13 crc kubenswrapper[4799]: I1010 16:44:13.560885 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-546b464d68-rd8fr" Oct 10 16:44:13 crc kubenswrapper[4799]: I1010 16:44:13.566953 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 10 16:44:13 crc kubenswrapper[4799]: I1010 16:44:13.567185 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 10 16:44:13 crc kubenswrapper[4799]: I1010 16:44:13.569008 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 10 16:44:13 crc kubenswrapper[4799]: I1010 16:44:13.569247 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 10 16:44:13 crc kubenswrapper[4799]: I1010 16:44:13.569624 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 10 16:44:13 crc kubenswrapper[4799]: I1010 16:44:13.571236 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 10 16:44:13 crc kubenswrapper[4799]: I1010 16:44:13.571725 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-546b464d68-rd8fr"] Oct 10 16:44:13 crc kubenswrapper[4799]: I1010 16:44:13.613609 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 10 16:44:13 crc kubenswrapper[4799]: I1010 16:44:13.652143 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc6ac9a0-2b60-4800-83a7-390cbb747a0b-config\") pod \"controller-manager-546b464d68-rd8fr\" (UID: \"dc6ac9a0-2b60-4800-83a7-390cbb747a0b\") " pod="openshift-controller-manager/controller-manager-546b464d68-rd8fr" Oct 10 16:44:13 crc kubenswrapper[4799]: I1010 16:44:13.652256 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dc6ac9a0-2b60-4800-83a7-390cbb747a0b-serving-cert\") pod \"controller-manager-546b464d68-rd8fr\" (UID: \"dc6ac9a0-2b60-4800-83a7-390cbb747a0b\") " pod="openshift-controller-manager/controller-manager-546b464d68-rd8fr" Oct 10 16:44:13 crc kubenswrapper[4799]: I1010 16:44:13.652293 4799 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/dc6ac9a0-2b60-4800-83a7-390cbb747a0b-proxy-ca-bundles\") pod \"controller-manager-546b464d68-rd8fr\" (UID: \"dc6ac9a0-2b60-4800-83a7-390cbb747a0b\") " pod="openshift-controller-manager/controller-manager-546b464d68-rd8fr" Oct 10 16:44:13 crc kubenswrapper[4799]: I1010 16:44:13.652324 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ztv99\" (UniqueName: \"kubernetes.io/projected/dc6ac9a0-2b60-4800-83a7-390cbb747a0b-kube-api-access-ztv99\") pod \"controller-manager-546b464d68-rd8fr\" (UID: \"dc6ac9a0-2b60-4800-83a7-390cbb747a0b\") " pod="openshift-controller-manager/controller-manager-546b464d68-rd8fr" Oct 10 16:44:13 crc kubenswrapper[4799]: I1010 16:44:13.652532 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dc6ac9a0-2b60-4800-83a7-390cbb747a0b-client-ca\") pod \"controller-manager-546b464d68-rd8fr\" (UID: \"dc6ac9a0-2b60-4800-83a7-390cbb747a0b\") " pod="openshift-controller-manager/controller-manager-546b464d68-rd8fr" Oct 10 16:44:13 crc kubenswrapper[4799]: I1010 16:44:13.754032 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dc6ac9a0-2b60-4800-83a7-390cbb747a0b-serving-cert\") pod \"controller-manager-546b464d68-rd8fr\" (UID: \"dc6ac9a0-2b60-4800-83a7-390cbb747a0b\") " pod="openshift-controller-manager/controller-manager-546b464d68-rd8fr" Oct 10 16:44:13 crc kubenswrapper[4799]: I1010 16:44:13.754074 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/dc6ac9a0-2b60-4800-83a7-390cbb747a0b-proxy-ca-bundles\") pod \"controller-manager-546b464d68-rd8fr\" (UID: \"dc6ac9a0-2b60-4800-83a7-390cbb747a0b\") " pod="openshift-controller-manager/controller-manager-546b464d68-rd8fr" Oct 10 16:44:13 crc kubenswrapper[4799]: I1010 16:44:13.754102 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ztv99\" (UniqueName: \"kubernetes.io/projected/dc6ac9a0-2b60-4800-83a7-390cbb747a0b-kube-api-access-ztv99\") pod \"controller-manager-546b464d68-rd8fr\" (UID: \"dc6ac9a0-2b60-4800-83a7-390cbb747a0b\") " pod="openshift-controller-manager/controller-manager-546b464d68-rd8fr" Oct 10 16:44:13 crc kubenswrapper[4799]: I1010 16:44:13.754131 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dc6ac9a0-2b60-4800-83a7-390cbb747a0b-client-ca\") pod \"controller-manager-546b464d68-rd8fr\" (UID: \"dc6ac9a0-2b60-4800-83a7-390cbb747a0b\") " pod="openshift-controller-manager/controller-manager-546b464d68-rd8fr" Oct 10 16:44:13 crc kubenswrapper[4799]: I1010 16:44:13.754155 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc6ac9a0-2b60-4800-83a7-390cbb747a0b-config\") pod \"controller-manager-546b464d68-rd8fr\" (UID: \"dc6ac9a0-2b60-4800-83a7-390cbb747a0b\") " pod="openshift-controller-manager/controller-manager-546b464d68-rd8fr" Oct 10 16:44:13 crc kubenswrapper[4799]: I1010 16:44:13.755502 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/dc6ac9a0-2b60-4800-83a7-390cbb747a0b-client-ca\") pod \"controller-manager-546b464d68-rd8fr\" (UID: \"dc6ac9a0-2b60-4800-83a7-390cbb747a0b\") " pod="openshift-controller-manager/controller-manager-546b464d68-rd8fr" Oct 10 16:44:13 crc kubenswrapper[4799]: I1010 16:44:13.755541 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc6ac9a0-2b60-4800-83a7-390cbb747a0b-config\") pod \"controller-manager-546b464d68-rd8fr\" (UID: \"dc6ac9a0-2b60-4800-83a7-390cbb747a0b\") " pod="openshift-controller-manager/controller-manager-546b464d68-rd8fr" Oct 10 16:44:13 crc kubenswrapper[4799]: I1010 16:44:13.756583 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/dc6ac9a0-2b60-4800-83a7-390cbb747a0b-proxy-ca-bundles\") pod \"controller-manager-546b464d68-rd8fr\" (UID: \"dc6ac9a0-2b60-4800-83a7-390cbb747a0b\") " pod="openshift-controller-manager/controller-manager-546b464d68-rd8fr" Oct 10 16:44:13 crc kubenswrapper[4799]: I1010 16:44:13.763001 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dc6ac9a0-2b60-4800-83a7-390cbb747a0b-serving-cert\") pod \"controller-manager-546b464d68-rd8fr\" (UID: \"dc6ac9a0-2b60-4800-83a7-390cbb747a0b\") " pod="openshift-controller-manager/controller-manager-546b464d68-rd8fr" Oct 10 16:44:13 crc kubenswrapper[4799]: I1010 16:44:13.787314 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ztv99\" (UniqueName: \"kubernetes.io/projected/dc6ac9a0-2b60-4800-83a7-390cbb747a0b-kube-api-access-ztv99\") pod \"controller-manager-546b464d68-rd8fr\" (UID: \"dc6ac9a0-2b60-4800-83a7-390cbb747a0b\") " pod="openshift-controller-manager/controller-manager-546b464d68-rd8fr" Oct 10 16:44:13 crc kubenswrapper[4799]: I1010 16:44:13.926561 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-546b464d68-rd8fr" Oct 10 16:44:14 crc kubenswrapper[4799]: I1010 16:44:14.159321 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-546b464d68-rd8fr"] Oct 10 16:44:14 crc kubenswrapper[4799]: W1010 16:44:14.165718 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddc6ac9a0_2b60_4800_83a7_390cbb747a0b.slice/crio-75141cbc16177901c5ddc8fb33d9c1e093ac88167ccd8bb036713e402cd333be WatchSource:0}: Error finding container 75141cbc16177901c5ddc8fb33d9c1e093ac88167ccd8bb036713e402cd333be: Status 404 returned error can't find the container with id 75141cbc16177901c5ddc8fb33d9c1e093ac88167ccd8bb036713e402cd333be Oct 10 16:44:14 crc kubenswrapper[4799]: I1010 16:44:14.419032 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-5j22z" event={"ID":"03e8f0e3-1119-4f24-9625-4f799ca6d87f","Type":"ContainerStarted","Data":"e61d7b3e36140e964ab99277a0618b3929659181c621e42c7848b8b2cbce4c82"} Oct 10 16:44:14 crc kubenswrapper[4799]: I1010 16:44:14.419164 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-5j22z" Oct 10 16:44:14 crc kubenswrapper[4799]: I1010 16:44:14.421256 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-8szkw" event={"ID":"cf339ce7-7475-4cea-a474-df2df7c47cbc","Type":"ContainerStarted","Data":"4ee4fe36a26aaf635889f4c4140d1e03c884db392436147a15c643aeea49c9b4"} Oct 10 16:44:14 crc kubenswrapper[4799]: I1010 16:44:14.426014 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-546b464d68-rd8fr" event={"ID":"dc6ac9a0-2b60-4800-83a7-390cbb747a0b","Type":"ContainerStarted","Data":"f8ce7c22b09990bab4a85ee36340e63c92cd0d2354d6963ca8f56b29fc0d2dd3"} Oct 10 16:44:14 crc kubenswrapper[4799]: I1010 16:44:14.426053 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-546b464d68-rd8fr" event={"ID":"dc6ac9a0-2b60-4800-83a7-390cbb747a0b","Type":"ContainerStarted","Data":"75141cbc16177901c5ddc8fb33d9c1e093ac88167ccd8bb036713e402cd333be"} Oct 10 16:44:14 crc kubenswrapper[4799]: I1010 16:44:14.426226 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-546b464d68-rd8fr" Oct 10 16:44:14 crc kubenswrapper[4799]: I1010 16:44:14.429988 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-44gb8" event={"ID":"1cea25d9-ee17-4a2d-8cfc-74b0cad9e91a","Type":"ContainerStarted","Data":"1d75e93d4b7e89dfc4673c0abb37a41ce451a5d01433a6d4e98ff224751755d8"} Oct 10 16:44:14 crc kubenswrapper[4799]: I1010 16:44:14.433995 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-5j22z" podStartSLOduration=2.512558805 podStartE2EDuration="5.433981193s" podCreationTimestamp="2025-10-10 16:44:09 +0000 UTC" firstStartedPulling="2025-10-10 16:44:10.888320334 +0000 UTC m=+744.396644449" lastFinishedPulling="2025-10-10 16:44:13.809742712 +0000 UTC m=+747.318066837" observedRunningTime="2025-10-10 16:44:14.433523611 +0000 UTC m=+747.941847746" watchObservedRunningTime="2025-10-10 16:44:14.433981193 +0000 UTC m=+747.942305318" Oct 10 16:44:14 crc kubenswrapper[4799]: I1010 
16:44:14.438797 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-546b464d68-rd8fr" Oct 10 16:44:14 crc kubenswrapper[4799]: I1010 16:44:14.462093 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-546b464d68-rd8fr" podStartSLOduration=5.462066435 podStartE2EDuration="5.462066435s" podCreationTimestamp="2025-10-10 16:44:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:44:14.455342479 +0000 UTC m=+747.963666604" watchObservedRunningTime="2025-10-10 16:44:14.462066435 +0000 UTC m=+747.970390590" Oct 10 16:44:14 crc kubenswrapper[4799]: I1010 16:44:14.477210 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-44gb8" podStartSLOduration=1.507833308 podStartE2EDuration="5.477193767s" podCreationTimestamp="2025-10-10 16:44:09 +0000 UTC" firstStartedPulling="2025-10-10 16:44:09.828227214 +0000 UTC m=+743.336551319" lastFinishedPulling="2025-10-10 16:44:13.797587663 +0000 UTC m=+747.305911778" observedRunningTime="2025-10-10 16:44:14.471315322 +0000 UTC m=+747.979639457" watchObservedRunningTime="2025-10-10 16:44:14.477193767 +0000 UTC m=+747.985517892" Oct 10 16:44:14 crc kubenswrapper[4799]: I1010 16:44:14.791053 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-44gb8" Oct 10 16:44:14 crc kubenswrapper[4799]: I1010 16:44:14.917238 4799 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 10 16:44:15 crc kubenswrapper[4799]: I1010 16:44:15.248512 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 16:44:15 crc kubenswrapper[4799]: I1010 16:44:15.248592 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 16:44:16 crc kubenswrapper[4799]: I1010 16:44:16.462562 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-stz8n" event={"ID":"9a1584c9-53d3-448d-a84d-123526cfc076","Type":"ContainerStarted","Data":"2835d0d60ee85222978c8bd438822978f997ba70f0e0c1cb6ab82841ce031b57"} Oct 10 16:44:17 crc kubenswrapper[4799]: I1010 16:44:17.434454 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-stz8n" podStartSLOduration=3.762645643 podStartE2EDuration="8.434429148s" podCreationTimestamp="2025-10-10 16:44:09 +0000 UTC" firstStartedPulling="2025-10-10 16:44:10.62678843 +0000 UTC m=+744.135112545" lastFinishedPulling="2025-10-10 16:44:15.298571935 +0000 UTC m=+748.806896050" observedRunningTime="2025-10-10 16:44:16.480530586 +0000 UTC m=+749.988854741" watchObservedRunningTime="2025-10-10 16:44:17.434429148 +0000 UTC m=+750.942753273" Oct 10 16:44:17 crc kubenswrapper[4799]: I1010 16:44:17.473913 4799 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-8szkw" event={"ID":"cf339ce7-7475-4cea-a474-df2df7c47cbc","Type":"ContainerStarted","Data":"479b95f7c40868c951810f53b067f14c21461918cb6ff10c6ffcbd485ae300d0"} Oct 10 16:44:17 crc kubenswrapper[4799]: I1010 16:44:17.500504 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-8szkw" podStartSLOduration=1.613532063 podStartE2EDuration="8.500472395s" podCreationTimestamp="2025-10-10 16:44:09 +0000 UTC" firstStartedPulling="2025-10-10 16:44:10.01635201 +0000 UTC m=+743.524676115" lastFinishedPulling="2025-10-10 16:44:16.903292312 +0000 UTC m=+750.411616447" observedRunningTime="2025-10-10 16:44:17.489848104 +0000 UTC m=+750.998172239" watchObservedRunningTime="2025-10-10 16:44:17.500472395 +0000 UTC m=+751.008796530" Oct 10 16:44:19 crc kubenswrapper[4799]: I1010 16:44:19.826779 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-44gb8" Oct 10 16:44:21 crc kubenswrapper[4799]: I1010 16:44:21.350641 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-7bd76bbbc5-wc5qw" Oct 10 16:44:21 crc kubenswrapper[4799]: I1010 16:44:21.351126 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-7bd76bbbc5-wc5qw" Oct 10 16:44:21 crc kubenswrapper[4799]: I1010 16:44:21.358866 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-7bd76bbbc5-wc5qw" Oct 10 16:44:21 crc kubenswrapper[4799]: I1010 16:44:21.505111 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-7bd76bbbc5-wc5qw" Oct 10 16:44:21 crc kubenswrapper[4799]: I1010 16:44:21.583145 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-8lvfs"] Oct 10 16:44:30 crc kubenswrapper[4799]: I1010 16:44:30.388372 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-5j22z" Oct 10 16:44:43 crc kubenswrapper[4799]: I1010 16:44:43.946519 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq"] Oct 10 16:44:43 crc kubenswrapper[4799]: I1010 16:44:43.948452 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq" Oct 10 16:44:43 crc kubenswrapper[4799]: I1010 16:44:43.950039 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 10 16:44:43 crc kubenswrapper[4799]: I1010 16:44:43.954638 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq"] Oct 10 16:44:44 crc kubenswrapper[4799]: I1010 16:44:44.109571 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f532bcbd-a09f-4d14-b41d-0e55252454c2-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq\" (UID: \"f532bcbd-a09f-4d14-b41d-0e55252454c2\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq" Oct 10 16:44:44 crc kubenswrapper[4799]: I1010 16:44:44.109621 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wpgwb\" (UniqueName: \"kubernetes.io/projected/f532bcbd-a09f-4d14-b41d-0e55252454c2-kube-api-access-wpgwb\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq\" (UID: \"f532bcbd-a09f-4d14-b41d-0e55252454c2\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq" Oct 10 16:44:44 crc kubenswrapper[4799]: I1010 16:44:44.109657 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f532bcbd-a09f-4d14-b41d-0e55252454c2-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq\" (UID: \"f532bcbd-a09f-4d14-b41d-0e55252454c2\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq" Oct 10 16:44:44 crc kubenswrapper[4799]: I1010 16:44:44.211293 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f532bcbd-a09f-4d14-b41d-0e55252454c2-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq\" (UID: \"f532bcbd-a09f-4d14-b41d-0e55252454c2\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq" Oct 10 16:44:44 crc kubenswrapper[4799]: I1010 16:44:44.211374 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wpgwb\" (UniqueName: \"kubernetes.io/projected/f532bcbd-a09f-4d14-b41d-0e55252454c2-kube-api-access-wpgwb\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq\" (UID: \"f532bcbd-a09f-4d14-b41d-0e55252454c2\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq" Oct 10 16:44:44 crc kubenswrapper[4799]: I1010 16:44:44.211440 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f532bcbd-a09f-4d14-b41d-0e55252454c2-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq\" (UID: \"f532bcbd-a09f-4d14-b41d-0e55252454c2\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq" Oct 10 16:44:44 crc kubenswrapper[4799]: I1010 16:44:44.211975 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/f532bcbd-a09f-4d14-b41d-0e55252454c2-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq\" (UID: \"f532bcbd-a09f-4d14-b41d-0e55252454c2\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq" Oct 10 16:44:44 crc kubenswrapper[4799]: I1010 16:44:44.211982 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f532bcbd-a09f-4d14-b41d-0e55252454c2-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq\" (UID: \"f532bcbd-a09f-4d14-b41d-0e55252454c2\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq" Oct 10 16:44:44 crc kubenswrapper[4799]: I1010 16:44:44.235952 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wpgwb\" (UniqueName: \"kubernetes.io/projected/f532bcbd-a09f-4d14-b41d-0e55252454c2-kube-api-access-wpgwb\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq\" (UID: \"f532bcbd-a09f-4d14-b41d-0e55252454c2\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq" Oct 10 16:44:44 crc kubenswrapper[4799]: I1010 16:44:44.263634 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq" Oct 10 16:44:44 crc kubenswrapper[4799]: I1010 16:44:44.722178 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq"] Oct 10 16:44:45 crc kubenswrapper[4799]: I1010 16:44:45.249057 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 16:44:45 crc kubenswrapper[4799]: I1010 16:44:45.249524 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 16:44:45 crc kubenswrapper[4799]: I1010 16:44:45.249579 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 16:44:45 crc kubenswrapper[4799]: I1010 16:44:45.250513 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ff017f427623f1c99da82aa1f76b3d32ffeae8d4ca8e7ce1e98dc285ba08fb9c"} pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 10 16:44:45 crc kubenswrapper[4799]: I1010 16:44:45.250582 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" containerID="cri-o://ff017f427623f1c99da82aa1f76b3d32ffeae8d4ca8e7ce1e98dc285ba08fb9c" gracePeriod=600 Oct 10 16:44:45 crc kubenswrapper[4799]: I1010 16:44:45.655824 4799 generic.go:334] "Generic (PLEG): container finished" 
podID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerID="ff017f427623f1c99da82aa1f76b3d32ffeae8d4ca8e7ce1e98dc285ba08fb9c" exitCode=0 Oct 10 16:44:45 crc kubenswrapper[4799]: I1010 16:44:45.655860 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerDied","Data":"ff017f427623f1c99da82aa1f76b3d32ffeae8d4ca8e7ce1e98dc285ba08fb9c"} Oct 10 16:44:45 crc kubenswrapper[4799]: I1010 16:44:45.655904 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"5d0c20be696163127fb1361e7edc7eadb541b7fccbd83cd240ae6b5f02af5dd5"} Oct 10 16:44:45 crc kubenswrapper[4799]: I1010 16:44:45.655924 4799 scope.go:117] "RemoveContainer" containerID="6060cbfbf40b005d1ca61153e05b93d95432b8f5bf820a7b753f840c4cc943ae" Oct 10 16:44:45 crc kubenswrapper[4799]: I1010 16:44:45.659361 4799 generic.go:334] "Generic (PLEG): container finished" podID="f532bcbd-a09f-4d14-b41d-0e55252454c2" containerID="21e6e5510540b8caca21e1698b38434f93a33816848a68f5a2920d5371c20a6b" exitCode=0 Oct 10 16:44:45 crc kubenswrapper[4799]: I1010 16:44:45.659386 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq" event={"ID":"f532bcbd-a09f-4d14-b41d-0e55252454c2","Type":"ContainerDied","Data":"21e6e5510540b8caca21e1698b38434f93a33816848a68f5a2920d5371c20a6b"} Oct 10 16:44:45 crc kubenswrapper[4799]: I1010 16:44:45.659403 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq" event={"ID":"f532bcbd-a09f-4d14-b41d-0e55252454c2","Type":"ContainerStarted","Data":"5b0402e8325757e0f5992c885499c5d0c5069fcccb822447493a15228e5248ea"} Oct 10 16:44:46 crc kubenswrapper[4799]: I1010 16:44:46.080273 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-r6zdv"] Oct 10 16:44:46 crc kubenswrapper[4799]: I1010 16:44:46.082405 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-r6zdv" Oct 10 16:44:46 crc kubenswrapper[4799]: I1010 16:44:46.088544 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-r6zdv"] Oct 10 16:44:46 crc kubenswrapper[4799]: I1010 16:44:46.236430 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ef7eb90-d1ca-4423-9fba-f61bae153e3b-utilities\") pod \"redhat-operators-r6zdv\" (UID: \"2ef7eb90-d1ca-4423-9fba-f61bae153e3b\") " pod="openshift-marketplace/redhat-operators-r6zdv" Oct 10 16:44:46 crc kubenswrapper[4799]: I1010 16:44:46.236501 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79lnp\" (UniqueName: \"kubernetes.io/projected/2ef7eb90-d1ca-4423-9fba-f61bae153e3b-kube-api-access-79lnp\") pod \"redhat-operators-r6zdv\" (UID: \"2ef7eb90-d1ca-4423-9fba-f61bae153e3b\") " pod="openshift-marketplace/redhat-operators-r6zdv" Oct 10 16:44:46 crc kubenswrapper[4799]: I1010 16:44:46.236560 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ef7eb90-d1ca-4423-9fba-f61bae153e3b-catalog-content\") pod \"redhat-operators-r6zdv\" (UID: \"2ef7eb90-d1ca-4423-9fba-f61bae153e3b\") " pod="openshift-marketplace/redhat-operators-r6zdv" Oct 10 16:44:46 crc kubenswrapper[4799]: I1010 16:44:46.338055 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ef7eb90-d1ca-4423-9fba-f61bae153e3b-catalog-content\") pod \"redhat-operators-r6zdv\" (UID: \"2ef7eb90-d1ca-4423-9fba-f61bae153e3b\") " pod="openshift-marketplace/redhat-operators-r6zdv" Oct 10 16:44:46 crc kubenswrapper[4799]: I1010 16:44:46.338172 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ef7eb90-d1ca-4423-9fba-f61bae153e3b-utilities\") pod \"redhat-operators-r6zdv\" (UID: \"2ef7eb90-d1ca-4423-9fba-f61bae153e3b\") " pod="openshift-marketplace/redhat-operators-r6zdv" Oct 10 16:44:46 crc kubenswrapper[4799]: I1010 16:44:46.338207 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79lnp\" (UniqueName: \"kubernetes.io/projected/2ef7eb90-d1ca-4423-9fba-f61bae153e3b-kube-api-access-79lnp\") pod \"redhat-operators-r6zdv\" (UID: \"2ef7eb90-d1ca-4423-9fba-f61bae153e3b\") " pod="openshift-marketplace/redhat-operators-r6zdv" Oct 10 16:44:46 crc kubenswrapper[4799]: I1010 16:44:46.338729 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ef7eb90-d1ca-4423-9fba-f61bae153e3b-utilities\") pod \"redhat-operators-r6zdv\" (UID: \"2ef7eb90-d1ca-4423-9fba-f61bae153e3b\") " pod="openshift-marketplace/redhat-operators-r6zdv" Oct 10 16:44:46 crc kubenswrapper[4799]: I1010 16:44:46.339522 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ef7eb90-d1ca-4423-9fba-f61bae153e3b-catalog-content\") pod \"redhat-operators-r6zdv\" (UID: \"2ef7eb90-d1ca-4423-9fba-f61bae153e3b\") " pod="openshift-marketplace/redhat-operators-r6zdv" Oct 10 16:44:46 crc kubenswrapper[4799]: I1010 16:44:46.371867 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-79lnp\" (UniqueName: \"kubernetes.io/projected/2ef7eb90-d1ca-4423-9fba-f61bae153e3b-kube-api-access-79lnp\") pod \"redhat-operators-r6zdv\" (UID: \"2ef7eb90-d1ca-4423-9fba-f61bae153e3b\") " pod="openshift-marketplace/redhat-operators-r6zdv" Oct 10 16:44:46 crc kubenswrapper[4799]: I1010 16:44:46.416911 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-r6zdv" Oct 10 16:44:46 crc kubenswrapper[4799]: I1010 16:44:46.632078 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-8lvfs" podUID="2866fbf1-3a49-4e4c-867b-86a40ae85ebe" containerName="console" containerID="cri-o://866009b0d6059852883926b4b56b73035c9f18f410a5bc1c0a36dfe8e48c700a" gracePeriod=15 Oct 10 16:44:46 crc kubenswrapper[4799]: I1010 16:44:46.827300 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-r6zdv"] Oct 10 16:44:46 crc kubenswrapper[4799]: W1010 16:44:46.838873 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2ef7eb90_d1ca_4423_9fba_f61bae153e3b.slice/crio-d458d81407ab8a0a46e39be2e2832ec03b8811daab10d7cec1bbfb36baeb38d9 WatchSource:0}: Error finding container d458d81407ab8a0a46e39be2e2832ec03b8811daab10d7cec1bbfb36baeb38d9: Status 404 returned error can't find the container with id d458d81407ab8a0a46e39be2e2832ec03b8811daab10d7cec1bbfb36baeb38d9 Oct 10 16:44:47 crc kubenswrapper[4799]: I1010 16:44:47.156171 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-8lvfs_2866fbf1-3a49-4e4c-867b-86a40ae85ebe/console/0.log" Oct 10 16:44:47 crc kubenswrapper[4799]: I1010 16:44:47.156427 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-8lvfs" Oct 10 16:44:47 crc kubenswrapper[4799]: I1010 16:44:47.248680 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fwssh\" (UniqueName: \"kubernetes.io/projected/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-kube-api-access-fwssh\") pod \"2866fbf1-3a49-4e4c-867b-86a40ae85ebe\" (UID: \"2866fbf1-3a49-4e4c-867b-86a40ae85ebe\") " Oct 10 16:44:47 crc kubenswrapper[4799]: I1010 16:44:47.249741 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-console-oauth-config\") pod \"2866fbf1-3a49-4e4c-867b-86a40ae85ebe\" (UID: \"2866fbf1-3a49-4e4c-867b-86a40ae85ebe\") " Oct 10 16:44:47 crc kubenswrapper[4799]: I1010 16:44:47.249813 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-console-config\") pod \"2866fbf1-3a49-4e4c-867b-86a40ae85ebe\" (UID: \"2866fbf1-3a49-4e4c-867b-86a40ae85ebe\") " Oct 10 16:44:47 crc kubenswrapper[4799]: I1010 16:44:47.249833 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-trusted-ca-bundle\") pod \"2866fbf1-3a49-4e4c-867b-86a40ae85ebe\" (UID: \"2866fbf1-3a49-4e4c-867b-86a40ae85ebe\") " Oct 10 16:44:47 crc kubenswrapper[4799]: I1010 16:44:47.249853 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-oauth-serving-cert\") pod \"2866fbf1-3a49-4e4c-867b-86a40ae85ebe\" (UID: \"2866fbf1-3a49-4e4c-867b-86a40ae85ebe\") " Oct 10 16:44:47 crc kubenswrapper[4799]: I1010 16:44:47.249874 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-service-ca\") pod \"2866fbf1-3a49-4e4c-867b-86a40ae85ebe\" (UID: \"2866fbf1-3a49-4e4c-867b-86a40ae85ebe\") " Oct 10 16:44:47 crc kubenswrapper[4799]: I1010 16:44:47.249918 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-console-serving-cert\") pod \"2866fbf1-3a49-4e4c-867b-86a40ae85ebe\" (UID: \"2866fbf1-3a49-4e4c-867b-86a40ae85ebe\") " Oct 10 16:44:47 crc kubenswrapper[4799]: I1010 16:44:47.250442 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-console-config" (OuterVolumeSpecName: "console-config") pod "2866fbf1-3a49-4e4c-867b-86a40ae85ebe" (UID: "2866fbf1-3a49-4e4c-867b-86a40ae85ebe"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:44:47 crc kubenswrapper[4799]: I1010 16:44:47.250515 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "2866fbf1-3a49-4e4c-867b-86a40ae85ebe" (UID: "2866fbf1-3a49-4e4c-867b-86a40ae85ebe"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:44:47 crc kubenswrapper[4799]: I1010 16:44:47.250749 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-service-ca" (OuterVolumeSpecName: "service-ca") pod "2866fbf1-3a49-4e4c-867b-86a40ae85ebe" (UID: "2866fbf1-3a49-4e4c-867b-86a40ae85ebe"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:44:47 crc kubenswrapper[4799]: I1010 16:44:47.251089 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "2866fbf1-3a49-4e4c-867b-86a40ae85ebe" (UID: "2866fbf1-3a49-4e4c-867b-86a40ae85ebe"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:44:47 crc kubenswrapper[4799]: I1010 16:44:47.253890 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-kube-api-access-fwssh" (OuterVolumeSpecName: "kube-api-access-fwssh") pod "2866fbf1-3a49-4e4c-867b-86a40ae85ebe" (UID: "2866fbf1-3a49-4e4c-867b-86a40ae85ebe"). InnerVolumeSpecName "kube-api-access-fwssh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:44:47 crc kubenswrapper[4799]: I1010 16:44:47.254074 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "2866fbf1-3a49-4e4c-867b-86a40ae85ebe" (UID: "2866fbf1-3a49-4e4c-867b-86a40ae85ebe"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:44:47 crc kubenswrapper[4799]: I1010 16:44:47.254679 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "2866fbf1-3a49-4e4c-867b-86a40ae85ebe" (UID: "2866fbf1-3a49-4e4c-867b-86a40ae85ebe"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:44:47 crc kubenswrapper[4799]: I1010 16:44:47.351146 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fwssh\" (UniqueName: \"kubernetes.io/projected/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-kube-api-access-fwssh\") on node \"crc\" DevicePath \"\"" Oct 10 16:44:47 crc kubenswrapper[4799]: I1010 16:44:47.351196 4799 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:44:47 crc kubenswrapper[4799]: I1010 16:44:47.351275 4799 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-console-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:44:47 crc kubenswrapper[4799]: I1010 16:44:47.351294 4799 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:44:47 crc kubenswrapper[4799]: I1010 16:44:47.351312 4799 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:44:47 crc kubenswrapper[4799]: I1010 16:44:47.351329 4799 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-service-ca\") on node \"crc\" DevicePath \"\"" Oct 10 16:44:47 crc kubenswrapper[4799]: I1010 16:44:47.351349 4799 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2866fbf1-3a49-4e4c-867b-86a40ae85ebe-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 10 16:44:47 crc kubenswrapper[4799]: I1010 16:44:47.631904 4799 scope.go:117] "RemoveContainer" containerID="866009b0d6059852883926b4b56b73035c9f18f410a5bc1c0a36dfe8e48c700a" Oct 10 16:44:47 crc kubenswrapper[4799]: I1010 16:44:47.673656 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-8lvfs" event={"ID":"2866fbf1-3a49-4e4c-867b-86a40ae85ebe","Type":"ContainerDied","Data":"866009b0d6059852883926b4b56b73035c9f18f410a5bc1c0a36dfe8e48c700a"} Oct 10 16:44:47 crc kubenswrapper[4799]: I1010 16:44:47.673723 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-8lvfs" event={"ID":"2866fbf1-3a49-4e4c-867b-86a40ae85ebe","Type":"ContainerDied","Data":"7484481868bc2698a0dd5acf6ac242d50abf95c1be30280b0406989f1eba3894"} Oct 10 16:44:47 crc kubenswrapper[4799]: I1010 16:44:47.675592 4799 generic.go:334] "Generic (PLEG): container finished" podID="2ef7eb90-d1ca-4423-9fba-f61bae153e3b" containerID="35da9f680173aabf03e981aed4db3a25065ff42da21441c6168a643c43dbe2bb" exitCode=0 Oct 10 16:44:47 crc kubenswrapper[4799]: I1010 16:44:47.675674 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r6zdv" event={"ID":"2ef7eb90-d1ca-4423-9fba-f61bae153e3b","Type":"ContainerDied","Data":"35da9f680173aabf03e981aed4db3a25065ff42da21441c6168a643c43dbe2bb"} Oct 10 16:44:47 crc kubenswrapper[4799]: I1010 16:44:47.675707 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r6zdv" 
event={"ID":"2ef7eb90-d1ca-4423-9fba-f61bae153e3b","Type":"ContainerStarted","Data":"d458d81407ab8a0a46e39be2e2832ec03b8811daab10d7cec1bbfb36baeb38d9"} Oct 10 16:44:47 crc kubenswrapper[4799]: I1010 16:44:47.677320 4799 generic.go:334] "Generic (PLEG): container finished" podID="f532bcbd-a09f-4d14-b41d-0e55252454c2" containerID="043d3e09c5e71bd1917ae51d9a36920ba42bb75ccc86b843003349b5967b9fb4" exitCode=0 Oct 10 16:44:47 crc kubenswrapper[4799]: I1010 16:44:47.677389 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-8lvfs" Oct 10 16:44:47 crc kubenswrapper[4799]: I1010 16:44:47.677455 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq" event={"ID":"f532bcbd-a09f-4d14-b41d-0e55252454c2","Type":"ContainerDied","Data":"043d3e09c5e71bd1917ae51d9a36920ba42bb75ccc86b843003349b5967b9fb4"} Oct 10 16:44:47 crc kubenswrapper[4799]: I1010 16:44:47.735165 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-8lvfs"] Oct 10 16:44:47 crc kubenswrapper[4799]: I1010 16:44:47.738209 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-8lvfs"] Oct 10 16:44:48 crc kubenswrapper[4799]: I1010 16:44:48.687605 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r6zdv" event={"ID":"2ef7eb90-d1ca-4423-9fba-f61bae153e3b","Type":"ContainerStarted","Data":"5fe538f086f98f21dadbe50919bb59489a33686af05856e9ae51317afc8b5979"} Oct 10 16:44:48 crc kubenswrapper[4799]: I1010 16:44:48.710977 4799 generic.go:334] "Generic (PLEG): container finished" podID="f532bcbd-a09f-4d14-b41d-0e55252454c2" containerID="aaa8c067862b2e1990ca90f0d145596b2c772b96df9368523f0f895c0b85bc57" exitCode=0 Oct 10 16:44:48 crc kubenswrapper[4799]: I1010 16:44:48.711074 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq" event={"ID":"f532bcbd-a09f-4d14-b41d-0e55252454c2","Type":"ContainerDied","Data":"aaa8c067862b2e1990ca90f0d145596b2c772b96df9368523f0f895c0b85bc57"} Oct 10 16:44:49 crc kubenswrapper[4799]: I1010 16:44:49.411443 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2866fbf1-3a49-4e4c-867b-86a40ae85ebe" path="/var/lib/kubelet/pods/2866fbf1-3a49-4e4c-867b-86a40ae85ebe/volumes" Oct 10 16:44:49 crc kubenswrapper[4799]: I1010 16:44:49.721011 4799 generic.go:334] "Generic (PLEG): container finished" podID="2ef7eb90-d1ca-4423-9fba-f61bae153e3b" containerID="5fe538f086f98f21dadbe50919bb59489a33686af05856e9ae51317afc8b5979" exitCode=0 Oct 10 16:44:49 crc kubenswrapper[4799]: I1010 16:44:49.721135 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r6zdv" event={"ID":"2ef7eb90-d1ca-4423-9fba-f61bae153e3b","Type":"ContainerDied","Data":"5fe538f086f98f21dadbe50919bb59489a33686af05856e9ae51317afc8b5979"} Oct 10 16:44:50 crc kubenswrapper[4799]: I1010 16:44:50.021387 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq" Oct 10 16:44:50 crc kubenswrapper[4799]: I1010 16:44:50.088169 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f532bcbd-a09f-4d14-b41d-0e55252454c2-util\") pod \"f532bcbd-a09f-4d14-b41d-0e55252454c2\" (UID: \"f532bcbd-a09f-4d14-b41d-0e55252454c2\") " Oct 10 16:44:50 crc kubenswrapper[4799]: I1010 16:44:50.088347 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wpgwb\" (UniqueName: \"kubernetes.io/projected/f532bcbd-a09f-4d14-b41d-0e55252454c2-kube-api-access-wpgwb\") pod \"f532bcbd-a09f-4d14-b41d-0e55252454c2\" (UID: \"f532bcbd-a09f-4d14-b41d-0e55252454c2\") " Oct 10 16:44:50 crc kubenswrapper[4799]: I1010 16:44:50.088468 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f532bcbd-a09f-4d14-b41d-0e55252454c2-bundle\") pod \"f532bcbd-a09f-4d14-b41d-0e55252454c2\" (UID: \"f532bcbd-a09f-4d14-b41d-0e55252454c2\") " Oct 10 16:44:50 crc kubenswrapper[4799]: I1010 16:44:50.090013 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f532bcbd-a09f-4d14-b41d-0e55252454c2-bundle" (OuterVolumeSpecName: "bundle") pod "f532bcbd-a09f-4d14-b41d-0e55252454c2" (UID: "f532bcbd-a09f-4d14-b41d-0e55252454c2"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:44:50 crc kubenswrapper[4799]: I1010 16:44:50.094655 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f532bcbd-a09f-4d14-b41d-0e55252454c2-kube-api-access-wpgwb" (OuterVolumeSpecName: "kube-api-access-wpgwb") pod "f532bcbd-a09f-4d14-b41d-0e55252454c2" (UID: "f532bcbd-a09f-4d14-b41d-0e55252454c2"). InnerVolumeSpecName "kube-api-access-wpgwb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:44:50 crc kubenswrapper[4799]: I1010 16:44:50.109886 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f532bcbd-a09f-4d14-b41d-0e55252454c2-util" (OuterVolumeSpecName: "util") pod "f532bcbd-a09f-4d14-b41d-0e55252454c2" (UID: "f532bcbd-a09f-4d14-b41d-0e55252454c2"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:44:50 crc kubenswrapper[4799]: I1010 16:44:50.190651 4799 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f532bcbd-a09f-4d14-b41d-0e55252454c2-util\") on node \"crc\" DevicePath \"\"" Oct 10 16:44:50 crc kubenswrapper[4799]: I1010 16:44:50.190692 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wpgwb\" (UniqueName: \"kubernetes.io/projected/f532bcbd-a09f-4d14-b41d-0e55252454c2-kube-api-access-wpgwb\") on node \"crc\" DevicePath \"\"" Oct 10 16:44:50 crc kubenswrapper[4799]: I1010 16:44:50.190706 4799 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f532bcbd-a09f-4d14-b41d-0e55252454c2-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:44:50 crc kubenswrapper[4799]: I1010 16:44:50.730624 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r6zdv" event={"ID":"2ef7eb90-d1ca-4423-9fba-f61bae153e3b","Type":"ContainerStarted","Data":"cf1107f59268e688205d1e39dbf62e96b7db212db948e2e4c60b962abc1b38d1"} Oct 10 16:44:50 crc kubenswrapper[4799]: I1010 16:44:50.736203 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq" event={"ID":"f532bcbd-a09f-4d14-b41d-0e55252454c2","Type":"ContainerDied","Data":"5b0402e8325757e0f5992c885499c5d0c5069fcccb822447493a15228e5248ea"} Oct 10 16:44:50 crc kubenswrapper[4799]: I1010 16:44:50.736254 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5b0402e8325757e0f5992c885499c5d0c5069fcccb822447493a15228e5248ea" Oct 10 16:44:50 crc kubenswrapper[4799]: I1010 16:44:50.736516 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq"
Oct 10 16:44:50 crc kubenswrapper[4799]: I1010 16:44:50.763864 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-r6zdv" podStartSLOduration=2.319803389 podStartE2EDuration="4.763831725s" podCreationTimestamp="2025-10-10 16:44:46 +0000 UTC" firstStartedPulling="2025-10-10 16:44:47.689343316 +0000 UTC m=+781.197667431" lastFinishedPulling="2025-10-10 16:44:50.133371632 +0000 UTC m=+783.641695767" observedRunningTime="2025-10-10 16:44:50.762642096 +0000 UTC m=+784.270966241" watchObservedRunningTime="2025-10-10 16:44:50.763831725 +0000 UTC m=+784.272155920"
Oct 10 16:44:56 crc kubenswrapper[4799]: I1010 16:44:56.417347 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-r6zdv"
Oct 10 16:44:56 crc kubenswrapper[4799]: I1010 16:44:56.417622 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-r6zdv"
Oct 10 16:44:56 crc kubenswrapper[4799]: I1010 16:44:56.465939 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-r6zdv"
Oct 10 16:44:56 crc kubenswrapper[4799]: I1010 16:44:56.840274 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-r6zdv"
Oct 10 16:44:58 crc kubenswrapper[4799]: I1010 16:44:58.266516 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-r6zdv"]
Oct 10 16:44:58 crc kubenswrapper[4799]: I1010 16:44:58.791811 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-r6zdv" podUID="2ef7eb90-d1ca-4423-9fba-f61bae153e3b" containerName="registry-server" containerID="cri-o://cf1107f59268e688205d1e39dbf62e96b7db212db948e2e4c60b962abc1b38d1" gracePeriod=2
Oct 10 16:44:59 crc kubenswrapper[4799]: I1010 16:44:59.167471 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-r6zdv"
Oct 10 16:44:59 crc kubenswrapper[4799]: I1010 16:44:59.215541 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ef7eb90-d1ca-4423-9fba-f61bae153e3b-catalog-content\") pod \"2ef7eb90-d1ca-4423-9fba-f61bae153e3b\" (UID: \"2ef7eb90-d1ca-4423-9fba-f61bae153e3b\") "
Oct 10 16:44:59 crc kubenswrapper[4799]: I1010 16:44:59.215590 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-79lnp\" (UniqueName: \"kubernetes.io/projected/2ef7eb90-d1ca-4423-9fba-f61bae153e3b-kube-api-access-79lnp\") pod \"2ef7eb90-d1ca-4423-9fba-f61bae153e3b\" (UID: \"2ef7eb90-d1ca-4423-9fba-f61bae153e3b\") "
Oct 10 16:44:59 crc kubenswrapper[4799]: I1010 16:44:59.215679 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ef7eb90-d1ca-4423-9fba-f61bae153e3b-utilities\") pod \"2ef7eb90-d1ca-4423-9fba-f61bae153e3b\" (UID: \"2ef7eb90-d1ca-4423-9fba-f61bae153e3b\") "
Oct 10 16:44:59 crc kubenswrapper[4799]: I1010 16:44:59.216570 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ef7eb90-d1ca-4423-9fba-f61bae153e3b-utilities" (OuterVolumeSpecName: "utilities") pod "2ef7eb90-d1ca-4423-9fba-f61bae153e3b" (UID: "2ef7eb90-d1ca-4423-9fba-f61bae153e3b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 16:44:59 crc kubenswrapper[4799]: I1010 16:44:59.220608 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ef7eb90-d1ca-4423-9fba-f61bae153e3b-kube-api-access-79lnp" (OuterVolumeSpecName: "kube-api-access-79lnp") pod "2ef7eb90-d1ca-4423-9fba-f61bae153e3b" (UID: "2ef7eb90-d1ca-4423-9fba-f61bae153e3b"). InnerVolumeSpecName "kube-api-access-79lnp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 16:44:59 crc kubenswrapper[4799]: I1010 16:44:59.317332 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ef7eb90-d1ca-4423-9fba-f61bae153e3b-utilities\") on node \"crc\" DevicePath \"\""
Oct 10 16:44:59 crc kubenswrapper[4799]: I1010 16:44:59.317361 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-79lnp\" (UniqueName: \"kubernetes.io/projected/2ef7eb90-d1ca-4423-9fba-f61bae153e3b-kube-api-access-79lnp\") on node \"crc\" DevicePath \"\""
Oct 10 16:44:59 crc kubenswrapper[4799]: I1010 16:44:59.735835 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ef7eb90-d1ca-4423-9fba-f61bae153e3b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2ef7eb90-d1ca-4423-9fba-f61bae153e3b" (UID: "2ef7eb90-d1ca-4423-9fba-f61bae153e3b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 16:44:59 crc kubenswrapper[4799]: I1010 16:44:59.802454 4799 generic.go:334] "Generic (PLEG): container finished" podID="2ef7eb90-d1ca-4423-9fba-f61bae153e3b" containerID="cf1107f59268e688205d1e39dbf62e96b7db212db948e2e4c60b962abc1b38d1" exitCode=0
Oct 10 16:44:59 crc kubenswrapper[4799]: I1010 16:44:59.802502 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-r6zdv"
Oct 10 16:44:59 crc kubenswrapper[4799]: I1010 16:44:59.802536 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r6zdv" event={"ID":"2ef7eb90-d1ca-4423-9fba-f61bae153e3b","Type":"ContainerDied","Data":"cf1107f59268e688205d1e39dbf62e96b7db212db948e2e4c60b962abc1b38d1"}
Oct 10 16:44:59 crc kubenswrapper[4799]: I1010 16:44:59.804988 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r6zdv" event={"ID":"2ef7eb90-d1ca-4423-9fba-f61bae153e3b","Type":"ContainerDied","Data":"d458d81407ab8a0a46e39be2e2832ec03b8811daab10d7cec1bbfb36baeb38d9"}
Oct 10 16:44:59 crc kubenswrapper[4799]: I1010 16:44:59.805046 4799 scope.go:117] "RemoveContainer" containerID="cf1107f59268e688205d1e39dbf62e96b7db212db948e2e4c60b962abc1b38d1"
Oct 10 16:44:59 crc kubenswrapper[4799]: I1010 16:44:59.824538 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ef7eb90-d1ca-4423-9fba-f61bae153e3b-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 10 16:44:59 crc kubenswrapper[4799]: I1010 16:44:59.824862 4799 scope.go:117] "RemoveContainer" containerID="5fe538f086f98f21dadbe50919bb59489a33686af05856e9ae51317afc8b5979"
Oct 10 16:44:59 crc kubenswrapper[4799]: I1010 16:44:59.841539 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-r6zdv"]
Oct 10 16:44:59 crc kubenswrapper[4799]: I1010 16:44:59.845080 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-r6zdv"]
Oct 10 16:44:59 crc kubenswrapper[4799]: I1010 16:44:59.859207 4799 scope.go:117] "RemoveContainer" containerID="35da9f680173aabf03e981aed4db3a25065ff42da21441c6168a643c43dbe2bb"
Oct 10 16:44:59 crc kubenswrapper[4799]: I1010 16:44:59.877904 4799 scope.go:117] "RemoveContainer" containerID="cf1107f59268e688205d1e39dbf62e96b7db212db948e2e4c60b962abc1b38d1"
Oct 10 16:44:59 crc kubenswrapper[4799]: E1010 16:44:59.878229 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf1107f59268e688205d1e39dbf62e96b7db212db948e2e4c60b962abc1b38d1\": container with ID starting with cf1107f59268e688205d1e39dbf62e96b7db212db948e2e4c60b962abc1b38d1 not found: ID does not exist" containerID="cf1107f59268e688205d1e39dbf62e96b7db212db948e2e4c60b962abc1b38d1"
Oct 10 16:44:59 crc kubenswrapper[4799]: I1010 16:44:59.878256 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf1107f59268e688205d1e39dbf62e96b7db212db948e2e4c60b962abc1b38d1"} err="failed to get container status \"cf1107f59268e688205d1e39dbf62e96b7db212db948e2e4c60b962abc1b38d1\": rpc error: code = NotFound desc = could not find container \"cf1107f59268e688205d1e39dbf62e96b7db212db948e2e4c60b962abc1b38d1\": container with ID starting with cf1107f59268e688205d1e39dbf62e96b7db212db948e2e4c60b962abc1b38d1 not found: ID does not exist"
Oct 10 16:44:59 crc kubenswrapper[4799]: I1010 16:44:59.878277 4799 scope.go:117] "RemoveContainer" containerID="5fe538f086f98f21dadbe50919bb59489a33686af05856e9ae51317afc8b5979"
Oct 10 16:44:59 crc kubenswrapper[4799]: E1010 16:44:59.878492 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5fe538f086f98f21dadbe50919bb59489a33686af05856e9ae51317afc8b5979\": container with ID starting with 5fe538f086f98f21dadbe50919bb59489a33686af05856e9ae51317afc8b5979 not found: ID does not exist" containerID="5fe538f086f98f21dadbe50919bb59489a33686af05856e9ae51317afc8b5979"
Oct 10 16:44:59 crc kubenswrapper[4799]: I1010 16:44:59.878579 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fe538f086f98f21dadbe50919bb59489a33686af05856e9ae51317afc8b5979"} err="failed to get container status \"5fe538f086f98f21dadbe50919bb59489a33686af05856e9ae51317afc8b5979\": rpc error: code = NotFound desc = could not find container \"5fe538f086f98f21dadbe50919bb59489a33686af05856e9ae51317afc8b5979\": container with ID starting with 5fe538f086f98f21dadbe50919bb59489a33686af05856e9ae51317afc8b5979 not found: ID does not exist"
Oct 10 16:44:59 crc kubenswrapper[4799]: I1010 16:44:59.878659 4799 scope.go:117] "RemoveContainer" containerID="35da9f680173aabf03e981aed4db3a25065ff42da21441c6168a643c43dbe2bb"
Oct 10 16:44:59 crc kubenswrapper[4799]: E1010 16:44:59.879009 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"35da9f680173aabf03e981aed4db3a25065ff42da21441c6168a643c43dbe2bb\": container with ID starting with 35da9f680173aabf03e981aed4db3a25065ff42da21441c6168a643c43dbe2bb not found: ID does not exist" containerID="35da9f680173aabf03e981aed4db3a25065ff42da21441c6168a643c43dbe2bb"
Oct 10 16:44:59 crc kubenswrapper[4799]: I1010 16:44:59.879036 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"35da9f680173aabf03e981aed4db3a25065ff42da21441c6168a643c43dbe2bb"} err="failed to get container status \"35da9f680173aabf03e981aed4db3a25065ff42da21441c6168a643c43dbe2bb\": rpc error: code = NotFound desc = could not find container \"35da9f680173aabf03e981aed4db3a25065ff42da21441c6168a643c43dbe2bb\": container with ID starting with 35da9f680173aabf03e981aed4db3a25065ff42da21441c6168a643c43dbe2bb not found: ID does not exist"
Oct 10 16:45:00 crc kubenswrapper[4799]: I1010 16:45:00.129472 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335245-pwfqs"]
Oct 10 16:45:00 crc kubenswrapper[4799]: E1010 16:45:00.129705 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f532bcbd-a09f-4d14-b41d-0e55252454c2" containerName="extract"
Oct 10 16:45:00 crc kubenswrapper[4799]: I1010 16:45:00.129721 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f532bcbd-a09f-4d14-b41d-0e55252454c2" containerName="extract"
Oct 10 16:45:00 crc kubenswrapper[4799]: E1010 16:45:00.129733 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ef7eb90-d1ca-4423-9fba-f61bae153e3b" containerName="extract-content"
Oct 10 16:45:00 crc kubenswrapper[4799]: I1010 16:45:00.129741 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ef7eb90-d1ca-4423-9fba-f61bae153e3b" containerName="extract-content"
Oct 10 16:45:00 crc kubenswrapper[4799]: E1010 16:45:00.129771 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ef7eb90-d1ca-4423-9fba-f61bae153e3b" containerName="registry-server"
Oct 10 16:45:00 crc kubenswrapper[4799]: I1010 16:45:00.129779 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ef7eb90-d1ca-4423-9fba-f61bae153e3b" containerName="registry-server"
podUID="2866fbf1-3a49-4e4c-867b-86a40ae85ebe" containerName="console" Oct 10 16:45:00 crc kubenswrapper[4799]: I1010 16:45:00.129800 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="2866fbf1-3a49-4e4c-867b-86a40ae85ebe" containerName="console" Oct 10 16:45:00 crc kubenswrapper[4799]: E1010 16:45:00.129811 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f532bcbd-a09f-4d14-b41d-0e55252454c2" containerName="util" Oct 10 16:45:00 crc kubenswrapper[4799]: I1010 16:45:00.129818 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f532bcbd-a09f-4d14-b41d-0e55252454c2" containerName="util" Oct 10 16:45:00 crc kubenswrapper[4799]: E1010 16:45:00.129833 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f532bcbd-a09f-4d14-b41d-0e55252454c2" containerName="pull" Oct 10 16:45:00 crc kubenswrapper[4799]: I1010 16:45:00.129840 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f532bcbd-a09f-4d14-b41d-0e55252454c2" containerName="pull" Oct 10 16:45:00 crc kubenswrapper[4799]: E1010 16:45:00.129870 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ef7eb90-d1ca-4423-9fba-f61bae153e3b" containerName="extract-utilities" Oct 10 16:45:00 crc kubenswrapper[4799]: I1010 16:45:00.129878 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ef7eb90-d1ca-4423-9fba-f61bae153e3b" containerName="extract-utilities" Oct 10 16:45:00 crc kubenswrapper[4799]: I1010 16:45:00.129999 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="2866fbf1-3a49-4e4c-867b-86a40ae85ebe" containerName="console" Oct 10 16:45:00 crc kubenswrapper[4799]: I1010 16:45:00.130016 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ef7eb90-d1ca-4423-9fba-f61bae153e3b" containerName="registry-server" Oct 10 16:45:00 crc kubenswrapper[4799]: I1010 16:45:00.130025 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f532bcbd-a09f-4d14-b41d-0e55252454c2" containerName="extract" Oct 10 16:45:00 crc kubenswrapper[4799]: I1010 16:45:00.130475 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335245-pwfqs" Oct 10 16:45:00 crc kubenswrapper[4799]: I1010 16:45:00.134104 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 10 16:45:00 crc kubenswrapper[4799]: I1010 16:45:00.134994 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 10 16:45:00 crc kubenswrapper[4799]: I1010 16:45:00.140269 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335245-pwfqs"] Oct 10 16:45:00 crc kubenswrapper[4799]: I1010 16:45:00.229815 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bfmrx\" (UniqueName: \"kubernetes.io/projected/923eeaa7-33ad-4958-8c29-83d9508c527c-kube-api-access-bfmrx\") pod \"collect-profiles-29335245-pwfqs\" (UID: \"923eeaa7-33ad-4958-8c29-83d9508c527c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335245-pwfqs" Oct 10 16:45:00 crc kubenswrapper[4799]: I1010 16:45:00.230079 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/923eeaa7-33ad-4958-8c29-83d9508c527c-config-volume\") pod \"collect-profiles-29335245-pwfqs\" (UID: \"923eeaa7-33ad-4958-8c29-83d9508c527c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335245-pwfqs" Oct 10 16:45:00 crc kubenswrapper[4799]: I1010 16:45:00.230200 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/923eeaa7-33ad-4958-8c29-83d9508c527c-secret-volume\") pod \"collect-profiles-29335245-pwfqs\" (UID: \"923eeaa7-33ad-4958-8c29-83d9508c527c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335245-pwfqs" Oct 10 16:45:00 crc kubenswrapper[4799]: I1010 16:45:00.331666 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/923eeaa7-33ad-4958-8c29-83d9508c527c-config-volume\") pod \"collect-profiles-29335245-pwfqs\" (UID: \"923eeaa7-33ad-4958-8c29-83d9508c527c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335245-pwfqs" Oct 10 16:45:00 crc kubenswrapper[4799]: I1010 16:45:00.331844 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/923eeaa7-33ad-4958-8c29-83d9508c527c-secret-volume\") pod \"collect-profiles-29335245-pwfqs\" (UID: \"923eeaa7-33ad-4958-8c29-83d9508c527c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335245-pwfqs" Oct 10 16:45:00 crc kubenswrapper[4799]: I1010 16:45:00.331927 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bfmrx\" (UniqueName: \"kubernetes.io/projected/923eeaa7-33ad-4958-8c29-83d9508c527c-kube-api-access-bfmrx\") pod \"collect-profiles-29335245-pwfqs\" (UID: \"923eeaa7-33ad-4958-8c29-83d9508c527c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335245-pwfqs" Oct 10 16:45:00 crc kubenswrapper[4799]: I1010 16:45:00.332989 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/923eeaa7-33ad-4958-8c29-83d9508c527c-config-volume\") pod 
\"collect-profiles-29335245-pwfqs\" (UID: \"923eeaa7-33ad-4958-8c29-83d9508c527c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335245-pwfqs" Oct 10 16:45:00 crc kubenswrapper[4799]: I1010 16:45:00.338303 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/923eeaa7-33ad-4958-8c29-83d9508c527c-secret-volume\") pod \"collect-profiles-29335245-pwfqs\" (UID: \"923eeaa7-33ad-4958-8c29-83d9508c527c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335245-pwfqs" Oct 10 16:45:00 crc kubenswrapper[4799]: I1010 16:45:00.368381 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bfmrx\" (UniqueName: \"kubernetes.io/projected/923eeaa7-33ad-4958-8c29-83d9508c527c-kube-api-access-bfmrx\") pod \"collect-profiles-29335245-pwfqs\" (UID: \"923eeaa7-33ad-4958-8c29-83d9508c527c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335245-pwfqs" Oct 10 16:45:00 crc kubenswrapper[4799]: I1010 16:45:00.447352 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335245-pwfqs" Oct 10 16:45:00 crc kubenswrapper[4799]: I1010 16:45:00.929188 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335245-pwfqs"] Oct 10 16:45:00 crc kubenswrapper[4799]: W1010 16:45:00.937540 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod923eeaa7_33ad_4958_8c29_83d9508c527c.slice/crio-e4845e85e67f871960def390e5699b7299b4ebef6dc54f593d34992e33f87e5f WatchSource:0}: Error finding container e4845e85e67f871960def390e5699b7299b4ebef6dc54f593d34992e33f87e5f: Status 404 returned error can't find the container with id e4845e85e67f871960def390e5699b7299b4ebef6dc54f593d34992e33f87e5f Oct 10 16:45:01 crc kubenswrapper[4799]: I1010 16:45:01.419000 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ef7eb90-d1ca-4423-9fba-f61bae153e3b" path="/var/lib/kubelet/pods/2ef7eb90-d1ca-4423-9fba-f61bae153e3b/volumes" Oct 10 16:45:01 crc kubenswrapper[4799]: I1010 16:45:01.818690 4799 generic.go:334] "Generic (PLEG): container finished" podID="923eeaa7-33ad-4958-8c29-83d9508c527c" containerID="dc80910e86773f54ae92cfc151c285e5c20d72d27b9a21bf89d0b4d93f66e785" exitCode=0 Oct 10 16:45:01 crc kubenswrapper[4799]: I1010 16:45:01.818819 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335245-pwfqs" event={"ID":"923eeaa7-33ad-4958-8c29-83d9508c527c","Type":"ContainerDied","Data":"dc80910e86773f54ae92cfc151c285e5c20d72d27b9a21bf89d0b4d93f66e785"} Oct 10 16:45:01 crc kubenswrapper[4799]: I1010 16:45:01.819024 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335245-pwfqs" event={"ID":"923eeaa7-33ad-4958-8c29-83d9508c527c","Type":"ContainerStarted","Data":"e4845e85e67f871960def390e5699b7299b4ebef6dc54f593d34992e33f87e5f"} Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.020581 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-75cc6fd4f5-h4qjd"] Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.021241 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-75cc6fd4f5-h4qjd" Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.024598 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.025051 4799 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.028266 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.028266 4799 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-5j9r5" Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.033290 4799 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.034255 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-75cc6fd4f5-h4qjd"] Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.053067 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/dfd38d6c-f6ac-44c3-9602-c045dcb55735-apiservice-cert\") pod \"metallb-operator-controller-manager-75cc6fd4f5-h4qjd\" (UID: \"dfd38d6c-f6ac-44c3-9602-c045dcb55735\") " pod="metallb-system/metallb-operator-controller-manager-75cc6fd4f5-h4qjd" Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.053147 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4kvw\" (UniqueName: \"kubernetes.io/projected/dfd38d6c-f6ac-44c3-9602-c045dcb55735-kube-api-access-f4kvw\") pod \"metallb-operator-controller-manager-75cc6fd4f5-h4qjd\" (UID: \"dfd38d6c-f6ac-44c3-9602-c045dcb55735\") " pod="metallb-system/metallb-operator-controller-manager-75cc6fd4f5-h4qjd" Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.053178 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/dfd38d6c-f6ac-44c3-9602-c045dcb55735-webhook-cert\") pod \"metallb-operator-controller-manager-75cc6fd4f5-h4qjd\" (UID: \"dfd38d6c-f6ac-44c3-9602-c045dcb55735\") " pod="metallb-system/metallb-operator-controller-manager-75cc6fd4f5-h4qjd" Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.154266 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/dfd38d6c-f6ac-44c3-9602-c045dcb55735-apiservice-cert\") pod \"metallb-operator-controller-manager-75cc6fd4f5-h4qjd\" (UID: \"dfd38d6c-f6ac-44c3-9602-c045dcb55735\") " pod="metallb-system/metallb-operator-controller-manager-75cc6fd4f5-h4qjd" Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.154334 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4kvw\" (UniqueName: \"kubernetes.io/projected/dfd38d6c-f6ac-44c3-9602-c045dcb55735-kube-api-access-f4kvw\") pod \"metallb-operator-controller-manager-75cc6fd4f5-h4qjd\" (UID: \"dfd38d6c-f6ac-44c3-9602-c045dcb55735\") " pod="metallb-system/metallb-operator-controller-manager-75cc6fd4f5-h4qjd" Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.154364 
Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.154364 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/dfd38d6c-f6ac-44c3-9602-c045dcb55735-webhook-cert\") pod \"metallb-operator-controller-manager-75cc6fd4f5-h4qjd\" (UID: \"dfd38d6c-f6ac-44c3-9602-c045dcb55735\") " pod="metallb-system/metallb-operator-controller-manager-75cc6fd4f5-h4qjd"
Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.159440 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/dfd38d6c-f6ac-44c3-9602-c045dcb55735-webhook-cert\") pod \"metallb-operator-controller-manager-75cc6fd4f5-h4qjd\" (UID: \"dfd38d6c-f6ac-44c3-9602-c045dcb55735\") " pod="metallb-system/metallb-operator-controller-manager-75cc6fd4f5-h4qjd"
Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.160321 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/dfd38d6c-f6ac-44c3-9602-c045dcb55735-apiservice-cert\") pod \"metallb-operator-controller-manager-75cc6fd4f5-h4qjd\" (UID: \"dfd38d6c-f6ac-44c3-9602-c045dcb55735\") " pod="metallb-system/metallb-operator-controller-manager-75cc6fd4f5-h4qjd"
Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.174942 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4kvw\" (UniqueName: \"kubernetes.io/projected/dfd38d6c-f6ac-44c3-9602-c045dcb55735-kube-api-access-f4kvw\") pod \"metallb-operator-controller-manager-75cc6fd4f5-h4qjd\" (UID: \"dfd38d6c-f6ac-44c3-9602-c045dcb55735\") " pod="metallb-system/metallb-operator-controller-manager-75cc6fd4f5-h4qjd"
Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.337024 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-75cc6fd4f5-h4qjd"
Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.357819 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-564bc5bbdc-rzbnf"]
Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.358910 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-564bc5bbdc-rzbnf"
Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.361372 4799 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-tlj4z"
Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.361949 4799 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert"
Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.361956 4799 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert"
Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.395930 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-564bc5bbdc-rzbnf"]
Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.458206 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7914fde3-52d8-49eb-a258-505730801250-apiservice-cert\") pod \"metallb-operator-webhook-server-564bc5bbdc-rzbnf\" (UID: \"7914fde3-52d8-49eb-a258-505730801250\") " pod="metallb-system/metallb-operator-webhook-server-564bc5bbdc-rzbnf"
Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.458409 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7914fde3-52d8-49eb-a258-505730801250-webhook-cert\") pod \"metallb-operator-webhook-server-564bc5bbdc-rzbnf\" (UID: \"7914fde3-52d8-49eb-a258-505730801250\") " pod="metallb-system/metallb-operator-webhook-server-564bc5bbdc-rzbnf"
Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.458475 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77xjj\" (UniqueName: \"kubernetes.io/projected/7914fde3-52d8-49eb-a258-505730801250-kube-api-access-77xjj\") pod \"metallb-operator-webhook-server-564bc5bbdc-rzbnf\" (UID: \"7914fde3-52d8-49eb-a258-505730801250\") " pod="metallb-system/metallb-operator-webhook-server-564bc5bbdc-rzbnf"
Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.559516 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7914fde3-52d8-49eb-a258-505730801250-apiservice-cert\") pod \"metallb-operator-webhook-server-564bc5bbdc-rzbnf\" (UID: \"7914fde3-52d8-49eb-a258-505730801250\") " pod="metallb-system/metallb-operator-webhook-server-564bc5bbdc-rzbnf"
Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.559566 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7914fde3-52d8-49eb-a258-505730801250-webhook-cert\") pod \"metallb-operator-webhook-server-564bc5bbdc-rzbnf\" (UID: \"7914fde3-52d8-49eb-a258-505730801250\") " pod="metallb-system/metallb-operator-webhook-server-564bc5bbdc-rzbnf"
Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.560637 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77xjj\" (UniqueName: \"kubernetes.io/projected/7914fde3-52d8-49eb-a258-505730801250-kube-api-access-77xjj\") pod \"metallb-operator-webhook-server-564bc5bbdc-rzbnf\" (UID: \"7914fde3-52d8-49eb-a258-505730801250\") " pod="metallb-system/metallb-operator-webhook-server-564bc5bbdc-rzbnf"
Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.572927 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7914fde3-52d8-49eb-a258-505730801250-webhook-cert\") pod \"metallb-operator-webhook-server-564bc5bbdc-rzbnf\" (UID: \"7914fde3-52d8-49eb-a258-505730801250\") " pod="metallb-system/metallb-operator-webhook-server-564bc5bbdc-rzbnf"
Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.573995 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7914fde3-52d8-49eb-a258-505730801250-apiservice-cert\") pod \"metallb-operator-webhook-server-564bc5bbdc-rzbnf\" (UID: \"7914fde3-52d8-49eb-a258-505730801250\") " pod="metallb-system/metallb-operator-webhook-server-564bc5bbdc-rzbnf"
Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.574649 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-77xjj\" (UniqueName: \"kubernetes.io/projected/7914fde3-52d8-49eb-a258-505730801250-kube-api-access-77xjj\") pod \"metallb-operator-webhook-server-564bc5bbdc-rzbnf\" (UID: \"7914fde3-52d8-49eb-a258-505730801250\") " pod="metallb-system/metallb-operator-webhook-server-564bc5bbdc-rzbnf"
Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.742582 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-564bc5bbdc-rzbnf"
Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.792842 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-75cc6fd4f5-h4qjd"]
Oct 10 16:45:02 crc kubenswrapper[4799]: W1010 16:45:02.798078 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddfd38d6c_f6ac_44c3_9602_c045dcb55735.slice/crio-879daab61192b6818e2740f34ea05cd8337425dd110bba74f2271465b97429c0 WatchSource:0}: Error finding container 879daab61192b6818e2740f34ea05cd8337425dd110bba74f2271465b97429c0: Status 404 returned error can't find the container with id 879daab61192b6818e2740f34ea05cd8337425dd110bba74f2271465b97429c0
Oct 10 16:45:02 crc kubenswrapper[4799]: I1010 16:45:02.843998 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-75cc6fd4f5-h4qjd" event={"ID":"dfd38d6c-f6ac-44c3-9602-c045dcb55735","Type":"ContainerStarted","Data":"879daab61192b6818e2740f34ea05cd8337425dd110bba74f2271465b97429c0"}
Oct 10 16:45:03 crc kubenswrapper[4799]: I1010 16:45:03.131480 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335245-pwfqs"
Oct 10 16:45:03 crc kubenswrapper[4799]: I1010 16:45:03.166963 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bfmrx\" (UniqueName: \"kubernetes.io/projected/923eeaa7-33ad-4958-8c29-83d9508c527c-kube-api-access-bfmrx\") pod \"923eeaa7-33ad-4958-8c29-83d9508c527c\" (UID: \"923eeaa7-33ad-4958-8c29-83d9508c527c\") "
Oct 10 16:45:03 crc kubenswrapper[4799]: I1010 16:45:03.167024 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/923eeaa7-33ad-4958-8c29-83d9508c527c-config-volume\") pod \"923eeaa7-33ad-4958-8c29-83d9508c527c\" (UID: \"923eeaa7-33ad-4958-8c29-83d9508c527c\") "
Oct 10 16:45:03 crc kubenswrapper[4799]: I1010 16:45:03.167135 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/923eeaa7-33ad-4958-8c29-83d9508c527c-secret-volume\") pod \"923eeaa7-33ad-4958-8c29-83d9508c527c\" (UID: \"923eeaa7-33ad-4958-8c29-83d9508c527c\") "
Oct 10 16:45:03 crc kubenswrapper[4799]: I1010 16:45:03.176654 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/923eeaa7-33ad-4958-8c29-83d9508c527c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "923eeaa7-33ad-4958-8c29-83d9508c527c" (UID: "923eeaa7-33ad-4958-8c29-83d9508c527c"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 16:45:03 crc kubenswrapper[4799]: I1010 16:45:03.177681 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/923eeaa7-33ad-4958-8c29-83d9508c527c-config-volume" (OuterVolumeSpecName: "config-volume") pod "923eeaa7-33ad-4958-8c29-83d9508c527c" (UID: "923eeaa7-33ad-4958-8c29-83d9508c527c"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:45:03 crc kubenswrapper[4799]: I1010 16:45:03.208601 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-564bc5bbdc-rzbnf"] Oct 10 16:45:03 crc kubenswrapper[4799]: W1010 16:45:03.221081 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7914fde3_52d8_49eb_a258_505730801250.slice/crio-13f0e77d41bb2c45e5b913283f2555875399dbb2b9c28d30472876cc4791a196 WatchSource:0}: Error finding container 13f0e77d41bb2c45e5b913283f2555875399dbb2b9c28d30472876cc4791a196: Status 404 returned error can't find the container with id 13f0e77d41bb2c45e5b913283f2555875399dbb2b9c28d30472876cc4791a196 Oct 10 16:45:03 crc kubenswrapper[4799]: I1010 16:45:03.268509 4799 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/923eeaa7-33ad-4958-8c29-83d9508c527c-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 10 16:45:03 crc kubenswrapper[4799]: I1010 16:45:03.268540 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bfmrx\" (UniqueName: \"kubernetes.io/projected/923eeaa7-33ad-4958-8c29-83d9508c527c-kube-api-access-bfmrx\") on node \"crc\" DevicePath \"\"" Oct 10 16:45:03 crc kubenswrapper[4799]: I1010 16:45:03.268550 4799 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/923eeaa7-33ad-4958-8c29-83d9508c527c-config-volume\") on node \"crc\" DevicePath \"\"" Oct 10 16:45:03 crc kubenswrapper[4799]: I1010 16:45:03.849085 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-564bc5bbdc-rzbnf" event={"ID":"7914fde3-52d8-49eb-a258-505730801250","Type":"ContainerStarted","Data":"13f0e77d41bb2c45e5b913283f2555875399dbb2b9c28d30472876cc4791a196"} Oct 10 16:45:03 crc kubenswrapper[4799]: I1010 16:45:03.850411 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335245-pwfqs" event={"ID":"923eeaa7-33ad-4958-8c29-83d9508c527c","Type":"ContainerDied","Data":"e4845e85e67f871960def390e5699b7299b4ebef6dc54f593d34992e33f87e5f"} Oct 10 16:45:03 crc kubenswrapper[4799]: I1010 16:45:03.850435 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e4845e85e67f871960def390e5699b7299b4ebef6dc54f593d34992e33f87e5f" Oct 10 16:45:03 crc kubenswrapper[4799]: I1010 16:45:03.850471 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335245-pwfqs" Oct 10 16:45:06 crc kubenswrapper[4799]: I1010 16:45:06.476320 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-l7djd"] Oct 10 16:45:06 crc kubenswrapper[4799]: E1010 16:45:06.477048 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="923eeaa7-33ad-4958-8c29-83d9508c527c" containerName="collect-profiles" Oct 10 16:45:06 crc kubenswrapper[4799]: I1010 16:45:06.477065 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="923eeaa7-33ad-4958-8c29-83d9508c527c" containerName="collect-profiles" Oct 10 16:45:06 crc kubenswrapper[4799]: I1010 16:45:06.477203 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="923eeaa7-33ad-4958-8c29-83d9508c527c" containerName="collect-profiles" Oct 10 16:45:06 crc kubenswrapper[4799]: I1010 16:45:06.480293 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l7djd" Oct 10 16:45:06 crc kubenswrapper[4799]: I1010 16:45:06.508732 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-l7djd"] Oct 10 16:45:06 crc kubenswrapper[4799]: I1010 16:45:06.515933 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/003ac12b-b317-4b95-8b0e-7377afa1fd7b-catalog-content\") pod \"certified-operators-l7djd\" (UID: \"003ac12b-b317-4b95-8b0e-7377afa1fd7b\") " pod="openshift-marketplace/certified-operators-l7djd" Oct 10 16:45:06 crc kubenswrapper[4799]: I1010 16:45:06.515977 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/003ac12b-b317-4b95-8b0e-7377afa1fd7b-utilities\") pod \"certified-operators-l7djd\" (UID: \"003ac12b-b317-4b95-8b0e-7377afa1fd7b\") " pod="openshift-marketplace/certified-operators-l7djd" Oct 10 16:45:06 crc kubenswrapper[4799]: I1010 16:45:06.516018 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bwgq\" (UniqueName: \"kubernetes.io/projected/003ac12b-b317-4b95-8b0e-7377afa1fd7b-kube-api-access-4bwgq\") pod \"certified-operators-l7djd\" (UID: \"003ac12b-b317-4b95-8b0e-7377afa1fd7b\") " pod="openshift-marketplace/certified-operators-l7djd" Oct 10 16:45:06 crc kubenswrapper[4799]: I1010 16:45:06.616901 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/003ac12b-b317-4b95-8b0e-7377afa1fd7b-catalog-content\") pod \"certified-operators-l7djd\" (UID: \"003ac12b-b317-4b95-8b0e-7377afa1fd7b\") " pod="openshift-marketplace/certified-operators-l7djd" Oct 10 16:45:06 crc kubenswrapper[4799]: I1010 16:45:06.616968 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/003ac12b-b317-4b95-8b0e-7377afa1fd7b-utilities\") pod \"certified-operators-l7djd\" (UID: \"003ac12b-b317-4b95-8b0e-7377afa1fd7b\") " pod="openshift-marketplace/certified-operators-l7djd" Oct 10 16:45:06 crc kubenswrapper[4799]: I1010 16:45:06.617030 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bwgq\" (UniqueName: \"kubernetes.io/projected/003ac12b-b317-4b95-8b0e-7377afa1fd7b-kube-api-access-4bwgq\") pod 
\"certified-operators-l7djd\" (UID: \"003ac12b-b317-4b95-8b0e-7377afa1fd7b\") " pod="openshift-marketplace/certified-operators-l7djd" Oct 10 16:45:06 crc kubenswrapper[4799]: I1010 16:45:06.617649 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/003ac12b-b317-4b95-8b0e-7377afa1fd7b-catalog-content\") pod \"certified-operators-l7djd\" (UID: \"003ac12b-b317-4b95-8b0e-7377afa1fd7b\") " pod="openshift-marketplace/certified-operators-l7djd" Oct 10 16:45:06 crc kubenswrapper[4799]: I1010 16:45:06.617895 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/003ac12b-b317-4b95-8b0e-7377afa1fd7b-utilities\") pod \"certified-operators-l7djd\" (UID: \"003ac12b-b317-4b95-8b0e-7377afa1fd7b\") " pod="openshift-marketplace/certified-operators-l7djd" Oct 10 16:45:06 crc kubenswrapper[4799]: I1010 16:45:06.634920 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bwgq\" (UniqueName: \"kubernetes.io/projected/003ac12b-b317-4b95-8b0e-7377afa1fd7b-kube-api-access-4bwgq\") pod \"certified-operators-l7djd\" (UID: \"003ac12b-b317-4b95-8b0e-7377afa1fd7b\") " pod="openshift-marketplace/certified-operators-l7djd" Oct 10 16:45:06 crc kubenswrapper[4799]: I1010 16:45:06.797480 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l7djd" Oct 10 16:45:06 crc kubenswrapper[4799]: I1010 16:45:06.871362 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-75cc6fd4f5-h4qjd" event={"ID":"dfd38d6c-f6ac-44c3-9602-c045dcb55735","Type":"ContainerStarted","Data":"c079e0fbd6002b74e34ef2e8f2e6a02ddf799c8b0b689b6bf85a84023a5a1367"} Oct 10 16:45:06 crc kubenswrapper[4799]: I1010 16:45:06.871529 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-75cc6fd4f5-h4qjd" Oct 10 16:45:06 crc kubenswrapper[4799]: I1010 16:45:06.901469 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-75cc6fd4f5-h4qjd" podStartSLOduration=1.3465828420000001 podStartE2EDuration="4.901449918s" podCreationTimestamp="2025-10-10 16:45:02 +0000 UTC" firstStartedPulling="2025-10-10 16:45:02.80183183 +0000 UTC m=+796.310155945" lastFinishedPulling="2025-10-10 16:45:06.356698906 +0000 UTC m=+799.865023021" observedRunningTime="2025-10-10 16:45:06.897995893 +0000 UTC m=+800.406320028" watchObservedRunningTime="2025-10-10 16:45:06.901449918 +0000 UTC m=+800.409774043" Oct 10 16:45:07 crc kubenswrapper[4799]: I1010 16:45:07.056138 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-l7djd"] Oct 10 16:45:07 crc kubenswrapper[4799]: I1010 16:45:07.877929 4799 generic.go:334] "Generic (PLEG): container finished" podID="003ac12b-b317-4b95-8b0e-7377afa1fd7b" containerID="eca1d46b76d2b02c1fbbe7c62b4a79bd0d5a573855c20fc74048ec018f09f189" exitCode=0 Oct 10 16:45:07 crc kubenswrapper[4799]: I1010 16:45:07.877979 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l7djd" event={"ID":"003ac12b-b317-4b95-8b0e-7377afa1fd7b","Type":"ContainerDied","Data":"eca1d46b76d2b02c1fbbe7c62b4a79bd0d5a573855c20fc74048ec018f09f189"} Oct 10 16:45:07 crc kubenswrapper[4799]: I1010 16:45:07.878290 4799 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-marketplace/certified-operators-l7djd" event={"ID":"003ac12b-b317-4b95-8b0e-7377afa1fd7b","Type":"ContainerStarted","Data":"2b0d103e59cb6d963e91c4b50b683412b63acdfb904ada08801519e4f3473258"} Oct 10 16:45:09 crc kubenswrapper[4799]: I1010 16:45:09.899452 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-564bc5bbdc-rzbnf" event={"ID":"7914fde3-52d8-49eb-a258-505730801250","Type":"ContainerStarted","Data":"e3d4fe7622d1789fc42af36f767c4fc46655dfbaf81eb7ab9fae6eb4118169c7"} Oct 10 16:45:09 crc kubenswrapper[4799]: I1010 16:45:09.900141 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-564bc5bbdc-rzbnf" Oct 10 16:45:09 crc kubenswrapper[4799]: I1010 16:45:09.909807 4799 generic.go:334] "Generic (PLEG): container finished" podID="003ac12b-b317-4b95-8b0e-7377afa1fd7b" containerID="1e2c824e95ea09aa46368926c713fe642279d69027e1b281f27614cde6be280d" exitCode=0 Oct 10 16:45:09 crc kubenswrapper[4799]: I1010 16:45:09.909863 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l7djd" event={"ID":"003ac12b-b317-4b95-8b0e-7377afa1fd7b","Type":"ContainerDied","Data":"1e2c824e95ea09aa46368926c713fe642279d69027e1b281f27614cde6be280d"} Oct 10 16:45:09 crc kubenswrapper[4799]: I1010 16:45:09.938963 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-564bc5bbdc-rzbnf" podStartSLOduration=1.999475868 podStartE2EDuration="7.938948007s" podCreationTimestamp="2025-10-10 16:45:02 +0000 UTC" firstStartedPulling="2025-10-10 16:45:03.224234618 +0000 UTC m=+796.732558733" lastFinishedPulling="2025-10-10 16:45:09.163706757 +0000 UTC m=+802.672030872" observedRunningTime="2025-10-10 16:45:09.935748758 +0000 UTC m=+803.444072883" watchObservedRunningTime="2025-10-10 16:45:09.938948007 +0000 UTC m=+803.447272122" Oct 10 16:45:10 crc kubenswrapper[4799]: I1010 16:45:10.928583 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l7djd" event={"ID":"003ac12b-b317-4b95-8b0e-7377afa1fd7b","Type":"ContainerStarted","Data":"2ae471d0a61b57eb7aa1de2a81892763b7102313c6126b1e82d37f20caeda6a1"} Oct 10 16:45:10 crc kubenswrapper[4799]: I1010 16:45:10.952737 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-l7djd" podStartSLOduration=2.301695237 podStartE2EDuration="4.952715394s" podCreationTimestamp="2025-10-10 16:45:06 +0000 UTC" firstStartedPulling="2025-10-10 16:45:07.879403993 +0000 UTC m=+801.387728108" lastFinishedPulling="2025-10-10 16:45:10.53042415 +0000 UTC m=+804.038748265" observedRunningTime="2025-10-10 16:45:10.947462885 +0000 UTC m=+804.455787030" watchObservedRunningTime="2025-10-10 16:45:10.952715394 +0000 UTC m=+804.461039519" Oct 10 16:45:16 crc kubenswrapper[4799]: I1010 16:45:16.798425 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-l7djd" Oct 10 16:45:16 crc kubenswrapper[4799]: I1010 16:45:16.799081 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-l7djd" Oct 10 16:45:16 crc kubenswrapper[4799]: I1010 16:45:16.892707 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-l7djd" Oct 10 16:45:16 crc kubenswrapper[4799]: 
Oct 10 16:45:16 crc kubenswrapper[4799]: I1010 16:45:16.996507 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-l7djd"
Oct 10 16:45:19 crc kubenswrapper[4799]: I1010 16:45:19.060082 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-l7djd"]
Oct 10 16:45:19 crc kubenswrapper[4799]: I1010 16:45:19.060439 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-l7djd" podUID="003ac12b-b317-4b95-8b0e-7377afa1fd7b" containerName="registry-server" containerID="cri-o://2ae471d0a61b57eb7aa1de2a81892763b7102313c6126b1e82d37f20caeda6a1" gracePeriod=2
Oct 10 16:45:19 crc kubenswrapper[4799]: I1010 16:45:19.517142 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l7djd"
Oct 10 16:45:19 crc kubenswrapper[4799]: I1010 16:45:19.664459 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/003ac12b-b317-4b95-8b0e-7377afa1fd7b-catalog-content\") pod \"003ac12b-b317-4b95-8b0e-7377afa1fd7b\" (UID: \"003ac12b-b317-4b95-8b0e-7377afa1fd7b\") "
Oct 10 16:45:19 crc kubenswrapper[4799]: I1010 16:45:19.664514 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4bwgq\" (UniqueName: \"kubernetes.io/projected/003ac12b-b317-4b95-8b0e-7377afa1fd7b-kube-api-access-4bwgq\") pod \"003ac12b-b317-4b95-8b0e-7377afa1fd7b\" (UID: \"003ac12b-b317-4b95-8b0e-7377afa1fd7b\") "
Oct 10 16:45:19 crc kubenswrapper[4799]: I1010 16:45:19.664589 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/003ac12b-b317-4b95-8b0e-7377afa1fd7b-utilities\") pod \"003ac12b-b317-4b95-8b0e-7377afa1fd7b\" (UID: \"003ac12b-b317-4b95-8b0e-7377afa1fd7b\") "
Oct 10 16:45:19 crc kubenswrapper[4799]: I1010 16:45:19.666046 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/003ac12b-b317-4b95-8b0e-7377afa1fd7b-utilities" (OuterVolumeSpecName: "utilities") pod "003ac12b-b317-4b95-8b0e-7377afa1fd7b" (UID: "003ac12b-b317-4b95-8b0e-7377afa1fd7b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 16:45:19 crc kubenswrapper[4799]: I1010 16:45:19.679741 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/003ac12b-b317-4b95-8b0e-7377afa1fd7b-kube-api-access-4bwgq" (OuterVolumeSpecName: "kube-api-access-4bwgq") pod "003ac12b-b317-4b95-8b0e-7377afa1fd7b" (UID: "003ac12b-b317-4b95-8b0e-7377afa1fd7b"). InnerVolumeSpecName "kube-api-access-4bwgq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 16:45:19 crc kubenswrapper[4799]: I1010 16:45:19.711521 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/003ac12b-b317-4b95-8b0e-7377afa1fd7b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "003ac12b-b317-4b95-8b0e-7377afa1fd7b" (UID: "003ac12b-b317-4b95-8b0e-7377afa1fd7b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 16:45:19 crc kubenswrapper[4799]: I1010 16:45:19.765683 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/003ac12b-b317-4b95-8b0e-7377afa1fd7b-utilities\") on node \"crc\" DevicePath \"\""
Oct 10 16:45:19 crc kubenswrapper[4799]: I1010 16:45:19.765716 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/003ac12b-b317-4b95-8b0e-7377afa1fd7b-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 10 16:45:19 crc kubenswrapper[4799]: I1010 16:45:19.765729 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4bwgq\" (UniqueName: \"kubernetes.io/projected/003ac12b-b317-4b95-8b0e-7377afa1fd7b-kube-api-access-4bwgq\") on node \"crc\" DevicePath \"\""
Oct 10 16:45:19 crc kubenswrapper[4799]: I1010 16:45:19.996132 4799 generic.go:334] "Generic (PLEG): container finished" podID="003ac12b-b317-4b95-8b0e-7377afa1fd7b" containerID="2ae471d0a61b57eb7aa1de2a81892763b7102313c6126b1e82d37f20caeda6a1" exitCode=0
Oct 10 16:45:19 crc kubenswrapper[4799]: I1010 16:45:19.996210 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l7djd" event={"ID":"003ac12b-b317-4b95-8b0e-7377afa1fd7b","Type":"ContainerDied","Data":"2ae471d0a61b57eb7aa1de2a81892763b7102313c6126b1e82d37f20caeda6a1"}
Oct 10 16:45:19 crc kubenswrapper[4799]: I1010 16:45:19.996634 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l7djd" event={"ID":"003ac12b-b317-4b95-8b0e-7377afa1fd7b","Type":"ContainerDied","Data":"2b0d103e59cb6d963e91c4b50b683412b63acdfb904ada08801519e4f3473258"}
Oct 10 16:45:19 crc kubenswrapper[4799]: I1010 16:45:19.996270 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l7djd"
Oct 10 16:45:19 crc kubenswrapper[4799]: I1010 16:45:19.996668 4799 scope.go:117] "RemoveContainer" containerID="2ae471d0a61b57eb7aa1de2a81892763b7102313c6126b1e82d37f20caeda6a1"
Oct 10 16:45:20 crc kubenswrapper[4799]: I1010 16:45:20.026260 4799 scope.go:117] "RemoveContainer" containerID="1e2c824e95ea09aa46368926c713fe642279d69027e1b281f27614cde6be280d"
Oct 10 16:45:20 crc kubenswrapper[4799]: I1010 16:45:20.030013 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-l7djd"]
Oct 10 16:45:20 crc kubenswrapper[4799]: I1010 16:45:20.034557 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-l7djd"]
Oct 10 16:45:20 crc kubenswrapper[4799]: I1010 16:45:20.066788 4799 scope.go:117] "RemoveContainer" containerID="eca1d46b76d2b02c1fbbe7c62b4a79bd0d5a573855c20fc74048ec018f09f189"
Oct 10 16:45:20 crc kubenswrapper[4799]: I1010 16:45:20.091873 4799 scope.go:117] "RemoveContainer" containerID="2ae471d0a61b57eb7aa1de2a81892763b7102313c6126b1e82d37f20caeda6a1"
Oct 10 16:45:20 crc kubenswrapper[4799]: E1010 16:45:20.092322 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ae471d0a61b57eb7aa1de2a81892763b7102313c6126b1e82d37f20caeda6a1\": container with ID starting with 2ae471d0a61b57eb7aa1de2a81892763b7102313c6126b1e82d37f20caeda6a1 not found: ID does not exist" containerID="2ae471d0a61b57eb7aa1de2a81892763b7102313c6126b1e82d37f20caeda6a1"
Oct 10 16:45:20 crc kubenswrapper[4799]: I1010 16:45:20.092439 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ae471d0a61b57eb7aa1de2a81892763b7102313c6126b1e82d37f20caeda6a1"} err="failed to get container status \"2ae471d0a61b57eb7aa1de2a81892763b7102313c6126b1e82d37f20caeda6a1\": rpc error: code = NotFound desc = could not find container \"2ae471d0a61b57eb7aa1de2a81892763b7102313c6126b1e82d37f20caeda6a1\": container with ID starting with 2ae471d0a61b57eb7aa1de2a81892763b7102313c6126b1e82d37f20caeda6a1 not found: ID does not exist"
Oct 10 16:45:20 crc kubenswrapper[4799]: I1010 16:45:20.092531 4799 scope.go:117] "RemoveContainer" containerID="1e2c824e95ea09aa46368926c713fe642279d69027e1b281f27614cde6be280d"
Oct 10 16:45:20 crc kubenswrapper[4799]: E1010 16:45:20.093065 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e2c824e95ea09aa46368926c713fe642279d69027e1b281f27614cde6be280d\": container with ID starting with 1e2c824e95ea09aa46368926c713fe642279d69027e1b281f27614cde6be280d not found: ID does not exist" containerID="1e2c824e95ea09aa46368926c713fe642279d69027e1b281f27614cde6be280d"
Oct 10 16:45:20 crc kubenswrapper[4799]: I1010 16:45:20.093086 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e2c824e95ea09aa46368926c713fe642279d69027e1b281f27614cde6be280d"} err="failed to get container status \"1e2c824e95ea09aa46368926c713fe642279d69027e1b281f27614cde6be280d\": rpc error: code = NotFound desc = could not find container \"1e2c824e95ea09aa46368926c713fe642279d69027e1b281f27614cde6be280d\": container with ID starting with 1e2c824e95ea09aa46368926c713fe642279d69027e1b281f27614cde6be280d not found: ID does not exist"
Oct 10 16:45:20 crc kubenswrapper[4799]: I1010 16:45:20.093100 4799 scope.go:117] "RemoveContainer" containerID="eca1d46b76d2b02c1fbbe7c62b4a79bd0d5a573855c20fc74048ec018f09f189"
Oct 10 16:45:20 crc kubenswrapper[4799]: E1010 16:45:20.093407 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eca1d46b76d2b02c1fbbe7c62b4a79bd0d5a573855c20fc74048ec018f09f189\": container with ID starting with eca1d46b76d2b02c1fbbe7c62b4a79bd0d5a573855c20fc74048ec018f09f189 not found: ID does not exist" containerID="eca1d46b76d2b02c1fbbe7c62b4a79bd0d5a573855c20fc74048ec018f09f189"
Oct 10 16:45:20 crc kubenswrapper[4799]: I1010 16:45:20.093483 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eca1d46b76d2b02c1fbbe7c62b4a79bd0d5a573855c20fc74048ec018f09f189"} err="failed to get container status \"eca1d46b76d2b02c1fbbe7c62b4a79bd0d5a573855c20fc74048ec018f09f189\": rpc error: code = NotFound desc = could not find container \"eca1d46b76d2b02c1fbbe7c62b4a79bd0d5a573855c20fc74048ec018f09f189\": container with ID starting with eca1d46b76d2b02c1fbbe7c62b4a79bd0d5a573855c20fc74048ec018f09f189 not found: ID does not exist"
Oct 10 16:45:21 crc kubenswrapper[4799]: I1010 16:45:21.413077 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="003ac12b-b317-4b95-8b0e-7377afa1fd7b" path="/var/lib/kubelet/pods/003ac12b-b317-4b95-8b0e-7377afa1fd7b/volumes"
Oct 10 16:45:22 crc kubenswrapper[4799]: I1010 16:45:22.278777 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-w994n"]
Oct 10 16:45:22 crc kubenswrapper[4799]: E1010 16:45:22.279222 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="003ac12b-b317-4b95-8b0e-7377afa1fd7b" containerName="registry-server"
Oct 10 16:45:22 crc kubenswrapper[4799]: I1010 16:45:22.279254 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="003ac12b-b317-4b95-8b0e-7377afa1fd7b" containerName="registry-server"
Oct 10 16:45:22 crc kubenswrapper[4799]: E1010 16:45:22.279286 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="003ac12b-b317-4b95-8b0e-7377afa1fd7b" containerName="extract-utilities"
Oct 10 16:45:22 crc kubenswrapper[4799]: I1010 16:45:22.279301 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="003ac12b-b317-4b95-8b0e-7377afa1fd7b" containerName="extract-utilities"
Oct 10 16:45:22 crc kubenswrapper[4799]: E1010 16:45:22.279323 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="003ac12b-b317-4b95-8b0e-7377afa1fd7b" containerName="extract-content"
Oct 10 16:45:22 crc kubenswrapper[4799]: I1010 16:45:22.279336 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="003ac12b-b317-4b95-8b0e-7377afa1fd7b" containerName="extract-content"
Oct 10 16:45:22 crc kubenswrapper[4799]: I1010 16:45:22.279521 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="003ac12b-b317-4b95-8b0e-7377afa1fd7b" containerName="registry-server"
Oct 10 16:45:22 crc kubenswrapper[4799]: I1010 16:45:22.281097 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w994n"
Oct 10 16:45:22 crc kubenswrapper[4799]: I1010 16:45:22.293780 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-w994n"]
Oct 10 16:45:22 crc kubenswrapper[4799]: I1010 16:45:22.421183 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac3e2089-5e3a-4d5c-88b8-18c39e160222-utilities\") pod \"redhat-marketplace-w994n\" (UID: \"ac3e2089-5e3a-4d5c-88b8-18c39e160222\") " pod="openshift-marketplace/redhat-marketplace-w994n"
Oct 10 16:45:22 crc kubenswrapper[4799]: I1010 16:45:22.421438 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac3e2089-5e3a-4d5c-88b8-18c39e160222-catalog-content\") pod \"redhat-marketplace-w994n\" (UID: \"ac3e2089-5e3a-4d5c-88b8-18c39e160222\") " pod="openshift-marketplace/redhat-marketplace-w994n"
Oct 10 16:45:22 crc kubenswrapper[4799]: I1010 16:45:22.421498 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trmhf\" (UniqueName: \"kubernetes.io/projected/ac3e2089-5e3a-4d5c-88b8-18c39e160222-kube-api-access-trmhf\") pod \"redhat-marketplace-w994n\" (UID: \"ac3e2089-5e3a-4d5c-88b8-18c39e160222\") " pod="openshift-marketplace/redhat-marketplace-w994n"
Oct 10 16:45:22 crc kubenswrapper[4799]: I1010 16:45:22.522937 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac3e2089-5e3a-4d5c-88b8-18c39e160222-catalog-content\") pod \"redhat-marketplace-w994n\" (UID: \"ac3e2089-5e3a-4d5c-88b8-18c39e160222\") " pod="openshift-marketplace/redhat-marketplace-w994n"
Oct 10 16:45:22 crc kubenswrapper[4799]: I1010 16:45:22.523022 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trmhf\" (UniqueName: \"kubernetes.io/projected/ac3e2089-5e3a-4d5c-88b8-18c39e160222-kube-api-access-trmhf\") pod \"redhat-marketplace-w994n\" (UID: \"ac3e2089-5e3a-4d5c-88b8-18c39e160222\") " pod="openshift-marketplace/redhat-marketplace-w994n"
Oct 10 16:45:22 crc kubenswrapper[4799]: I1010 16:45:22.523079 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac3e2089-5e3a-4d5c-88b8-18c39e160222-utilities\") pod \"redhat-marketplace-w994n\" (UID: \"ac3e2089-5e3a-4d5c-88b8-18c39e160222\") " pod="openshift-marketplace/redhat-marketplace-w994n"
Oct 10 16:45:22 crc kubenswrapper[4799]: I1010 16:45:22.523541 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac3e2089-5e3a-4d5c-88b8-18c39e160222-catalog-content\") pod \"redhat-marketplace-w994n\" (UID: \"ac3e2089-5e3a-4d5c-88b8-18c39e160222\") " pod="openshift-marketplace/redhat-marketplace-w994n"
Oct 10 16:45:22 crc kubenswrapper[4799]: I1010 16:45:22.523667 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac3e2089-5e3a-4d5c-88b8-18c39e160222-utilities\") pod \"redhat-marketplace-w994n\" (UID: \"ac3e2089-5e3a-4d5c-88b8-18c39e160222\") " pod="openshift-marketplace/redhat-marketplace-w994n"
Oct 10 16:45:22 crc kubenswrapper[4799]: I1010 16:45:22.558976 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-trmhf\" (UniqueName: \"kubernetes.io/projected/ac3e2089-5e3a-4d5c-88b8-18c39e160222-kube-api-access-trmhf\") pod \"redhat-marketplace-w994n\" (UID: \"ac3e2089-5e3a-4d5c-88b8-18c39e160222\") " pod="openshift-marketplace/redhat-marketplace-w994n"
Oct 10 16:45:22 crc kubenswrapper[4799]: I1010 16:45:22.612574 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w994n"
Oct 10 16:45:22 crc kubenswrapper[4799]: I1010 16:45:22.757180 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-564bc5bbdc-rzbnf"
Oct 10 16:45:23 crc kubenswrapper[4799]: I1010 16:45:23.109249 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-w994n"]
Oct 10 16:45:24 crc kubenswrapper[4799]: I1010 16:45:24.024273 4799 generic.go:334] "Generic (PLEG): container finished" podID="ac3e2089-5e3a-4d5c-88b8-18c39e160222" containerID="fd1925a498f70b18960f0672489667ad2af16d59b2e2c65d3380595d8c5b8635" exitCode=0
Oct 10 16:45:24 crc kubenswrapper[4799]: I1010 16:45:24.024332 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w994n" event={"ID":"ac3e2089-5e3a-4d5c-88b8-18c39e160222","Type":"ContainerDied","Data":"fd1925a498f70b18960f0672489667ad2af16d59b2e2c65d3380595d8c5b8635"}
Oct 10 16:45:24 crc kubenswrapper[4799]: I1010 16:45:24.024902 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w994n" event={"ID":"ac3e2089-5e3a-4d5c-88b8-18c39e160222","Type":"ContainerStarted","Data":"a0bc50995f5f8949264d966fcbe910a05d08482da4a71b2e2ffbcb7d1f6c646e"}
Oct 10 16:45:25 crc kubenswrapper[4799]: I1010 16:45:25.032796 4799 generic.go:334] "Generic (PLEG): container finished" podID="ac3e2089-5e3a-4d5c-88b8-18c39e160222" containerID="6d88f617adfd8909cf90d56ab7d64a5a0cc78c508c975c23de4fae779a9001a4" exitCode=0
Oct 10 16:45:25 crc kubenswrapper[4799]: I1010 16:45:25.032863 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w994n" event={"ID":"ac3e2089-5e3a-4d5c-88b8-18c39e160222","Type":"ContainerDied","Data":"6d88f617adfd8909cf90d56ab7d64a5a0cc78c508c975c23de4fae779a9001a4"}
Oct 10 16:45:26 crc kubenswrapper[4799]: I1010 16:45:26.041948 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w994n" event={"ID":"ac3e2089-5e3a-4d5c-88b8-18c39e160222","Type":"ContainerStarted","Data":"763e7708c5b93b0aac24dacf753290feaf99dea61a8b6d330863cb961f61fca7"}
Oct 10 16:45:26 crc kubenswrapper[4799]: I1010 16:45:26.060192 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-w994n" podStartSLOduration=2.662629161 podStartE2EDuration="4.060162294s" podCreationTimestamp="2025-10-10 16:45:22 +0000 UTC" firstStartedPulling="2025-10-10 16:45:24.026207511 +0000 UTC m=+817.534531626" lastFinishedPulling="2025-10-10 16:45:25.423740614 +0000 UTC m=+818.932064759" observedRunningTime="2025-10-10 16:45:26.055006277 +0000 UTC m=+819.563330492" watchObservedRunningTime="2025-10-10 16:45:26.060162294 +0000 UTC m=+819.568486459"
Oct 10 16:45:28 crc kubenswrapper[4799]: I1010 16:45:28.075058 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-sqnvn"]
Oct 10 16:45:28 crc kubenswrapper[4799]: I1010 16:45:28.080851 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-sqnvn"
Oct 10 16:45:28 crc kubenswrapper[4799]: I1010 16:45:28.102356 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-sqnvn"]
Oct 10 16:45:28 crc kubenswrapper[4799]: I1010 16:45:28.115795 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zj86t\" (UniqueName: \"kubernetes.io/projected/186e2e27-b4a2-419b-9acb-6f5becfd9f25-kube-api-access-zj86t\") pod \"community-operators-sqnvn\" (UID: \"186e2e27-b4a2-419b-9acb-6f5becfd9f25\") " pod="openshift-marketplace/community-operators-sqnvn"
Oct 10 16:45:28 crc kubenswrapper[4799]: I1010 16:45:28.115896 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/186e2e27-b4a2-419b-9acb-6f5becfd9f25-utilities\") pod \"community-operators-sqnvn\" (UID: \"186e2e27-b4a2-419b-9acb-6f5becfd9f25\") " pod="openshift-marketplace/community-operators-sqnvn"
Oct 10 16:45:28 crc kubenswrapper[4799]: I1010 16:45:28.115958 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/186e2e27-b4a2-419b-9acb-6f5becfd9f25-catalog-content\") pod \"community-operators-sqnvn\" (UID: \"186e2e27-b4a2-419b-9acb-6f5becfd9f25\") " pod="openshift-marketplace/community-operators-sqnvn"
Oct 10 16:45:28 crc kubenswrapper[4799]: I1010 16:45:28.218218 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/186e2e27-b4a2-419b-9acb-6f5becfd9f25-utilities\") pod \"community-operators-sqnvn\" (UID: \"186e2e27-b4a2-419b-9acb-6f5becfd9f25\") " pod="openshift-marketplace/community-operators-sqnvn"
Oct 10 16:45:28 crc kubenswrapper[4799]: I1010 16:45:28.218314 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/186e2e27-b4a2-419b-9acb-6f5becfd9f25-catalog-content\") pod \"community-operators-sqnvn\" (UID: \"186e2e27-b4a2-419b-9acb-6f5becfd9f25\") " pod="openshift-marketplace/community-operators-sqnvn"
Oct 10 16:45:28 crc kubenswrapper[4799]: I1010 16:45:28.218417 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zj86t\" (UniqueName: \"kubernetes.io/projected/186e2e27-b4a2-419b-9acb-6f5becfd9f25-kube-api-access-zj86t\") pod \"community-operators-sqnvn\" (UID: \"186e2e27-b4a2-419b-9acb-6f5becfd9f25\") " pod="openshift-marketplace/community-operators-sqnvn"
Oct 10 16:45:28 crc kubenswrapper[4799]: I1010 16:45:28.219397 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/186e2e27-b4a2-419b-9acb-6f5becfd9f25-catalog-content\") pod \"community-operators-sqnvn\" (UID: \"186e2e27-b4a2-419b-9acb-6f5becfd9f25\") " pod="openshift-marketplace/community-operators-sqnvn"
Oct 10 16:45:28 crc kubenswrapper[4799]: I1010 16:45:28.219491 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/186e2e27-b4a2-419b-9acb-6f5becfd9f25-utilities\") pod \"community-operators-sqnvn\" (UID: \"186e2e27-b4a2-419b-9acb-6f5becfd9f25\") " pod="openshift-marketplace/community-operators-sqnvn"
Oct 10 16:45:28 crc kubenswrapper[4799]: I1010 16:45:28.240930 4799 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zj86t\" (UniqueName: \"kubernetes.io/projected/186e2e27-b4a2-419b-9acb-6f5becfd9f25-kube-api-access-zj86t\") pod \"community-operators-sqnvn\" (UID: \"186e2e27-b4a2-419b-9acb-6f5becfd9f25\") " pod="openshift-marketplace/community-operators-sqnvn" Oct 10 16:45:28 crc kubenswrapper[4799]: I1010 16:45:28.452990 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-sqnvn" Oct 10 16:45:28 crc kubenswrapper[4799]: I1010 16:45:28.713598 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-sqnvn"] Oct 10 16:45:28 crc kubenswrapper[4799]: W1010 16:45:28.728769 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod186e2e27_b4a2_419b_9acb_6f5becfd9f25.slice/crio-6f5d35a444656fb30affecbbfea168eec3e3de54136e1e14742f16b21fc0ab9b WatchSource:0}: Error finding container 6f5d35a444656fb30affecbbfea168eec3e3de54136e1e14742f16b21fc0ab9b: Status 404 returned error can't find the container with id 6f5d35a444656fb30affecbbfea168eec3e3de54136e1e14742f16b21fc0ab9b Oct 10 16:45:29 crc kubenswrapper[4799]: I1010 16:45:29.065587 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sqnvn" event={"ID":"186e2e27-b4a2-419b-9acb-6f5becfd9f25","Type":"ContainerStarted","Data":"6f5d35a444656fb30affecbbfea168eec3e3de54136e1e14742f16b21fc0ab9b"} Oct 10 16:45:30 crc kubenswrapper[4799]: I1010 16:45:30.076781 4799 generic.go:334] "Generic (PLEG): container finished" podID="186e2e27-b4a2-419b-9acb-6f5becfd9f25" containerID="a9a17f063810bee0a1e1e376cf08a697166c6ddded95310621641fe2004850a4" exitCode=0 Oct 10 16:45:30 crc kubenswrapper[4799]: I1010 16:45:30.076841 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sqnvn" event={"ID":"186e2e27-b4a2-419b-9acb-6f5becfd9f25","Type":"ContainerDied","Data":"a9a17f063810bee0a1e1e376cf08a697166c6ddded95310621641fe2004850a4"} Oct 10 16:45:32 crc kubenswrapper[4799]: I1010 16:45:32.094945 4799 generic.go:334] "Generic (PLEG): container finished" podID="186e2e27-b4a2-419b-9acb-6f5becfd9f25" containerID="2b5d223eb808142350ba57a1674b507d3f11dcd067fa151b1089c36dcca0acd6" exitCode=0 Oct 10 16:45:32 crc kubenswrapper[4799]: I1010 16:45:32.095085 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sqnvn" event={"ID":"186e2e27-b4a2-419b-9acb-6f5becfd9f25","Type":"ContainerDied","Data":"2b5d223eb808142350ba57a1674b507d3f11dcd067fa151b1089c36dcca0acd6"} Oct 10 16:45:32 crc kubenswrapper[4799]: I1010 16:45:32.626327 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-w994n" Oct 10 16:45:32 crc kubenswrapper[4799]: I1010 16:45:32.628241 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-w994n" Oct 10 16:45:32 crc kubenswrapper[4799]: I1010 16:45:32.676504 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-w994n" Oct 10 16:45:33 crc kubenswrapper[4799]: I1010 16:45:33.105463 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sqnvn" 
event={"ID":"186e2e27-b4a2-419b-9acb-6f5becfd9f25","Type":"ContainerStarted","Data":"9eddfb63c14c1b27eedcbce48d62ecdc92f5fe2785c81ae64f12da8c92bbbe3a"} Oct 10 16:45:33 crc kubenswrapper[4799]: I1010 16:45:33.131123 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-sqnvn" podStartSLOduration=2.579581855 podStartE2EDuration="5.131078259s" podCreationTimestamp="2025-10-10 16:45:28 +0000 UTC" firstStartedPulling="2025-10-10 16:45:30.07851897 +0000 UTC m=+823.586843125" lastFinishedPulling="2025-10-10 16:45:32.630015404 +0000 UTC m=+826.138339529" observedRunningTime="2025-10-10 16:45:33.129144012 +0000 UTC m=+826.637468177" watchObservedRunningTime="2025-10-10 16:45:33.131078259 +0000 UTC m=+826.639402414" Oct 10 16:45:33 crc kubenswrapper[4799]: I1010 16:45:33.172354 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-w994n" Oct 10 16:45:35 crc kubenswrapper[4799]: I1010 16:45:35.065115 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-w994n"] Oct 10 16:45:36 crc kubenswrapper[4799]: I1010 16:45:36.126474 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-w994n" podUID="ac3e2089-5e3a-4d5c-88b8-18c39e160222" containerName="registry-server" containerID="cri-o://763e7708c5b93b0aac24dacf753290feaf99dea61a8b6d330863cb961f61fca7" gracePeriod=2 Oct 10 16:45:36 crc kubenswrapper[4799]: I1010 16:45:36.556526 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w994n" Oct 10 16:45:36 crc kubenswrapper[4799]: I1010 16:45:36.682545 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac3e2089-5e3a-4d5c-88b8-18c39e160222-utilities\") pod \"ac3e2089-5e3a-4d5c-88b8-18c39e160222\" (UID: \"ac3e2089-5e3a-4d5c-88b8-18c39e160222\") " Oct 10 16:45:36 crc kubenswrapper[4799]: I1010 16:45:36.683252 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac3e2089-5e3a-4d5c-88b8-18c39e160222-utilities" (OuterVolumeSpecName: "utilities") pod "ac3e2089-5e3a-4d5c-88b8-18c39e160222" (UID: "ac3e2089-5e3a-4d5c-88b8-18c39e160222"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:45:36 crc kubenswrapper[4799]: I1010 16:45:36.683348 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac3e2089-5e3a-4d5c-88b8-18c39e160222-catalog-content\") pod \"ac3e2089-5e3a-4d5c-88b8-18c39e160222\" (UID: \"ac3e2089-5e3a-4d5c-88b8-18c39e160222\") " Oct 10 16:45:36 crc kubenswrapper[4799]: I1010 16:45:36.690355 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-trmhf\" (UniqueName: \"kubernetes.io/projected/ac3e2089-5e3a-4d5c-88b8-18c39e160222-kube-api-access-trmhf\") pod \"ac3e2089-5e3a-4d5c-88b8-18c39e160222\" (UID: \"ac3e2089-5e3a-4d5c-88b8-18c39e160222\") " Oct 10 16:45:36 crc kubenswrapper[4799]: I1010 16:45:36.690675 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac3e2089-5e3a-4d5c-88b8-18c39e160222-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 16:45:36 crc kubenswrapper[4799]: I1010 16:45:36.695649 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac3e2089-5e3a-4d5c-88b8-18c39e160222-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ac3e2089-5e3a-4d5c-88b8-18c39e160222" (UID: "ac3e2089-5e3a-4d5c-88b8-18c39e160222"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:45:36 crc kubenswrapper[4799]: I1010 16:45:36.704397 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac3e2089-5e3a-4d5c-88b8-18c39e160222-kube-api-access-trmhf" (OuterVolumeSpecName: "kube-api-access-trmhf") pod "ac3e2089-5e3a-4d5c-88b8-18c39e160222" (UID: "ac3e2089-5e3a-4d5c-88b8-18c39e160222"). InnerVolumeSpecName "kube-api-access-trmhf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:45:36 crc kubenswrapper[4799]: I1010 16:45:36.791415 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac3e2089-5e3a-4d5c-88b8-18c39e160222-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 16:45:36 crc kubenswrapper[4799]: I1010 16:45:36.791444 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-trmhf\" (UniqueName: \"kubernetes.io/projected/ac3e2089-5e3a-4d5c-88b8-18c39e160222-kube-api-access-trmhf\") on node \"crc\" DevicePath \"\"" Oct 10 16:45:37 crc kubenswrapper[4799]: I1010 16:45:37.136110 4799 generic.go:334] "Generic (PLEG): container finished" podID="ac3e2089-5e3a-4d5c-88b8-18c39e160222" containerID="763e7708c5b93b0aac24dacf753290feaf99dea61a8b6d330863cb961f61fca7" exitCode=0 Oct 10 16:45:37 crc kubenswrapper[4799]: I1010 16:45:37.136167 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w994n" event={"ID":"ac3e2089-5e3a-4d5c-88b8-18c39e160222","Type":"ContainerDied","Data":"763e7708c5b93b0aac24dacf753290feaf99dea61a8b6d330863cb961f61fca7"} Oct 10 16:45:37 crc kubenswrapper[4799]: I1010 16:45:37.136206 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w994n" event={"ID":"ac3e2089-5e3a-4d5c-88b8-18c39e160222","Type":"ContainerDied","Data":"a0bc50995f5f8949264d966fcbe910a05d08482da4a71b2e2ffbcb7d1f6c646e"} Oct 10 16:45:37 crc kubenswrapper[4799]: I1010 16:45:37.136212 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w994n" Oct 10 16:45:37 crc kubenswrapper[4799]: I1010 16:45:37.136261 4799 scope.go:117] "RemoveContainer" containerID="763e7708c5b93b0aac24dacf753290feaf99dea61a8b6d330863cb961f61fca7" Oct 10 16:45:37 crc kubenswrapper[4799]: I1010 16:45:37.168300 4799 scope.go:117] "RemoveContainer" containerID="6d88f617adfd8909cf90d56ab7d64a5a0cc78c508c975c23de4fae779a9001a4" Oct 10 16:45:37 crc kubenswrapper[4799]: I1010 16:45:37.186841 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-w994n"] Oct 10 16:45:37 crc kubenswrapper[4799]: I1010 16:45:37.192431 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-w994n"] Oct 10 16:45:37 crc kubenswrapper[4799]: I1010 16:45:37.210304 4799 scope.go:117] "RemoveContainer" containerID="fd1925a498f70b18960f0672489667ad2af16d59b2e2c65d3380595d8c5b8635" Oct 10 16:45:37 crc kubenswrapper[4799]: I1010 16:45:37.235656 4799 scope.go:117] "RemoveContainer" containerID="763e7708c5b93b0aac24dacf753290feaf99dea61a8b6d330863cb961f61fca7" Oct 10 16:45:37 crc kubenswrapper[4799]: E1010 16:45:37.236374 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"763e7708c5b93b0aac24dacf753290feaf99dea61a8b6d330863cb961f61fca7\": container with ID starting with 763e7708c5b93b0aac24dacf753290feaf99dea61a8b6d330863cb961f61fca7 not found: ID does not exist" containerID="763e7708c5b93b0aac24dacf753290feaf99dea61a8b6d330863cb961f61fca7" Oct 10 16:45:37 crc kubenswrapper[4799]: I1010 16:45:37.236403 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"763e7708c5b93b0aac24dacf753290feaf99dea61a8b6d330863cb961f61fca7"} err="failed to get container status \"763e7708c5b93b0aac24dacf753290feaf99dea61a8b6d330863cb961f61fca7\": rpc error: code = NotFound desc = could not find container \"763e7708c5b93b0aac24dacf753290feaf99dea61a8b6d330863cb961f61fca7\": container with ID starting with 763e7708c5b93b0aac24dacf753290feaf99dea61a8b6d330863cb961f61fca7 not found: ID does not exist" Oct 10 16:45:37 crc kubenswrapper[4799]: I1010 16:45:37.236432 4799 scope.go:117] "RemoveContainer" containerID="6d88f617adfd8909cf90d56ab7d64a5a0cc78c508c975c23de4fae779a9001a4" Oct 10 16:45:37 crc kubenswrapper[4799]: E1010 16:45:37.236693 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d88f617adfd8909cf90d56ab7d64a5a0cc78c508c975c23de4fae779a9001a4\": container with ID starting with 6d88f617adfd8909cf90d56ab7d64a5a0cc78c508c975c23de4fae779a9001a4 not found: ID does not exist" containerID="6d88f617adfd8909cf90d56ab7d64a5a0cc78c508c975c23de4fae779a9001a4" Oct 10 16:45:37 crc kubenswrapper[4799]: I1010 16:45:37.236720 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d88f617adfd8909cf90d56ab7d64a5a0cc78c508c975c23de4fae779a9001a4"} err="failed to get container status \"6d88f617adfd8909cf90d56ab7d64a5a0cc78c508c975c23de4fae779a9001a4\": rpc error: code = NotFound desc = could not find container \"6d88f617adfd8909cf90d56ab7d64a5a0cc78c508c975c23de4fae779a9001a4\": container with ID starting with 6d88f617adfd8909cf90d56ab7d64a5a0cc78c508c975c23de4fae779a9001a4 not found: ID does not exist" Oct 10 16:45:37 crc kubenswrapper[4799]: I1010 16:45:37.236739 4799 scope.go:117] "RemoveContainer" 
containerID="fd1925a498f70b18960f0672489667ad2af16d59b2e2c65d3380595d8c5b8635" Oct 10 16:45:37 crc kubenswrapper[4799]: E1010 16:45:37.236978 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd1925a498f70b18960f0672489667ad2af16d59b2e2c65d3380595d8c5b8635\": container with ID starting with fd1925a498f70b18960f0672489667ad2af16d59b2e2c65d3380595d8c5b8635 not found: ID does not exist" containerID="fd1925a498f70b18960f0672489667ad2af16d59b2e2c65d3380595d8c5b8635" Oct 10 16:45:37 crc kubenswrapper[4799]: I1010 16:45:37.237001 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd1925a498f70b18960f0672489667ad2af16d59b2e2c65d3380595d8c5b8635"} err="failed to get container status \"fd1925a498f70b18960f0672489667ad2af16d59b2e2c65d3380595d8c5b8635\": rpc error: code = NotFound desc = could not find container \"fd1925a498f70b18960f0672489667ad2af16d59b2e2c65d3380595d8c5b8635\": container with ID starting with fd1925a498f70b18960f0672489667ad2af16d59b2e2c65d3380595d8c5b8635 not found: ID does not exist" Oct 10 16:45:37 crc kubenswrapper[4799]: I1010 16:45:37.413665 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac3e2089-5e3a-4d5c-88b8-18c39e160222" path="/var/lib/kubelet/pods/ac3e2089-5e3a-4d5c-88b8-18c39e160222/volumes" Oct 10 16:45:38 crc kubenswrapper[4799]: I1010 16:45:38.453654 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-sqnvn" Oct 10 16:45:38 crc kubenswrapper[4799]: I1010 16:45:38.453735 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-sqnvn" Oct 10 16:45:38 crc kubenswrapper[4799]: I1010 16:45:38.495377 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-sqnvn" Oct 10 16:45:39 crc kubenswrapper[4799]: I1010 16:45:39.230265 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-sqnvn" Oct 10 16:45:40 crc kubenswrapper[4799]: I1010 16:45:40.663118 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-sqnvn"] Oct 10 16:45:41 crc kubenswrapper[4799]: I1010 16:45:41.169195 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-sqnvn" podUID="186e2e27-b4a2-419b-9acb-6f5becfd9f25" containerName="registry-server" containerID="cri-o://9eddfb63c14c1b27eedcbce48d62ecdc92f5fe2785c81ae64f12da8c92bbbe3a" gracePeriod=2 Oct 10 16:45:41 crc kubenswrapper[4799]: I1010 16:45:41.648893 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-sqnvn" Oct 10 16:45:41 crc kubenswrapper[4799]: I1010 16:45:41.760261 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/186e2e27-b4a2-419b-9acb-6f5becfd9f25-utilities\") pod \"186e2e27-b4a2-419b-9acb-6f5becfd9f25\" (UID: \"186e2e27-b4a2-419b-9acb-6f5becfd9f25\") " Oct 10 16:45:41 crc kubenswrapper[4799]: I1010 16:45:41.760579 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zj86t\" (UniqueName: \"kubernetes.io/projected/186e2e27-b4a2-419b-9acb-6f5becfd9f25-kube-api-access-zj86t\") pod \"186e2e27-b4a2-419b-9acb-6f5becfd9f25\" (UID: \"186e2e27-b4a2-419b-9acb-6f5becfd9f25\") " Oct 10 16:45:41 crc kubenswrapper[4799]: I1010 16:45:41.760725 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/186e2e27-b4a2-419b-9acb-6f5becfd9f25-catalog-content\") pod \"186e2e27-b4a2-419b-9acb-6f5becfd9f25\" (UID: \"186e2e27-b4a2-419b-9acb-6f5becfd9f25\") " Oct 10 16:45:41 crc kubenswrapper[4799]: I1010 16:45:41.761813 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/186e2e27-b4a2-419b-9acb-6f5becfd9f25-utilities" (OuterVolumeSpecName: "utilities") pod "186e2e27-b4a2-419b-9acb-6f5becfd9f25" (UID: "186e2e27-b4a2-419b-9acb-6f5becfd9f25"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:45:41 crc kubenswrapper[4799]: I1010 16:45:41.762189 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/186e2e27-b4a2-419b-9acb-6f5becfd9f25-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 16:45:41 crc kubenswrapper[4799]: I1010 16:45:41.766535 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/186e2e27-b4a2-419b-9acb-6f5becfd9f25-kube-api-access-zj86t" (OuterVolumeSpecName: "kube-api-access-zj86t") pod "186e2e27-b4a2-419b-9acb-6f5becfd9f25" (UID: "186e2e27-b4a2-419b-9acb-6f5becfd9f25"). InnerVolumeSpecName "kube-api-access-zj86t". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:45:41 crc kubenswrapper[4799]: I1010 16:45:41.831989 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/186e2e27-b4a2-419b-9acb-6f5becfd9f25-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "186e2e27-b4a2-419b-9acb-6f5becfd9f25" (UID: "186e2e27-b4a2-419b-9acb-6f5becfd9f25"). InnerVolumeSpecName "catalog-content". 
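
Note on the SyncLoop ADD/UPDATE/DELETE/REMOVE entries in this stretch: source="api" means the kubelet is reacting to its watch on the API server, and the teardown above mirrors the earlier setup (VerifyControllerAttachedVolume -> MountVolume.SetUp on the way up, UnmountVolume.TearDown -> "Volume detached" on the way down). The same event stream is visible to any client. A minimal client-go sketch, assuming a reachable kubeconfig at the default path (this illustrates the watch semantics, not how the kubelet itself is wired up):

    package main

    import (
        "context"
        "fmt"

        corev1 "k8s.io/api/core/v1"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
        if err != nil {
            panic(err)
        }
        cs, err := kubernetes.NewForConfig(cfg)
        if err != nil {
            panic(err)
        }
        w, err := cs.CoreV1().Pods("openshift-marketplace").Watch(context.Background(), metav1.ListOptions{})
        if err != nil {
            panic(err)
        }
        for ev := range w.ResultChan() {
            pod := ev.Object.(*corev1.Pod)
            // ADDED/MODIFIED/DELETED here correspond to the kubelet's
            // SyncLoop ADD/UPDATE/DELETE entries with source="api".
            fmt.Println(ev.Type, pod.Namespace+"/"+pod.Name)
        }
    }
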
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:45:41 crc kubenswrapper[4799]: I1010 16:45:41.864021 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zj86t\" (UniqueName: \"kubernetes.io/projected/186e2e27-b4a2-419b-9acb-6f5becfd9f25-kube-api-access-zj86t\") on node \"crc\" DevicePath \"\"" Oct 10 16:45:41 crc kubenswrapper[4799]: I1010 16:45:41.864060 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/186e2e27-b4a2-419b-9acb-6f5becfd9f25-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 16:45:42 crc kubenswrapper[4799]: I1010 16:45:42.176078 4799 generic.go:334] "Generic (PLEG): container finished" podID="186e2e27-b4a2-419b-9acb-6f5becfd9f25" containerID="9eddfb63c14c1b27eedcbce48d62ecdc92f5fe2785c81ae64f12da8c92bbbe3a" exitCode=0 Oct 10 16:45:42 crc kubenswrapper[4799]: I1010 16:45:42.176133 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sqnvn" event={"ID":"186e2e27-b4a2-419b-9acb-6f5becfd9f25","Type":"ContainerDied","Data":"9eddfb63c14c1b27eedcbce48d62ecdc92f5fe2785c81ae64f12da8c92bbbe3a"} Oct 10 16:45:42 crc kubenswrapper[4799]: I1010 16:45:42.176173 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sqnvn" event={"ID":"186e2e27-b4a2-419b-9acb-6f5becfd9f25","Type":"ContainerDied","Data":"6f5d35a444656fb30affecbbfea168eec3e3de54136e1e14742f16b21fc0ab9b"} Oct 10 16:45:42 crc kubenswrapper[4799]: I1010 16:45:42.176172 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-sqnvn" Oct 10 16:45:42 crc kubenswrapper[4799]: I1010 16:45:42.176191 4799 scope.go:117] "RemoveContainer" containerID="9eddfb63c14c1b27eedcbce48d62ecdc92f5fe2785c81ae64f12da8c92bbbe3a" Oct 10 16:45:42 crc kubenswrapper[4799]: I1010 16:45:42.204238 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-sqnvn"] Oct 10 16:45:42 crc kubenswrapper[4799]: I1010 16:45:42.209307 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-sqnvn"] Oct 10 16:45:42 crc kubenswrapper[4799]: I1010 16:45:42.218149 4799 scope.go:117] "RemoveContainer" containerID="2b5d223eb808142350ba57a1674b507d3f11dcd067fa151b1089c36dcca0acd6" Oct 10 16:45:42 crc kubenswrapper[4799]: I1010 16:45:42.237025 4799 scope.go:117] "RemoveContainer" containerID="a9a17f063810bee0a1e1e376cf08a697166c6ddded95310621641fe2004850a4" Oct 10 16:45:42 crc kubenswrapper[4799]: I1010 16:45:42.256720 4799 scope.go:117] "RemoveContainer" containerID="9eddfb63c14c1b27eedcbce48d62ecdc92f5fe2785c81ae64f12da8c92bbbe3a" Oct 10 16:45:42 crc kubenswrapper[4799]: E1010 16:45:42.257166 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9eddfb63c14c1b27eedcbce48d62ecdc92f5fe2785c81ae64f12da8c92bbbe3a\": container with ID starting with 9eddfb63c14c1b27eedcbce48d62ecdc92f5fe2785c81ae64f12da8c92bbbe3a not found: ID does not exist" containerID="9eddfb63c14c1b27eedcbce48d62ecdc92f5fe2785c81ae64f12da8c92bbbe3a" Oct 10 16:45:42 crc kubenswrapper[4799]: I1010 16:45:42.257206 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9eddfb63c14c1b27eedcbce48d62ecdc92f5fe2785c81ae64f12da8c92bbbe3a"} err="failed to get container status 
\"9eddfb63c14c1b27eedcbce48d62ecdc92f5fe2785c81ae64f12da8c92bbbe3a\": rpc error: code = NotFound desc = could not find container \"9eddfb63c14c1b27eedcbce48d62ecdc92f5fe2785c81ae64f12da8c92bbbe3a\": container with ID starting with 9eddfb63c14c1b27eedcbce48d62ecdc92f5fe2785c81ae64f12da8c92bbbe3a not found: ID does not exist" Oct 10 16:45:42 crc kubenswrapper[4799]: I1010 16:45:42.257228 4799 scope.go:117] "RemoveContainer" containerID="2b5d223eb808142350ba57a1674b507d3f11dcd067fa151b1089c36dcca0acd6" Oct 10 16:45:42 crc kubenswrapper[4799]: E1010 16:45:42.257537 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b5d223eb808142350ba57a1674b507d3f11dcd067fa151b1089c36dcca0acd6\": container with ID starting with 2b5d223eb808142350ba57a1674b507d3f11dcd067fa151b1089c36dcca0acd6 not found: ID does not exist" containerID="2b5d223eb808142350ba57a1674b507d3f11dcd067fa151b1089c36dcca0acd6" Oct 10 16:45:42 crc kubenswrapper[4799]: I1010 16:45:42.257563 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b5d223eb808142350ba57a1674b507d3f11dcd067fa151b1089c36dcca0acd6"} err="failed to get container status \"2b5d223eb808142350ba57a1674b507d3f11dcd067fa151b1089c36dcca0acd6\": rpc error: code = NotFound desc = could not find container \"2b5d223eb808142350ba57a1674b507d3f11dcd067fa151b1089c36dcca0acd6\": container with ID starting with 2b5d223eb808142350ba57a1674b507d3f11dcd067fa151b1089c36dcca0acd6 not found: ID does not exist" Oct 10 16:45:42 crc kubenswrapper[4799]: I1010 16:45:42.257578 4799 scope.go:117] "RemoveContainer" containerID="a9a17f063810bee0a1e1e376cf08a697166c6ddded95310621641fe2004850a4" Oct 10 16:45:42 crc kubenswrapper[4799]: E1010 16:45:42.257940 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a9a17f063810bee0a1e1e376cf08a697166c6ddded95310621641fe2004850a4\": container with ID starting with a9a17f063810bee0a1e1e376cf08a697166c6ddded95310621641fe2004850a4 not found: ID does not exist" containerID="a9a17f063810bee0a1e1e376cf08a697166c6ddded95310621641fe2004850a4" Oct 10 16:45:42 crc kubenswrapper[4799]: I1010 16:45:42.257967 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9a17f063810bee0a1e1e376cf08a697166c6ddded95310621641fe2004850a4"} err="failed to get container status \"a9a17f063810bee0a1e1e376cf08a697166c6ddded95310621641fe2004850a4\": rpc error: code = NotFound desc = could not find container \"a9a17f063810bee0a1e1e376cf08a697166c6ddded95310621641fe2004850a4\": container with ID starting with a9a17f063810bee0a1e1e376cf08a697166c6ddded95310621641fe2004850a4 not found: ID does not exist" Oct 10 16:45:42 crc kubenswrapper[4799]: I1010 16:45:42.339596 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-75cc6fd4f5-h4qjd" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.178989 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-ktzxr"] Oct 10 16:45:43 crc kubenswrapper[4799]: E1010 16:45:43.179232 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac3e2089-5e3a-4d5c-88b8-18c39e160222" containerName="extract-content" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.179246 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac3e2089-5e3a-4d5c-88b8-18c39e160222" 
containerName="extract-content" Oct 10 16:45:43 crc kubenswrapper[4799]: E1010 16:45:43.179258 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="186e2e27-b4a2-419b-9acb-6f5becfd9f25" containerName="extract-utilities" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.179266 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="186e2e27-b4a2-419b-9acb-6f5becfd9f25" containerName="extract-utilities" Oct 10 16:45:43 crc kubenswrapper[4799]: E1010 16:45:43.179277 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac3e2089-5e3a-4d5c-88b8-18c39e160222" containerName="registry-server" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.179286 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac3e2089-5e3a-4d5c-88b8-18c39e160222" containerName="registry-server" Oct 10 16:45:43 crc kubenswrapper[4799]: E1010 16:45:43.179303 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="186e2e27-b4a2-419b-9acb-6f5becfd9f25" containerName="extract-content" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.179310 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="186e2e27-b4a2-419b-9acb-6f5becfd9f25" containerName="extract-content" Oct 10 16:45:43 crc kubenswrapper[4799]: E1010 16:45:43.179319 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="186e2e27-b4a2-419b-9acb-6f5becfd9f25" containerName="registry-server" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.179325 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="186e2e27-b4a2-419b-9acb-6f5becfd9f25" containerName="registry-server" Oct 10 16:45:43 crc kubenswrapper[4799]: E1010 16:45:43.179339 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac3e2089-5e3a-4d5c-88b8-18c39e160222" containerName="extract-utilities" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.179345 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac3e2089-5e3a-4d5c-88b8-18c39e160222" containerName="extract-utilities" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.179471 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac3e2089-5e3a-4d5c-88b8-18c39e160222" containerName="registry-server" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.179483 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="186e2e27-b4a2-419b-9acb-6f5becfd9f25" containerName="registry-server" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.179955 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-ktzxr" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.185278 4799 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.186857 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-zsdl8"] Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.188963 4799 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-b2s7p" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.189471 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-zsdl8" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.191309 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.192611 4799 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.210124 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-ktzxr"] Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.272533 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-6n9wt"] Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.273408 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-6n9wt" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.275283 4799 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.275573 4799 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-l74ck" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.275768 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.276125 4799 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.284243 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/230d418d-5545-483e-996f-533e967cf0a4-metrics\") pod \"frr-k8s-zsdl8\" (UID: \"230d418d-5545-483e-996f-533e967cf0a4\") " pod="metallb-system/frr-k8s-zsdl8" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.284280 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/230d418d-5545-483e-996f-533e967cf0a4-metrics-certs\") pod \"frr-k8s-zsdl8\" (UID: \"230d418d-5545-483e-996f-533e967cf0a4\") " pod="metallb-system/frr-k8s-zsdl8" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.284309 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/b5825284-c8ef-4b48-9838-3da136e5a670-metallb-excludel2\") pod \"speaker-6n9wt\" (UID: \"b5825284-c8ef-4b48-9838-3da136e5a670\") " pod="metallb-system/speaker-6n9wt" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.284380 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnsmc\" (UniqueName: \"kubernetes.io/projected/b5825284-c8ef-4b48-9838-3da136e5a670-kube-api-access-vnsmc\") pod \"speaker-6n9wt\" (UID: \"b5825284-c8ef-4b48-9838-3da136e5a670\") " pod="metallb-system/speaker-6n9wt" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.284414 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/230d418d-5545-483e-996f-533e967cf0a4-frr-sockets\") pod \"frr-k8s-zsdl8\" (UID: \"230d418d-5545-483e-996f-533e967cf0a4\") " pod="metallb-system/frr-k8s-zsdl8" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.284538 
4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/230d418d-5545-483e-996f-533e967cf0a4-frr-conf\") pod \"frr-k8s-zsdl8\" (UID: \"230d418d-5545-483e-996f-533e967cf0a4\") " pod="metallb-system/frr-k8s-zsdl8" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.284566 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c6b6d702-2a2f-4ae0-8ab4-69129ebf689e-cert\") pod \"frr-k8s-webhook-server-64bf5d555-ktzxr\" (UID: \"c6b6d702-2a2f-4ae0-8ab4-69129ebf689e\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-ktzxr" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.284619 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/230d418d-5545-483e-996f-533e967cf0a4-reloader\") pod \"frr-k8s-zsdl8\" (UID: \"230d418d-5545-483e-996f-533e967cf0a4\") " pod="metallb-system/frr-k8s-zsdl8" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.284642 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d725t\" (UniqueName: \"kubernetes.io/projected/c6b6d702-2a2f-4ae0-8ab4-69129ebf689e-kube-api-access-d725t\") pod \"frr-k8s-webhook-server-64bf5d555-ktzxr\" (UID: \"c6b6d702-2a2f-4ae0-8ab4-69129ebf689e\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-ktzxr" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.284662 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/230d418d-5545-483e-996f-533e967cf0a4-frr-startup\") pod \"frr-k8s-zsdl8\" (UID: \"230d418d-5545-483e-996f-533e967cf0a4\") " pod="metallb-system/frr-k8s-zsdl8" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.284709 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zkc9\" (UniqueName: \"kubernetes.io/projected/230d418d-5545-483e-996f-533e967cf0a4-kube-api-access-4zkc9\") pod \"frr-k8s-zsdl8\" (UID: \"230d418d-5545-483e-996f-533e967cf0a4\") " pod="metallb-system/frr-k8s-zsdl8" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.284825 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b5825284-c8ef-4b48-9838-3da136e5a670-memberlist\") pod \"speaker-6n9wt\" (UID: \"b5825284-c8ef-4b48-9838-3da136e5a670\") " pod="metallb-system/speaker-6n9wt" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.284880 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b5825284-c8ef-4b48-9838-3da136e5a670-metrics-certs\") pod \"speaker-6n9wt\" (UID: \"b5825284-c8ef-4b48-9838-3da136e5a670\") " pod="metallb-system/speaker-6n9wt" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.290390 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-68d546b9d8-sbrtn"] Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.291238 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-68d546b9d8-sbrtn" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.295086 4799 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.309521 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-sbrtn"] Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.385791 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/230d418d-5545-483e-996f-533e967cf0a4-frr-conf\") pod \"frr-k8s-zsdl8\" (UID: \"230d418d-5545-483e-996f-533e967cf0a4\") " pod="metallb-system/frr-k8s-zsdl8" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.385828 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c6b6d702-2a2f-4ae0-8ab4-69129ebf689e-cert\") pod \"frr-k8s-webhook-server-64bf5d555-ktzxr\" (UID: \"c6b6d702-2a2f-4ae0-8ab4-69129ebf689e\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-ktzxr" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.385852 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/230d418d-5545-483e-996f-533e967cf0a4-reloader\") pod \"frr-k8s-zsdl8\" (UID: \"230d418d-5545-483e-996f-533e967cf0a4\") " pod="metallb-system/frr-k8s-zsdl8" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.385882 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/230d418d-5545-483e-996f-533e967cf0a4-frr-startup\") pod \"frr-k8s-zsdl8\" (UID: \"230d418d-5545-483e-996f-533e967cf0a4\") " pod="metallb-system/frr-k8s-zsdl8" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.385901 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d725t\" (UniqueName: \"kubernetes.io/projected/c6b6d702-2a2f-4ae0-8ab4-69129ebf689e-kube-api-access-d725t\") pod \"frr-k8s-webhook-server-64bf5d555-ktzxr\" (UID: \"c6b6d702-2a2f-4ae0-8ab4-69129ebf689e\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-ktzxr" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.385922 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndxlc\" (UniqueName: \"kubernetes.io/projected/95668eab-11e1-4872-a646-b8573de8e2cc-kube-api-access-ndxlc\") pod \"controller-68d546b9d8-sbrtn\" (UID: \"95668eab-11e1-4872-a646-b8573de8e2cc\") " pod="metallb-system/controller-68d546b9d8-sbrtn" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.385945 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zkc9\" (UniqueName: \"kubernetes.io/projected/230d418d-5545-483e-996f-533e967cf0a4-kube-api-access-4zkc9\") pod \"frr-k8s-zsdl8\" (UID: \"230d418d-5545-483e-996f-533e967cf0a4\") " pod="metallb-system/frr-k8s-zsdl8" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.385962 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b5825284-c8ef-4b48-9838-3da136e5a670-memberlist\") pod \"speaker-6n9wt\" (UID: \"b5825284-c8ef-4b48-9838-3da136e5a670\") " pod="metallb-system/speaker-6n9wt" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.385979 4799 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/95668eab-11e1-4872-a646-b8573de8e2cc-metrics-certs\") pod \"controller-68d546b9d8-sbrtn\" (UID: \"95668eab-11e1-4872-a646-b8573de8e2cc\") " pod="metallb-system/controller-68d546b9d8-sbrtn" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.385995 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b5825284-c8ef-4b48-9838-3da136e5a670-metrics-certs\") pod \"speaker-6n9wt\" (UID: \"b5825284-c8ef-4b48-9838-3da136e5a670\") " pod="metallb-system/speaker-6n9wt" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.386010 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/230d418d-5545-483e-996f-533e967cf0a4-metrics\") pod \"frr-k8s-zsdl8\" (UID: \"230d418d-5545-483e-996f-533e967cf0a4\") " pod="metallb-system/frr-k8s-zsdl8" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.386025 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/230d418d-5545-483e-996f-533e967cf0a4-metrics-certs\") pod \"frr-k8s-zsdl8\" (UID: \"230d418d-5545-483e-996f-533e967cf0a4\") " pod="metallb-system/frr-k8s-zsdl8" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.386045 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/b5825284-c8ef-4b48-9838-3da136e5a670-metallb-excludel2\") pod \"speaker-6n9wt\" (UID: \"b5825284-c8ef-4b48-9838-3da136e5a670\") " pod="metallb-system/speaker-6n9wt" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.386067 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnsmc\" (UniqueName: \"kubernetes.io/projected/b5825284-c8ef-4b48-9838-3da136e5a670-kube-api-access-vnsmc\") pod \"speaker-6n9wt\" (UID: \"b5825284-c8ef-4b48-9838-3da136e5a670\") " pod="metallb-system/speaker-6n9wt" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.386083 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/230d418d-5545-483e-996f-533e967cf0a4-frr-sockets\") pod \"frr-k8s-zsdl8\" (UID: \"230d418d-5545-483e-996f-533e967cf0a4\") " pod="metallb-system/frr-k8s-zsdl8" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.386122 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/95668eab-11e1-4872-a646-b8573de8e2cc-cert\") pod \"controller-68d546b9d8-sbrtn\" (UID: \"95668eab-11e1-4872-a646-b8573de8e2cc\") " pod="metallb-system/controller-68d546b9d8-sbrtn" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.386284 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/230d418d-5545-483e-996f-533e967cf0a4-frr-conf\") pod \"frr-k8s-zsdl8\" (UID: \"230d418d-5545-483e-996f-533e967cf0a4\") " pod="metallb-system/frr-k8s-zsdl8" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.386488 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/230d418d-5545-483e-996f-533e967cf0a4-reloader\") pod \"frr-k8s-zsdl8\" (UID: \"230d418d-5545-483e-996f-533e967cf0a4\") " 
pod="metallb-system/frr-k8s-zsdl8" Oct 10 16:45:43 crc kubenswrapper[4799]: E1010 16:45:43.386577 4799 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 10 16:45:43 crc kubenswrapper[4799]: E1010 16:45:43.386627 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5825284-c8ef-4b48-9838-3da136e5a670-memberlist podName:b5825284-c8ef-4b48-9838-3da136e5a670 nodeName:}" failed. No retries permitted until 2025-10-10 16:45:43.886610917 +0000 UTC m=+837.394935032 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/b5825284-c8ef-4b48-9838-3da136e5a670-memberlist") pod "speaker-6n9wt" (UID: "b5825284-c8ef-4b48-9838-3da136e5a670") : secret "metallb-memberlist" not found Oct 10 16:45:43 crc kubenswrapper[4799]: E1010 16:45:43.386812 4799 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Oct 10 16:45:43 crc kubenswrapper[4799]: E1010 16:45:43.386842 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5825284-c8ef-4b48-9838-3da136e5a670-metrics-certs podName:b5825284-c8ef-4b48-9838-3da136e5a670 nodeName:}" failed. No retries permitted until 2025-10-10 16:45:43.886834343 +0000 UTC m=+837.395158578 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b5825284-c8ef-4b48-9838-3da136e5a670-metrics-certs") pod "speaker-6n9wt" (UID: "b5825284-c8ef-4b48-9838-3da136e5a670") : secret "speaker-certs-secret" not found Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.387192 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/230d418d-5545-483e-996f-533e967cf0a4-frr-sockets\") pod \"frr-k8s-zsdl8\" (UID: \"230d418d-5545-483e-996f-533e967cf0a4\") " pod="metallb-system/frr-k8s-zsdl8" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.387382 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/230d418d-5545-483e-996f-533e967cf0a4-metrics\") pod \"frr-k8s-zsdl8\" (UID: \"230d418d-5545-483e-996f-533e967cf0a4\") " pod="metallb-system/frr-k8s-zsdl8" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.387386 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/b5825284-c8ef-4b48-9838-3da136e5a670-metallb-excludel2\") pod \"speaker-6n9wt\" (UID: \"b5825284-c8ef-4b48-9838-3da136e5a670\") " pod="metallb-system/speaker-6n9wt" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.387386 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/230d418d-5545-483e-996f-533e967cf0a4-frr-startup\") pod \"frr-k8s-zsdl8\" (UID: \"230d418d-5545-483e-996f-533e967cf0a4\") " pod="metallb-system/frr-k8s-zsdl8" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.395724 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c6b6d702-2a2f-4ae0-8ab4-69129ebf689e-cert\") pod \"frr-k8s-webhook-server-64bf5d555-ktzxr\" (UID: \"c6b6d702-2a2f-4ae0-8ab4-69129ebf689e\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-ktzxr" Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.395778 4799 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/230d418d-5545-483e-996f-533e967cf0a4-metrics-certs\") pod \"frr-k8s-zsdl8\" (UID: \"230d418d-5545-483e-996f-533e967cf0a4\") " pod="metallb-system/frr-k8s-zsdl8"
Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.413088 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="186e2e27-b4a2-419b-9acb-6f5becfd9f25" path="/var/lib/kubelet/pods/186e2e27-b4a2-419b-9acb-6f5becfd9f25/volumes"
Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.414321 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zkc9\" (UniqueName: \"kubernetes.io/projected/230d418d-5545-483e-996f-533e967cf0a4-kube-api-access-4zkc9\") pod \"frr-k8s-zsdl8\" (UID: \"230d418d-5545-483e-996f-533e967cf0a4\") " pod="metallb-system/frr-k8s-zsdl8"
Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.420924 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnsmc\" (UniqueName: \"kubernetes.io/projected/b5825284-c8ef-4b48-9838-3da136e5a670-kube-api-access-vnsmc\") pod \"speaker-6n9wt\" (UID: \"b5825284-c8ef-4b48-9838-3da136e5a670\") " pod="metallb-system/speaker-6n9wt"
Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.424416 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d725t\" (UniqueName: \"kubernetes.io/projected/c6b6d702-2a2f-4ae0-8ab4-69129ebf689e-kube-api-access-d725t\") pod \"frr-k8s-webhook-server-64bf5d555-ktzxr\" (UID: \"c6b6d702-2a2f-4ae0-8ab4-69129ebf689e\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-ktzxr"
Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.487608 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndxlc\" (UniqueName: \"kubernetes.io/projected/95668eab-11e1-4872-a646-b8573de8e2cc-kube-api-access-ndxlc\") pod \"controller-68d546b9d8-sbrtn\" (UID: \"95668eab-11e1-4872-a646-b8573de8e2cc\") " pod="metallb-system/controller-68d546b9d8-sbrtn"
Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.487686 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/95668eab-11e1-4872-a646-b8573de8e2cc-metrics-certs\") pod \"controller-68d546b9d8-sbrtn\" (UID: \"95668eab-11e1-4872-a646-b8573de8e2cc\") " pod="metallb-system/controller-68d546b9d8-sbrtn"
Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.487801 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/95668eab-11e1-4872-a646-b8573de8e2cc-cert\") pod \"controller-68d546b9d8-sbrtn\" (UID: \"95668eab-11e1-4872-a646-b8573de8e2cc\") " pod="metallb-system/controller-68d546b9d8-sbrtn"
Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.491390 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/95668eab-11e1-4872-a646-b8573de8e2cc-cert\") pod \"controller-68d546b9d8-sbrtn\" (UID: \"95668eab-11e1-4872-a646-b8573de8e2cc\") " pod="metallb-system/controller-68d546b9d8-sbrtn"
Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.492521 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/95668eab-11e1-4872-a646-b8573de8e2cc-metrics-certs\") pod \"controller-68d546b9d8-sbrtn\" (UID: \"95668eab-11e1-4872-a646-b8573de8e2cc\") " pod="metallb-system/controller-68d546b9d8-sbrtn"
Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.505926 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndxlc\" (UniqueName: \"kubernetes.io/projected/95668eab-11e1-4872-a646-b8573de8e2cc-kube-api-access-ndxlc\") pod \"controller-68d546b9d8-sbrtn\" (UID: \"95668eab-11e1-4872-a646-b8573de8e2cc\") " pod="metallb-system/controller-68d546b9d8-sbrtn"
Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.506631 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-ktzxr"
Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.521403 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-zsdl8"
Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.601718 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-68d546b9d8-sbrtn"
Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.893435 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b5825284-c8ef-4b48-9838-3da136e5a670-memberlist\") pod \"speaker-6n9wt\" (UID: \"b5825284-c8ef-4b48-9838-3da136e5a670\") " pod="metallb-system/speaker-6n9wt"
Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.893718 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b5825284-c8ef-4b48-9838-3da136e5a670-metrics-certs\") pod \"speaker-6n9wt\" (UID: \"b5825284-c8ef-4b48-9838-3da136e5a670\") " pod="metallb-system/speaker-6n9wt"
Oct 10 16:45:43 crc kubenswrapper[4799]: E1010 16:45:43.893621 4799 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Oct 10 16:45:43 crc kubenswrapper[4799]: E1010 16:45:43.893875 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5825284-c8ef-4b48-9838-3da136e5a670-memberlist podName:b5825284-c8ef-4b48-9838-3da136e5a670 nodeName:}" failed. No retries permitted until 2025-10-10 16:45:44.893848415 +0000 UTC m=+838.402172610 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/b5825284-c8ef-4b48-9838-3da136e5a670-memberlist") pod "speaker-6n9wt" (UID: "b5825284-c8ef-4b48-9838-3da136e5a670") : secret "metallb-memberlist" not found
Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.897490 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b5825284-c8ef-4b48-9838-3da136e5a670-metrics-certs\") pod \"speaker-6n9wt\" (UID: \"b5825284-c8ef-4b48-9838-3da136e5a670\") " pod="metallb-system/speaker-6n9wt"
Oct 10 16:45:43 crc kubenswrapper[4799]: I1010 16:45:43.927365 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-ktzxr"]
Oct 10 16:45:43 crc kubenswrapper[4799]: W1010 16:45:43.939685 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc6b6d702_2a2f_4ae0_8ab4_69129ebf689e.slice/crio-bdcc0f3586ff84e1a7abe846e03b46bfcad001d8a656ab93fa79c37510e27e1f WatchSource:0}: Error finding container bdcc0f3586ff84e1a7abe846e03b46bfcad001d8a656ab93fa79c37510e27e1f: Status 404 returned error can't find the container with id bdcc0f3586ff84e1a7abe846e03b46bfcad001d8a656ab93fa79c37510e27e1f
Oct 10 16:45:44 crc kubenswrapper[4799]: I1010 16:45:44.088618 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-sbrtn"]
Oct 10 16:45:44 crc kubenswrapper[4799]: W1010 16:45:44.097542 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod95668eab_11e1_4872_a646_b8573de8e2cc.slice/crio-cc56f3109db93f28d66c491f1944450acc1b1ae3a3bfc7611aaeac9bf0840648 WatchSource:0}: Error finding container cc56f3109db93f28d66c491f1944450acc1b1ae3a3bfc7611aaeac9bf0840648: Status 404 returned error can't find the container with id cc56f3109db93f28d66c491f1944450acc1b1ae3a3bfc7611aaeac9bf0840648
Oct 10 16:45:44 crc kubenswrapper[4799]: I1010 16:45:44.193629 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-ktzxr" event={"ID":"c6b6d702-2a2f-4ae0-8ab4-69129ebf689e","Type":"ContainerStarted","Data":"bdcc0f3586ff84e1a7abe846e03b46bfcad001d8a656ab93fa79c37510e27e1f"}
Oct 10 16:45:44 crc kubenswrapper[4799]: I1010 16:45:44.196211 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zsdl8" event={"ID":"230d418d-5545-483e-996f-533e967cf0a4","Type":"ContainerStarted","Data":"25147541d9aede46f4f65bbc214d4d9e55db1c41fd18f35d2d284090f160e5c5"}
Oct 10 16:45:44 crc kubenswrapper[4799]: I1010 16:45:44.198270 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-sbrtn" event={"ID":"95668eab-11e1-4872-a646-b8573de8e2cc","Type":"ContainerStarted","Data":"cc56f3109db93f28d66c491f1944450acc1b1ae3a3bfc7611aaeac9bf0840648"}
Oct 10 16:45:44 crc kubenswrapper[4799]: I1010 16:45:44.906710 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b5825284-c8ef-4b48-9838-3da136e5a670-memberlist\") pod \"speaker-6n9wt\" (UID: \"b5825284-c8ef-4b48-9838-3da136e5a670\") " pod="metallb-system/speaker-6n9wt"
Oct 10 16:45:44 crc kubenswrapper[4799]: I1010 16:45:44.915338 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b5825284-c8ef-4b48-9838-3da136e5a670-memberlist\") pod \"speaker-6n9wt\" (UID: \"b5825284-c8ef-4b48-9838-3da136e5a670\") " pod="metallb-system/speaker-6n9wt"
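The two E1010 entries above show the kubelet's retry policy when a volume SetUp fails: nestedpendingoperations.go refuses further attempts on the same volume until a backoff deadline ("No retries permitted until ... durationBeforeRetry 1s"), and the delay doubles on each consecutive failure of the same operation up to a cap. A schematic sketch of that doubling policy follows; the 1s seed matches the entry above, while the 2m2s cap is the upstream kubelet default at the time of writing and should be treated as an assumption.

```go
package main

import (
	"fmt"
	"time"
)

// backoff models the per-operation exponential backoff that
// nestedpendingoperations applies to a repeatedly failing volume mount:
// double the wait after every consecutive failure, up to a cap.
type backoff struct {
	delay, cap time.Duration
}

func (b *backoff) next() time.Duration {
	if b.delay == 0 {
		b.delay = time.Second // matches "durationBeforeRetry 1s" in the log
		return b.delay
	}
	b.delay *= 2
	if b.delay > b.cap {
		b.delay = b.cap
	}
	return b.delay
}

func main() {
	b := &backoff{cap: 2*time.Minute + 2*time.Second} // assumed cap
	for i := 1; i <= 8; i++ {
		fmt.Printf("failure %d: no retries permitted for %v\n", i, b.next())
	}
}
```

Here a single retry suffices: the metallb-memberlist secret appears within the second, and the mount succeeds at 16:45:44.915338 in the entry just above.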
\"kubernetes.io/secret/b5825284-c8ef-4b48-9838-3da136e5a670-memberlist\") pod \"speaker-6n9wt\" (UID: \"b5825284-c8ef-4b48-9838-3da136e5a670\") " pod="metallb-system/speaker-6n9wt" Oct 10 16:45:45 crc kubenswrapper[4799]: I1010 16:45:45.087735 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-6n9wt" Oct 10 16:45:45 crc kubenswrapper[4799]: W1010 16:45:45.122976 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb5825284_c8ef_4b48_9838_3da136e5a670.slice/crio-29237e1006287a20793c39015bae6f96ca01b2a93c2b5753eaf46ba3804dfa19 WatchSource:0}: Error finding container 29237e1006287a20793c39015bae6f96ca01b2a93c2b5753eaf46ba3804dfa19: Status 404 returned error can't find the container with id 29237e1006287a20793c39015bae6f96ca01b2a93c2b5753eaf46ba3804dfa19 Oct 10 16:45:45 crc kubenswrapper[4799]: I1010 16:45:45.214042 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-sbrtn" event={"ID":"95668eab-11e1-4872-a646-b8573de8e2cc","Type":"ContainerStarted","Data":"1c7d8c883660bfd5bf5fb014d2418a4df6ea0cfd7b1c59caf96f4dcee63f6440"} Oct 10 16:45:45 crc kubenswrapper[4799]: I1010 16:45:45.214112 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-sbrtn" event={"ID":"95668eab-11e1-4872-a646-b8573de8e2cc","Type":"ContainerStarted","Data":"30dffd5321575a6e03be95b46c87fd1c4868094701e4d928e708a9b9608c97fe"} Oct 10 16:45:45 crc kubenswrapper[4799]: I1010 16:45:45.214151 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-68d546b9d8-sbrtn" Oct 10 16:45:45 crc kubenswrapper[4799]: I1010 16:45:45.216164 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-6n9wt" event={"ID":"b5825284-c8ef-4b48-9838-3da136e5a670","Type":"ContainerStarted","Data":"29237e1006287a20793c39015bae6f96ca01b2a93c2b5753eaf46ba3804dfa19"} Oct 10 16:45:45 crc kubenswrapper[4799]: I1010 16:45:45.232078 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-68d546b9d8-sbrtn" podStartSLOduration=2.232065337 podStartE2EDuration="2.232065337s" podCreationTimestamp="2025-10-10 16:45:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:45:45.231364579 +0000 UTC m=+838.739688694" watchObservedRunningTime="2025-10-10 16:45:45.232065337 +0000 UTC m=+838.740389452" Oct 10 16:45:46 crc kubenswrapper[4799]: I1010 16:45:46.226573 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-6n9wt" event={"ID":"b5825284-c8ef-4b48-9838-3da136e5a670","Type":"ContainerStarted","Data":"4f944d0d9c7f9286857b26323dcd3bae8bfc67a98c931524ccc7d5f5c8dd1287"} Oct 10 16:45:46 crc kubenswrapper[4799]: I1010 16:45:46.226905 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-6n9wt" event={"ID":"b5825284-c8ef-4b48-9838-3da136e5a670","Type":"ContainerStarted","Data":"60627f43a5e8d1ec663b2f1d1453517c660f37b7548b1ce00360ecdc76f20f61"} Oct 10 16:45:46 crc kubenswrapper[4799]: I1010 16:45:46.226923 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-6n9wt" Oct 10 16:45:46 crc kubenswrapper[4799]: I1010 16:45:46.252686 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-6n9wt" 
podStartSLOduration=3.252665262 podStartE2EDuration="3.252665262s" podCreationTimestamp="2025-10-10 16:45:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:45:46.252463877 +0000 UTC m=+839.760787992" watchObservedRunningTime="2025-10-10 16:45:46.252665262 +0000 UTC m=+839.760989377" Oct 10 16:45:51 crc kubenswrapper[4799]: I1010 16:45:51.269970 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-ktzxr" event={"ID":"c6b6d702-2a2f-4ae0-8ab4-69129ebf689e","Type":"ContainerStarted","Data":"1f4fdf80e6395eab582010fae6ea6c0d5efcebacf2d10c9729574a9ac9259656"} Oct 10 16:45:51 crc kubenswrapper[4799]: I1010 16:45:51.270606 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-ktzxr" Oct 10 16:45:51 crc kubenswrapper[4799]: I1010 16:45:51.272675 4799 generic.go:334] "Generic (PLEG): container finished" podID="230d418d-5545-483e-996f-533e967cf0a4" containerID="2e6eed9eaefe03f602db716e96f529391958ff0228d0d78d70cdfac1b4a7b091" exitCode=0 Oct 10 16:45:51 crc kubenswrapper[4799]: I1010 16:45:51.272715 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zsdl8" event={"ID":"230d418d-5545-483e-996f-533e967cf0a4","Type":"ContainerDied","Data":"2e6eed9eaefe03f602db716e96f529391958ff0228d0d78d70cdfac1b4a7b091"} Oct 10 16:45:51 crc kubenswrapper[4799]: I1010 16:45:51.302602 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-ktzxr" podStartSLOduration=1.384414134 podStartE2EDuration="8.302576825s" podCreationTimestamp="2025-10-10 16:45:43 +0000 UTC" firstStartedPulling="2025-10-10 16:45:43.943498938 +0000 UTC m=+837.451823053" lastFinishedPulling="2025-10-10 16:45:50.861661619 +0000 UTC m=+844.369985744" observedRunningTime="2025-10-10 16:45:51.298197287 +0000 UTC m=+844.806521412" watchObservedRunningTime="2025-10-10 16:45:51.302576825 +0000 UTC m=+844.810900980" Oct 10 16:45:52 crc kubenswrapper[4799]: I1010 16:45:52.285609 4799 generic.go:334] "Generic (PLEG): container finished" podID="230d418d-5545-483e-996f-533e967cf0a4" containerID="dfaa7e03a4bd53a299139bc86bc802a47db39feb73ef9ee0d8482f41d538b774" exitCode=0 Oct 10 16:45:52 crc kubenswrapper[4799]: I1010 16:45:52.285688 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zsdl8" event={"ID":"230d418d-5545-483e-996f-533e967cf0a4","Type":"ContainerDied","Data":"dfaa7e03a4bd53a299139bc86bc802a47db39feb73ef9ee0d8482f41d538b774"} Oct 10 16:45:53 crc kubenswrapper[4799]: I1010 16:45:53.294254 4799 generic.go:334] "Generic (PLEG): container finished" podID="230d418d-5545-483e-996f-533e967cf0a4" containerID="25ef457305193b31a7ba22de5bd70cd50a6567a7ab78ac4881753bb4c176f1e5" exitCode=0 Oct 10 16:45:53 crc kubenswrapper[4799]: I1010 16:45:53.294544 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zsdl8" event={"ID":"230d418d-5545-483e-996f-533e967cf0a4","Type":"ContainerDied","Data":"25ef457305193b31a7ba22de5bd70cd50a6567a7ab78ac4881753bb4c176f1e5"} Oct 10 16:45:54 crc kubenswrapper[4799]: I1010 16:45:54.308257 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zsdl8" event={"ID":"230d418d-5545-483e-996f-533e967cf0a4","Type":"ContainerStarted","Data":"e0424e5969a72134a9defdc1d72f1adf3452270d4c217743565ac60c51f270cd"} Oct 10 16:45:54 crc 
kubenswrapper[4799]: I1010 16:45:54.308577 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zsdl8" event={"ID":"230d418d-5545-483e-996f-533e967cf0a4","Type":"ContainerStarted","Data":"79af65ce8fb84a844941ebed5690d723c07a1839fb32a3e472acd8522c233682"} Oct 10 16:45:54 crc kubenswrapper[4799]: I1010 16:45:54.308591 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zsdl8" event={"ID":"230d418d-5545-483e-996f-533e967cf0a4","Type":"ContainerStarted","Data":"9af0fa7eb8093a604af557b04e64b0c7c5b74837de4834687f58e5952e8bec3e"} Oct 10 16:45:54 crc kubenswrapper[4799]: I1010 16:45:54.308603 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zsdl8" event={"ID":"230d418d-5545-483e-996f-533e967cf0a4","Type":"ContainerStarted","Data":"ef2bd7754a676054f538cbecb047fa954e371204d8f58c7e7dd31ee0fbf51be6"} Oct 10 16:45:54 crc kubenswrapper[4799]: I1010 16:45:54.308614 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zsdl8" event={"ID":"230d418d-5545-483e-996f-533e967cf0a4","Type":"ContainerStarted","Data":"48fcb1993d9dec07c3732fb2e52bb3df7d6f0404aade494d00346982107802da"} Oct 10 16:45:55 crc kubenswrapper[4799]: I1010 16:45:55.094079 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-6n9wt" Oct 10 16:45:55 crc kubenswrapper[4799]: I1010 16:45:55.321076 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zsdl8" event={"ID":"230d418d-5545-483e-996f-533e967cf0a4","Type":"ContainerStarted","Data":"2218582c744b7c441d344e29bf2e35cba67ce24044f6acc2efd0cbe42ff8635b"} Oct 10 16:45:55 crc kubenswrapper[4799]: I1010 16:45:55.321444 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-zsdl8" Oct 10 16:45:55 crc kubenswrapper[4799]: I1010 16:45:55.358935 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-zsdl8" podStartSLOduration=5.166903864 podStartE2EDuration="12.358910262s" podCreationTimestamp="2025-10-10 16:45:43 +0000 UTC" firstStartedPulling="2025-10-10 16:45:43.688864024 +0000 UTC m=+837.197188139" lastFinishedPulling="2025-10-10 16:45:50.880870402 +0000 UTC m=+844.389194537" observedRunningTime="2025-10-10 16:45:55.350582346 +0000 UTC m=+848.858906531" watchObservedRunningTime="2025-10-10 16:45:55.358910262 +0000 UTC m=+848.867234407" Oct 10 16:45:56 crc kubenswrapper[4799]: I1010 16:45:56.473820 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b"] Oct 10 16:45:56 crc kubenswrapper[4799]: I1010 16:45:56.477209 4799 util.go:30] "No sandbox for pod can be found. 
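The pod_startup_latency_tracker entries print enough fields to reconstruct their own arithmetic: podStartE2EDuration is observedRunningTime minus podCreationTimestamp, and podStartSLOduration additionally subtracts the image-pull window (lastFinishedPulling minus firstStartedPulling). This relationship is inferred from the entries themselves rather than quoted from kubelet source; the check below plugs in the frr-k8s-zsdl8 numbers above and agrees to within the rounding of the second-precision creation timestamp.

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// Layout matching time.Time's default String() output used in the log.
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	parse := func(s string) time.Time {
		t, err := time.Parse(layout, s)
		if err != nil {
			panic(err)
		}
		return t
	}

	created := parse("2025-10-10 16:45:43 +0000 UTC")             // podCreationTimestamp
	firstPull := parse("2025-10-10 16:45:43.688864024 +0000 UTC") // firstStartedPulling
	lastPull := parse("2025-10-10 16:45:50.880870402 +0000 UTC")  // lastFinishedPulling
	running := parse("2025-10-10 16:45:55.358910262 +0000 UTC")   // observedRunningTime

	e2e := running.Sub(created)          // 12.358910262s, the podStartE2EDuration above
	slo := e2e - lastPull.Sub(firstPull) // ~5.1669s, the podStartSLOduration above

	fmt.Println(e2e, slo)
}
```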
Oct 10 16:45:56 crc kubenswrapper[4799]: I1010 16:45:56.481672 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Oct 10 16:45:56 crc kubenswrapper[4799]: I1010 16:45:56.487044 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b"]
Oct 10 16:45:56 crc kubenswrapper[4799]: I1010 16:45:56.575126 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/acb519d9-d10f-4149-bd99-88526d2a60c5-util\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b\" (UID: \"acb519d9-d10f-4149-bd99-88526d2a60c5\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b"
Oct 10 16:45:56 crc kubenswrapper[4799]: I1010 16:45:56.575205 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/acb519d9-d10f-4149-bd99-88526d2a60c5-bundle\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b\" (UID: \"acb519d9-d10f-4149-bd99-88526d2a60c5\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b"
Oct 10 16:45:56 crc kubenswrapper[4799]: I1010 16:45:56.575332 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qwqz\" (UniqueName: \"kubernetes.io/projected/acb519d9-d10f-4149-bd99-88526d2a60c5-kube-api-access-6qwqz\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b\" (UID: \"acb519d9-d10f-4149-bd99-88526d2a60c5\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b"
Oct 10 16:45:56 crc kubenswrapper[4799]: I1010 16:45:56.676222 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/acb519d9-d10f-4149-bd99-88526d2a60c5-bundle\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b\" (UID: \"acb519d9-d10f-4149-bd99-88526d2a60c5\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b"
Oct 10 16:45:56 crc kubenswrapper[4799]: I1010 16:45:56.676274 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qwqz\" (UniqueName: \"kubernetes.io/projected/acb519d9-d10f-4149-bd99-88526d2a60c5-kube-api-access-6qwqz\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b\" (UID: \"acb519d9-d10f-4149-bd99-88526d2a60c5\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b"
Oct 10 16:45:56 crc kubenswrapper[4799]: I1010 16:45:56.676323 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/acb519d9-d10f-4149-bd99-88526d2a60c5-util\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b\" (UID: \"acb519d9-d10f-4149-bd99-88526d2a60c5\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b"
Oct 10 16:45:56 crc kubenswrapper[4799]: I1010 16:45:56.676815 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/acb519d9-d10f-4149-bd99-88526d2a60c5-bundle\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b\" (UID: \"acb519d9-d10f-4149-bd99-88526d2a60c5\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b"
Oct 10 16:45:56 crc kubenswrapper[4799]: I1010 16:45:56.676833 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/acb519d9-d10f-4149-bd99-88526d2a60c5-util\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b\" (UID: \"acb519d9-d10f-4149-bd99-88526d2a60c5\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b"
Oct 10 16:45:56 crc kubenswrapper[4799]: I1010 16:45:56.695739 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qwqz\" (UniqueName: \"kubernetes.io/projected/acb519d9-d10f-4149-bd99-88526d2a60c5-kube-api-access-6qwqz\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b\" (UID: \"acb519d9-d10f-4149-bd99-88526d2a60c5\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b"
Oct 10 16:45:56 crc kubenswrapper[4799]: I1010 16:45:56.798205 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b"
Oct 10 16:45:57 crc kubenswrapper[4799]: I1010 16:45:57.278911 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b"]
Oct 10 16:45:57 crc kubenswrapper[4799]: W1010 16:45:57.287961 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podacb519d9_d10f_4149_bd99_88526d2a60c5.slice/crio-4e54902e95beda9ab26c2c33aded7191c1bbf60e12cee4151da9ff99e9522da7 WatchSource:0}: Error finding container 4e54902e95beda9ab26c2c33aded7191c1bbf60e12cee4151da9ff99e9522da7: Status 404 returned error can't find the container with id 4e54902e95beda9ab26c2c33aded7191c1bbf60e12cee4151da9ff99e9522da7
Oct 10 16:45:57 crc kubenswrapper[4799]: I1010 16:45:57.333100 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b" event={"ID":"acb519d9-d10f-4149-bd99-88526d2a60c5","Type":"ContainerStarted","Data":"4e54902e95beda9ab26c2c33aded7191c1bbf60e12cee4151da9ff99e9522da7"}
Oct 10 16:45:58 crc kubenswrapper[4799]: I1010 16:45:58.340288 4799 generic.go:334] "Generic (PLEG): container finished" podID="acb519d9-d10f-4149-bd99-88526d2a60c5" containerID="f417242199254f1019987c48d4faa3141cde4e576ebd6b3d17cf3065a544be5b" exitCode=0
Oct 10 16:45:58 crc kubenswrapper[4799]: I1010 16:45:58.340367 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b" event={"ID":"acb519d9-d10f-4149-bd99-88526d2a60c5","Type":"ContainerDied","Data":"f417242199254f1019987c48d4faa3141cde4e576ebd6b3d17cf3065a544be5b"}
Oct 10 16:45:58 crc kubenswrapper[4799]: I1010 16:45:58.522021 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-zsdl8"
Oct 10 16:45:58 crc kubenswrapper[4799]: I1010 16:45:58.579864 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-zsdl8"
Oct 10 16:46:01 crc kubenswrapper[4799]: I1010 16:46:01.369135 4799 generic.go:334] "Generic (PLEG): container finished" podID="acb519d9-d10f-4149-bd99-88526d2a60c5" containerID="2bc6503bc3be0ab491755083c7070694504f797eb7711681c01ca10582f452e9" exitCode=0
Oct 10 16:46:01 crc kubenswrapper[4799]: I1010 16:46:01.369277 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b" event={"ID":"acb519d9-d10f-4149-bd99-88526d2a60c5","Type":"ContainerDied","Data":"2bc6503bc3be0ab491755083c7070694504f797eb7711681c01ca10582f452e9"}
Oct 10 16:46:02 crc kubenswrapper[4799]: I1010 16:46:02.376730 4799 generic.go:334] "Generic (PLEG): container finished" podID="acb519d9-d10f-4149-bd99-88526d2a60c5" containerID="a0db6c22f6cc98ff671b036ee8ea367573ac4c1a0bd96bb3803789e4d1599ff8" exitCode=0
Oct 10 16:46:02 crc kubenswrapper[4799]: I1010 16:46:02.376798 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b" event={"ID":"acb519d9-d10f-4149-bd99-88526d2a60c5","Type":"ContainerDied","Data":"a0db6c22f6cc98ff671b036ee8ea367573ac4c1a0bd96bb3803789e4d1599ff8"}
Oct 10 16:46:03 crc kubenswrapper[4799]: I1010 16:46:03.547244 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-zsdl8"
Oct 10 16:46:03 crc kubenswrapper[4799]: I1010 16:46:03.550017 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-ktzxr"
Oct 10 16:46:03 crc kubenswrapper[4799]: I1010 16:46:03.609634 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-68d546b9d8-sbrtn"
Oct 10 16:46:03 crc kubenswrapper[4799]: I1010 16:46:03.815422 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b"
Oct 10 16:46:03 crc kubenswrapper[4799]: I1010 16:46:03.896656 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6qwqz\" (UniqueName: \"kubernetes.io/projected/acb519d9-d10f-4149-bd99-88526d2a60c5-kube-api-access-6qwqz\") pod \"acb519d9-d10f-4149-bd99-88526d2a60c5\" (UID: \"acb519d9-d10f-4149-bd99-88526d2a60c5\") "
Oct 10 16:46:03 crc kubenswrapper[4799]: I1010 16:46:03.896749 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/acb519d9-d10f-4149-bd99-88526d2a60c5-bundle\") pod \"acb519d9-d10f-4149-bd99-88526d2a60c5\" (UID: \"acb519d9-d10f-4149-bd99-88526d2a60c5\") "
Oct 10 16:46:03 crc kubenswrapper[4799]: I1010 16:46:03.896844 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/acb519d9-d10f-4149-bd99-88526d2a60c5-util\") pod \"acb519d9-d10f-4149-bd99-88526d2a60c5\" (UID: \"acb519d9-d10f-4149-bd99-88526d2a60c5\") "
Oct 10 16:46:03 crc kubenswrapper[4799]: I1010 16:46:03.898371 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/acb519d9-d10f-4149-bd99-88526d2a60c5-bundle" (OuterVolumeSpecName: "bundle") pod "acb519d9-d10f-4149-bd99-88526d2a60c5" (UID: "acb519d9-d10f-4149-bd99-88526d2a60c5"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 16:46:03 crc kubenswrapper[4799]: I1010 16:46:03.907481 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/acb519d9-d10f-4149-bd99-88526d2a60c5-kube-api-access-6qwqz" (OuterVolumeSpecName: "kube-api-access-6qwqz") pod "acb519d9-d10f-4149-bd99-88526d2a60c5" (UID: "acb519d9-d10f-4149-bd99-88526d2a60c5"). InnerVolumeSpecName "kube-api-access-6qwqz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 16:46:03 crc kubenswrapper[4799]: I1010 16:46:03.919584 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/acb519d9-d10f-4149-bd99-88526d2a60c5-util" (OuterVolumeSpecName: "util") pod "acb519d9-d10f-4149-bd99-88526d2a60c5" (UID: "acb519d9-d10f-4149-bd99-88526d2a60c5"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 16:46:03 crc kubenswrapper[4799]: I1010 16:46:03.998316 4799 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/acb519d9-d10f-4149-bd99-88526d2a60c5-util\") on node \"crc\" DevicePath \"\""
Oct 10 16:46:03 crc kubenswrapper[4799]: I1010 16:46:03.998360 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6qwqz\" (UniqueName: \"kubernetes.io/projected/acb519d9-d10f-4149-bd99-88526d2a60c5-kube-api-access-6qwqz\") on node \"crc\" DevicePath \"\""
Oct 10 16:46:03 crc kubenswrapper[4799]: I1010 16:46:03.998376 4799 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/acb519d9-d10f-4149-bd99-88526d2a60c5-bundle\") on node \"crc\" DevicePath \"\""
Oct 10 16:46:04 crc kubenswrapper[4799]: I1010 16:46:04.402334 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b" event={"ID":"acb519d9-d10f-4149-bd99-88526d2a60c5","Type":"ContainerDied","Data":"4e54902e95beda9ab26c2c33aded7191c1bbf60e12cee4151da9ff99e9522da7"}
Oct 10 16:46:04 crc kubenswrapper[4799]: I1010 16:46:04.402394 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b"
Oct 10 16:46:04 crc kubenswrapper[4799]: I1010 16:46:04.402403 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4e54902e95beda9ab26c2c33aded7191c1bbf60e12cee4151da9ff99e9522da7"
Oct 10 16:46:10 crc kubenswrapper[4799]: I1010 16:46:10.059942 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-kb5km"]
Oct 10 16:46:10 crc kubenswrapper[4799]: E1010 16:46:10.060552 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acb519d9-d10f-4149-bd99-88526d2a60c5" containerName="util"
Oct 10 16:46:10 crc kubenswrapper[4799]: I1010 16:46:10.060571 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="acb519d9-d10f-4149-bd99-88526d2a60c5" containerName="util"
Oct 10 16:46:10 crc kubenswrapper[4799]: E1010 16:46:10.060611 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acb519d9-d10f-4149-bd99-88526d2a60c5" containerName="pull"
Oct 10 16:46:10 crc kubenswrapper[4799]: I1010 16:46:10.060621 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="acb519d9-d10f-4149-bd99-88526d2a60c5" containerName="pull"
Oct 10 16:46:10 crc kubenswrapper[4799]: E1010 16:46:10.060633 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acb519d9-d10f-4149-bd99-88526d2a60c5" containerName="extract"
Oct 10 16:46:10 crc kubenswrapper[4799]: I1010 16:46:10.060641 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="acb519d9-d10f-4149-bd99-88526d2a60c5" containerName="extract"
Oct 10 16:46:10 crc kubenswrapper[4799]: I1010 16:46:10.060794 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="acb519d9-d10f-4149-bd99-88526d2a60c5" containerName="extract"
Oct 10 16:46:10 crc kubenswrapper[4799]: I1010 16:46:10.061247 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-kb5km"
Oct 10 16:46:10 crc kubenswrapper[4799]: I1010 16:46:10.064442 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt"
Oct 10 16:46:10 crc kubenswrapper[4799]: I1010 16:46:10.064746 4799 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-7m9k8"
Oct 10 16:46:10 crc kubenswrapper[4799]: I1010 16:46:10.064775 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt"
Oct 10 16:46:10 crc kubenswrapper[4799]: I1010 16:46:10.088177 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-kb5km"]
Oct 10 16:46:10 crc kubenswrapper[4799]: I1010 16:46:10.188678 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqbpw\" (UniqueName: \"kubernetes.io/projected/4059dd2a-549c-4bbc-8f54-bdc82e662e4d-kube-api-access-wqbpw\") pod \"cert-manager-operator-controller-manager-57cd46d6d-kb5km\" (UID: \"4059dd2a-549c-4bbc-8f54-bdc82e662e4d\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-kb5km"
Oct 10 16:46:10 crc kubenswrapper[4799]: I1010 16:46:10.290452 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqbpw\" (UniqueName: \"kubernetes.io/projected/4059dd2a-549c-4bbc-8f54-bdc82e662e4d-kube-api-access-wqbpw\") pod \"cert-manager-operator-controller-manager-57cd46d6d-kb5km\" (UID: \"4059dd2a-549c-4bbc-8f54-bdc82e662e4d\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-kb5km"
Oct 10 16:46:10 crc kubenswrapper[4799]: I1010 16:46:10.316921 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wqbpw\" (UniqueName: \"kubernetes.io/projected/4059dd2a-549c-4bbc-8f54-bdc82e662e4d-kube-api-access-wqbpw\") pod \"cert-manager-operator-controller-manager-57cd46d6d-kb5km\" (UID: \"4059dd2a-549c-4bbc-8f54-bdc82e662e4d\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-kb5km"
Oct 10 16:46:10 crc kubenswrapper[4799]: I1010 16:46:10.383244 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-kb5km"
Oct 10 16:46:10 crc kubenswrapper[4799]: I1010 16:46:10.826146 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-kb5km"]
Oct 10 16:46:10 crc kubenswrapper[4799]: W1010 16:46:10.830990 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4059dd2a_549c_4bbc_8f54_bdc82e662e4d.slice/crio-b7dad754aa7730f0f8b8144f2f47df7d013de73a81ddc94c0d08fca1b1dfdab4 WatchSource:0}: Error finding container b7dad754aa7730f0f8b8144f2f47df7d013de73a81ddc94c0d08fca1b1dfdab4: Status 404 returned error can't find the container with id b7dad754aa7730f0f8b8144f2f47df7d013de73a81ddc94c0d08fca1b1dfdab4
Oct 10 16:46:11 crc kubenswrapper[4799]: I1010 16:46:11.445332 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-kb5km" event={"ID":"4059dd2a-549c-4bbc-8f54-bdc82e662e4d","Type":"ContainerStarted","Data":"b7dad754aa7730f0f8b8144f2f47df7d013de73a81ddc94c0d08fca1b1dfdab4"}
Oct 10 16:46:18 crc kubenswrapper[4799]: I1010 16:46:18.488719 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-kb5km" event={"ID":"4059dd2a-549c-4bbc-8f54-bdc82e662e4d","Type":"ContainerStarted","Data":"4ada44a6f040486c0e9a1296ed9a6b414117aab0bb1ad06c4368eb4301fceccc"}
Oct 10 16:46:18 crc kubenswrapper[4799]: I1010 16:46:18.532899 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-kb5km" podStartSLOduration=1.678650921 podStartE2EDuration="8.532786807s" podCreationTimestamp="2025-10-10 16:46:10 +0000 UTC" firstStartedPulling="2025-10-10 16:46:10.834168639 +0000 UTC m=+864.342492754" lastFinishedPulling="2025-10-10 16:46:17.688304515 +0000 UTC m=+871.196628640" observedRunningTime="2025-10-10 16:46:18.512582559 +0000 UTC m=+872.020906704" watchObservedRunningTime="2025-10-10 16:46:18.532786807 +0000 UTC m=+872.041110922"
Oct 10 16:46:21 crc kubenswrapper[4799]: I1010 16:46:21.876984 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-d969966f-klvh7"]
Oct 10 16:46:21 crc kubenswrapper[4799]: I1010 16:46:21.878189 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-d969966f-klvh7"
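Every timestamp in these entries carries an m=+NNN.NNNNNNNNN suffix, e.g. m=+864.342492754. That is Go's monotonic clock reading, which time.Time's default formatting appends: seconds since the process captured its monotonic zero at startup, so across entries it simply counts kubelet uptime while the wall-clock part stays parseable. A small demonstration:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	fmt.Println(time.Now()) // "... m=+0.000...": monotonic seconds since process start

	time.Sleep(1500 * time.Millisecond)
	fmt.Println(time.Now()) // m=+1.5..., the same counter the kubelet entries show

	// Round(0) strips the monotonic reading, so the m= suffix disappears.
	fmt.Println(time.Now().Round(0))
}
```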
Oct 10 16:46:21 crc kubenswrapper[4799]: I1010 16:46:21.879870 4799 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-wlscf"
Oct 10 16:46:21 crc kubenswrapper[4799]: I1010 16:46:21.880695 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt"
Oct 10 16:46:21 crc kubenswrapper[4799]: I1010 16:46:21.885183 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt"
Oct 10 16:46:21 crc kubenswrapper[4799]: I1010 16:46:21.895278 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-d969966f-klvh7"]
Oct 10 16:46:21 crc kubenswrapper[4799]: I1010 16:46:21.956091 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9125ea98-355f-4ec6-ac42-ae70f1dedd24-bound-sa-token\") pod \"cert-manager-webhook-d969966f-klvh7\" (UID: \"9125ea98-355f-4ec6-ac42-ae70f1dedd24\") " pod="cert-manager/cert-manager-webhook-d969966f-klvh7"
Oct 10 16:46:21 crc kubenswrapper[4799]: I1010 16:46:21.956170 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kjzpk\" (UniqueName: \"kubernetes.io/projected/9125ea98-355f-4ec6-ac42-ae70f1dedd24-kube-api-access-kjzpk\") pod \"cert-manager-webhook-d969966f-klvh7\" (UID: \"9125ea98-355f-4ec6-ac42-ae70f1dedd24\") " pod="cert-manager/cert-manager-webhook-d969966f-klvh7"
Oct 10 16:46:22 crc kubenswrapper[4799]: I1010 16:46:22.057851 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9125ea98-355f-4ec6-ac42-ae70f1dedd24-bound-sa-token\") pod \"cert-manager-webhook-d969966f-klvh7\" (UID: \"9125ea98-355f-4ec6-ac42-ae70f1dedd24\") " pod="cert-manager/cert-manager-webhook-d969966f-klvh7"
Oct 10 16:46:22 crc kubenswrapper[4799]: I1010 16:46:22.057914 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kjzpk\" (UniqueName: \"kubernetes.io/projected/9125ea98-355f-4ec6-ac42-ae70f1dedd24-kube-api-access-kjzpk\") pod \"cert-manager-webhook-d969966f-klvh7\" (UID: \"9125ea98-355f-4ec6-ac42-ae70f1dedd24\") " pod="cert-manager/cert-manager-webhook-d969966f-klvh7"
Oct 10 16:46:22 crc kubenswrapper[4799]: I1010 16:46:22.080218 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9125ea98-355f-4ec6-ac42-ae70f1dedd24-bound-sa-token\") pod \"cert-manager-webhook-d969966f-klvh7\" (UID: \"9125ea98-355f-4ec6-ac42-ae70f1dedd24\") " pod="cert-manager/cert-manager-webhook-d969966f-klvh7"
Oct 10 16:46:22 crc kubenswrapper[4799]: I1010 16:46:22.087069 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kjzpk\" (UniqueName: \"kubernetes.io/projected/9125ea98-355f-4ec6-ac42-ae70f1dedd24-kube-api-access-kjzpk\") pod \"cert-manager-webhook-d969966f-klvh7\" (UID: \"9125ea98-355f-4ec6-ac42-ae70f1dedd24\") " pod="cert-manager/cert-manager-webhook-d969966f-klvh7"
Oct 10 16:46:22 crc kubenswrapper[4799]: I1010 16:46:22.195600 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-d969966f-klvh7"
Oct 10 16:46:22 crc kubenswrapper[4799]: I1010 16:46:22.461158 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-d969966f-klvh7"]
Oct 10 16:46:22 crc kubenswrapper[4799]: I1010 16:46:22.509240 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-d969966f-klvh7" event={"ID":"9125ea98-355f-4ec6-ac42-ae70f1dedd24","Type":"ContainerStarted","Data":"8c99a452a0e539bb5dd0ce3c522b71fc15ccf89f0656db1cb0c6eb6db40891bf"}
Oct 10 16:46:25 crc kubenswrapper[4799]: I1010 16:46:25.240173 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7d9f95dbf-4gd47"]
Oct 10 16:46:25 crc kubenswrapper[4799]: I1010 16:46:25.242503 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-4gd47"
Oct 10 16:46:25 crc kubenswrapper[4799]: I1010 16:46:25.244182 4799 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-tjx9q"
Oct 10 16:46:25 crc kubenswrapper[4799]: I1010 16:46:25.244355 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7d9f95dbf-4gd47"]
Oct 10 16:46:25 crc kubenswrapper[4799]: I1010 16:46:25.299194 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7cd45f74-0b01-4d01-8d27-35cd165fff89-bound-sa-token\") pod \"cert-manager-cainjector-7d9f95dbf-4gd47\" (UID: \"7cd45f74-0b01-4d01-8d27-35cd165fff89\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-4gd47"
Oct 10 16:46:25 crc kubenswrapper[4799]: I1010 16:46:25.299387 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8g9fq\" (UniqueName: \"kubernetes.io/projected/7cd45f74-0b01-4d01-8d27-35cd165fff89-kube-api-access-8g9fq\") pod \"cert-manager-cainjector-7d9f95dbf-4gd47\" (UID: \"7cd45f74-0b01-4d01-8d27-35cd165fff89\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-4gd47"
Oct 10 16:46:25 crc kubenswrapper[4799]: I1010 16:46:25.400897 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7cd45f74-0b01-4d01-8d27-35cd165fff89-bound-sa-token\") pod \"cert-manager-cainjector-7d9f95dbf-4gd47\" (UID: \"7cd45f74-0b01-4d01-8d27-35cd165fff89\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-4gd47"
Oct 10 16:46:25 crc kubenswrapper[4799]: I1010 16:46:25.401017 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8g9fq\" (UniqueName: \"kubernetes.io/projected/7cd45f74-0b01-4d01-8d27-35cd165fff89-kube-api-access-8g9fq\") pod \"cert-manager-cainjector-7d9f95dbf-4gd47\" (UID: \"7cd45f74-0b01-4d01-8d27-35cd165fff89\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-4gd47"
Oct 10 16:46:25 crc kubenswrapper[4799]: I1010 16:46:25.424314 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7cd45f74-0b01-4d01-8d27-35cd165fff89-bound-sa-token\") pod \"cert-manager-cainjector-7d9f95dbf-4gd47\" (UID: \"7cd45f74-0b01-4d01-8d27-35cd165fff89\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-4gd47"
Oct 10 16:46:25 crc kubenswrapper[4799]: I1010 16:46:25.424462 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8g9fq\" (UniqueName: \"kubernetes.io/projected/7cd45f74-0b01-4d01-8d27-35cd165fff89-kube-api-access-8g9fq\") pod \"cert-manager-cainjector-7d9f95dbf-4gd47\" (UID: \"7cd45f74-0b01-4d01-8d27-35cd165fff89\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-4gd47"
Oct 10 16:46:25 crc kubenswrapper[4799]: I1010 16:46:25.565983 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-4gd47"
Oct 10 16:46:26 crc kubenswrapper[4799]: I1010 16:46:26.852900 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7d9f95dbf-4gd47"]
Oct 10 16:46:26 crc kubenswrapper[4799]: W1010 16:46:26.854161 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7cd45f74_0b01_4d01_8d27_35cd165fff89.slice/crio-116a3c00f41eb4bc22d2804fa68799aa7a7ce9e8d1737a284088c11e91dbec0e WatchSource:0}: Error finding container 116a3c00f41eb4bc22d2804fa68799aa7a7ce9e8d1737a284088c11e91dbec0e: Status 404 returned error can't find the container with id 116a3c00f41eb4bc22d2804fa68799aa7a7ce9e8d1737a284088c11e91dbec0e
Oct 10 16:46:27 crc kubenswrapper[4799]: I1010 16:46:27.542394 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-d969966f-klvh7" event={"ID":"9125ea98-355f-4ec6-ac42-ae70f1dedd24","Type":"ContainerStarted","Data":"620ca177505e1bd019a5811dcc892a548c1ff05116260c5d5308689a482524a5"}
Oct 10 16:46:27 crc kubenswrapper[4799]: I1010 16:46:27.542921 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-d969966f-klvh7"
Oct 10 16:46:27 crc kubenswrapper[4799]: I1010 16:46:27.546556 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-4gd47" event={"ID":"7cd45f74-0b01-4d01-8d27-35cd165fff89","Type":"ContainerStarted","Data":"07721aab8bc31f422fb5760acadb172a64bf7a817fc41fe635bfe5b17b7c2560"}
Oct 10 16:46:27 crc kubenswrapper[4799]: I1010 16:46:27.546587 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-4gd47" event={"ID":"7cd45f74-0b01-4d01-8d27-35cd165fff89","Type":"ContainerStarted","Data":"116a3c00f41eb4bc22d2804fa68799aa7a7ce9e8d1737a284088c11e91dbec0e"}
Oct 10 16:46:27 crc kubenswrapper[4799]: I1010 16:46:27.565909 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-d969966f-klvh7" podStartSLOduration=2.5283408400000003 podStartE2EDuration="6.565888864s" podCreationTimestamp="2025-10-10 16:46:21 +0000 UTC" firstStartedPulling="2025-10-10 16:46:22.476721983 +0000 UTC m=+875.985046108" lastFinishedPulling="2025-10-10 16:46:26.514270017 +0000 UTC m=+880.022594132" observedRunningTime="2025-10-10 16:46:27.558660215 +0000 UTC m=+881.066984350" watchObservedRunningTime="2025-10-10 16:46:27.565888864 +0000 UTC m=+881.074212999"
Oct 10 16:46:27 crc kubenswrapper[4799]: I1010 16:46:27.583155 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-4gd47" podStartSLOduration=2.583136129 podStartE2EDuration="2.583136129s" podCreationTimestamp="2025-10-10 16:46:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:46:27.580689088 +0000 UTC m=+881.089013203" watchObservedRunningTime="2025-10-10 16:46:27.583136129 +0000 UTC m=+881.091460244"
Oct 10 16:46:32 crc kubenswrapper[4799]: I1010 16:46:32.198442 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-d969966f-klvh7"
Oct 10 16:46:41 crc kubenswrapper[4799]: I1010 16:46:41.424192 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-7d4cc89fcb-cwpfj"]
Oct 10 16:46:41 crc kubenswrapper[4799]: I1010 16:46:41.425670 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-7d4cc89fcb-cwpfj"
Oct 10 16:46:41 crc kubenswrapper[4799]: I1010 16:46:41.428204 4799 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-dnhg5"
Oct 10 16:46:41 crc kubenswrapper[4799]: I1010 16:46:41.438026 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-7d4cc89fcb-cwpfj"]
Oct 10 16:46:41 crc kubenswrapper[4799]: I1010 16:46:41.525582 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/74f3da58-30e9-4ee2-8324-e8bc0b8f84c3-bound-sa-token\") pod \"cert-manager-7d4cc89fcb-cwpfj\" (UID: \"74f3da58-30e9-4ee2-8324-e8bc0b8f84c3\") " pod="cert-manager/cert-manager-7d4cc89fcb-cwpfj"
Oct 10 16:46:41 crc kubenswrapper[4799]: I1010 16:46:41.525831 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlwsl\" (UniqueName: \"kubernetes.io/projected/74f3da58-30e9-4ee2-8324-e8bc0b8f84c3-kube-api-access-mlwsl\") pod \"cert-manager-7d4cc89fcb-cwpfj\" (UID: \"74f3da58-30e9-4ee2-8324-e8bc0b8f84c3\") " pod="cert-manager/cert-manager-7d4cc89fcb-cwpfj"
Oct 10 16:46:41 crc kubenswrapper[4799]: I1010 16:46:41.627930 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/74f3da58-30e9-4ee2-8324-e8bc0b8f84c3-bound-sa-token\") pod \"cert-manager-7d4cc89fcb-cwpfj\" (UID: \"74f3da58-30e9-4ee2-8324-e8bc0b8f84c3\") " pod="cert-manager/cert-manager-7d4cc89fcb-cwpfj"
Oct 10 16:46:41 crc kubenswrapper[4799]: I1010 16:46:41.628141 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlwsl\" (UniqueName: \"kubernetes.io/projected/74f3da58-30e9-4ee2-8324-e8bc0b8f84c3-kube-api-access-mlwsl\") pod \"cert-manager-7d4cc89fcb-cwpfj\" (UID: \"74f3da58-30e9-4ee2-8324-e8bc0b8f84c3\") " pod="cert-manager/cert-manager-7d4cc89fcb-cwpfj"
Oct 10 16:46:41 crc kubenswrapper[4799]: I1010 16:46:41.665664 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/74f3da58-30e9-4ee2-8324-e8bc0b8f84c3-bound-sa-token\") pod \"cert-manager-7d4cc89fcb-cwpfj\" (UID: \"74f3da58-30e9-4ee2-8324-e8bc0b8f84c3\") " pod="cert-manager/cert-manager-7d4cc89fcb-cwpfj"
Oct 10 16:46:41 crc kubenswrapper[4799]: I1010 16:46:41.666225 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mlwsl\" (UniqueName: \"kubernetes.io/projected/74f3da58-30e9-4ee2-8324-e8bc0b8f84c3-kube-api-access-mlwsl\") pod \"cert-manager-7d4cc89fcb-cwpfj\" (UID: \"74f3da58-30e9-4ee2-8324-e8bc0b8f84c3\") " pod="cert-manager/cert-manager-7d4cc89fcb-cwpfj"
Oct 10 16:46:41 crc kubenswrapper[4799]: I1010 16:46:41.776191 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-7d4cc89fcb-cwpfj"
Oct 10 16:46:42 crc kubenswrapper[4799]: I1010 16:46:42.277478 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-7d4cc89fcb-cwpfj"]
Oct 10 16:46:42 crc kubenswrapper[4799]: W1010 16:46:42.291971 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod74f3da58_30e9_4ee2_8324_e8bc0b8f84c3.slice/crio-3b1037f7bc7bcd2c8e402226bb0edc47af0e6eb704cb2859c0cbe60117e0001e WatchSource:0}: Error finding container 3b1037f7bc7bcd2c8e402226bb0edc47af0e6eb704cb2859c0cbe60117e0001e: Status 404 returned error can't find the container with id 3b1037f7bc7bcd2c8e402226bb0edc47af0e6eb704cb2859c0cbe60117e0001e
Oct 10 16:46:42 crc kubenswrapper[4799]: I1010 16:46:42.654041 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-7d4cc89fcb-cwpfj" event={"ID":"74f3da58-30e9-4ee2-8324-e8bc0b8f84c3","Type":"ContainerStarted","Data":"37d6dc1d5c69830d03d3c77c22d32c9eb845e415900ba840b695f4e062b57607"}
Oct 10 16:46:42 crc kubenswrapper[4799]: I1010 16:46:42.654470 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-7d4cc89fcb-cwpfj" event={"ID":"74f3da58-30e9-4ee2-8324-e8bc0b8f84c3","Type":"ContainerStarted","Data":"3b1037f7bc7bcd2c8e402226bb0edc47af0e6eb704cb2859c0cbe60117e0001e"}
Oct 10 16:46:42 crc kubenswrapper[4799]: I1010 16:46:42.677313 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-7d4cc89fcb-cwpfj" podStartSLOduration=1.677282924 podStartE2EDuration="1.677282924s" podCreationTimestamp="2025-10-10 16:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:46:42.675961201 +0000 UTC m=+896.184285386" watchObservedRunningTime="2025-10-10 16:46:42.677282924 +0000 UTC m=+896.185607109"
Oct 10 16:46:45 crc kubenswrapper[4799]: I1010 16:46:45.248377 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 10 16:46:45 crc kubenswrapper[4799]: I1010 16:46:45.248862 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 10 16:46:46 crc kubenswrapper[4799]: I1010 16:46:46.257055 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-g4bfn"]
Oct 10 16:46:46 crc kubenswrapper[4799]: I1010 16:46:46.258259 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-g4bfn"
Oct 10 16:46:46 crc kubenswrapper[4799]: I1010 16:46:46.260518 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt"
Oct 10 16:46:46 crc kubenswrapper[4799]: I1010 16:46:46.260551 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-zhm5r"
Oct 10 16:46:46 crc kubenswrapper[4799]: I1010 16:46:46.264303 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt"
Oct 10 16:46:46 crc kubenswrapper[4799]: I1010 16:46:46.295039 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-g4bfn"]
Oct 10 16:46:46 crc kubenswrapper[4799]: I1010 16:46:46.298802 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvsvh\" (UniqueName: \"kubernetes.io/projected/8b7ffcd9-bba6-432d-ab3a-7d4f62130602-kube-api-access-vvsvh\") pod \"openstack-operator-index-g4bfn\" (UID: \"8b7ffcd9-bba6-432d-ab3a-7d4f62130602\") " pod="openstack-operators/openstack-operator-index-g4bfn"
Oct 10 16:46:46 crc kubenswrapper[4799]: I1010 16:46:46.399849 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvsvh\" (UniqueName: \"kubernetes.io/projected/8b7ffcd9-bba6-432d-ab3a-7d4f62130602-kube-api-access-vvsvh\") pod \"openstack-operator-index-g4bfn\" (UID: \"8b7ffcd9-bba6-432d-ab3a-7d4f62130602\") " pod="openstack-operators/openstack-operator-index-g4bfn"
Oct 10 16:46:46 crc kubenswrapper[4799]: I1010 16:46:46.423208 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vvsvh\" (UniqueName: \"kubernetes.io/projected/8b7ffcd9-bba6-432d-ab3a-7d4f62130602-kube-api-access-vvsvh\") pod \"openstack-operator-index-g4bfn\" (UID: \"8b7ffcd9-bba6-432d-ab3a-7d4f62130602\") " pod="openstack-operators/openstack-operator-index-g4bfn"
Oct 10 16:46:46 crc kubenswrapper[4799]: I1010 16:46:46.586498 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-g4bfn"
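The patch_prober/prober.go pair in the block above records a failed HTTP liveness probe: a GET to http://127.0.0.1:8798/health (path and port from the log) answered with connect: connection refused, i.e. nothing was listening while the machine-config-daemon was down. Mechanically the check is just an HTTP GET with a timeout, roughly like the sketch below; the kubelet's real prober additionally handles custom headers, redirects, and response-body capture.

```go
package main

import (
	"fmt"
	"net/http"
	"time"
)

// probe performs one HTTP liveness check in the spirit of the log entry:
// any transport error (such as connection refused) or an error-class
// status code counts as a failure.
func probe(url string) error {
	client := &http.Client{Timeout: time.Second}
	resp, err := client.Get(url)
	if err != nil {
		return err // e.g. dial tcp 127.0.0.1:8798: connect: connection refused
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("unexpected status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	if err := probe("http://127.0.0.1:8798/health"); err != nil {
		fmt.Println("Probe failed:", err)
	}
}
```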
Need to start a new one" pod="openstack-operators/openstack-operator-index-g4bfn" Oct 10 16:46:47 crc kubenswrapper[4799]: I1010 16:46:47.055734 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-g4bfn"] Oct 10 16:46:47 crc kubenswrapper[4799]: W1010 16:46:47.060278 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8b7ffcd9_bba6_432d_ab3a_7d4f62130602.slice/crio-eafa4b9b4d37bf59553049bb24e747a24e42dc4d5850940479a322ad25e863e2 WatchSource:0}: Error finding container eafa4b9b4d37bf59553049bb24e747a24e42dc4d5850940479a322ad25e863e2: Status 404 returned error can't find the container with id eafa4b9b4d37bf59553049bb24e747a24e42dc4d5850940479a322ad25e863e2 Oct 10 16:46:47 crc kubenswrapper[4799]: I1010 16:46:47.696701 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-g4bfn" event={"ID":"8b7ffcd9-bba6-432d-ab3a-7d4f62130602","Type":"ContainerStarted","Data":"eafa4b9b4d37bf59553049bb24e747a24e42dc4d5850940479a322ad25e863e2"} Oct 10 16:46:49 crc kubenswrapper[4799]: I1010 16:46:49.609423 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-g4bfn"] Oct 10 16:46:50 crc kubenswrapper[4799]: I1010 16:46:50.227716 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-j2rtf"] Oct 10 16:46:50 crc kubenswrapper[4799]: I1010 16:46:50.229476 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-j2rtf" Oct 10 16:46:50 crc kubenswrapper[4799]: I1010 16:46:50.242449 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-j2rtf"] Oct 10 16:46:50 crc kubenswrapper[4799]: I1010 16:46:50.377148 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jqkb\" (UniqueName: \"kubernetes.io/projected/79620bb9-c574-4dfb-ac62-62804f00ee08-kube-api-access-7jqkb\") pod \"openstack-operator-index-j2rtf\" (UID: \"79620bb9-c574-4dfb-ac62-62804f00ee08\") " pod="openstack-operators/openstack-operator-index-j2rtf" Oct 10 16:46:50 crc kubenswrapper[4799]: I1010 16:46:50.478841 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jqkb\" (UniqueName: \"kubernetes.io/projected/79620bb9-c574-4dfb-ac62-62804f00ee08-kube-api-access-7jqkb\") pod \"openstack-operator-index-j2rtf\" (UID: \"79620bb9-c574-4dfb-ac62-62804f00ee08\") " pod="openstack-operators/openstack-operator-index-j2rtf" Oct 10 16:46:50 crc kubenswrapper[4799]: I1010 16:46:50.525624 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jqkb\" (UniqueName: \"kubernetes.io/projected/79620bb9-c574-4dfb-ac62-62804f00ee08-kube-api-access-7jqkb\") pod \"openstack-operator-index-j2rtf\" (UID: \"79620bb9-c574-4dfb-ac62-62804f00ee08\") " pod="openstack-operators/openstack-operator-index-j2rtf" Oct 10 16:46:50 crc kubenswrapper[4799]: I1010 16:46:50.599263 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-j2rtf" Oct 10 16:46:50 crc kubenswrapper[4799]: I1010 16:46:50.947298 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-j2rtf"] Oct 10 16:46:50 crc kubenswrapper[4799]: W1010 16:46:50.963108 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod79620bb9_c574_4dfb_ac62_62804f00ee08.slice/crio-a0d6339fadbdb18aac39dcef4116a73dba16bfb33df2b7a61a43be1857195396 WatchSource:0}: Error finding container a0d6339fadbdb18aac39dcef4116a73dba16bfb33df2b7a61a43be1857195396: Status 404 returned error can't find the container with id a0d6339fadbdb18aac39dcef4116a73dba16bfb33df2b7a61a43be1857195396 Oct 10 16:46:51 crc kubenswrapper[4799]: I1010 16:46:51.733917 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-g4bfn" event={"ID":"8b7ffcd9-bba6-432d-ab3a-7d4f62130602","Type":"ContainerStarted","Data":"d0be00310ea03a2b2c6cbdf63cd0d69673d7cffd251cd65fd8e914c3746a32c9"} Oct 10 16:46:51 crc kubenswrapper[4799]: I1010 16:46:51.734071 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-g4bfn" podUID="8b7ffcd9-bba6-432d-ab3a-7d4f62130602" containerName="registry-server" containerID="cri-o://d0be00310ea03a2b2c6cbdf63cd0d69673d7cffd251cd65fd8e914c3746a32c9" gracePeriod=2 Oct 10 16:46:51 crc kubenswrapper[4799]: I1010 16:46:51.736672 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-j2rtf" event={"ID":"79620bb9-c574-4dfb-ac62-62804f00ee08","Type":"ContainerStarted","Data":"a0d6339fadbdb18aac39dcef4116a73dba16bfb33df2b7a61a43be1857195396"} Oct 10 16:46:51 crc kubenswrapper[4799]: I1010 16:46:51.762079 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-g4bfn" podStartSLOduration=2.072969218 podStartE2EDuration="5.762030363s" podCreationTimestamp="2025-10-10 16:46:46 +0000 UTC" firstStartedPulling="2025-10-10 16:46:47.063727916 +0000 UTC m=+900.572052041" lastFinishedPulling="2025-10-10 16:46:50.752789031 +0000 UTC m=+904.261113186" observedRunningTime="2025-10-10 16:46:51.754403785 +0000 UTC m=+905.262727940" watchObservedRunningTime="2025-10-10 16:46:51.762030363 +0000 UTC m=+905.270354478" Oct 10 16:46:52 crc kubenswrapper[4799]: I1010 16:46:52.132502 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-g4bfn" Oct 10 16:46:52 crc kubenswrapper[4799]: I1010 16:46:52.203487 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vvsvh\" (UniqueName: \"kubernetes.io/projected/8b7ffcd9-bba6-432d-ab3a-7d4f62130602-kube-api-access-vvsvh\") pod \"8b7ffcd9-bba6-432d-ab3a-7d4f62130602\" (UID: \"8b7ffcd9-bba6-432d-ab3a-7d4f62130602\") " Oct 10 16:46:52 crc kubenswrapper[4799]: I1010 16:46:52.211988 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b7ffcd9-bba6-432d-ab3a-7d4f62130602-kube-api-access-vvsvh" (OuterVolumeSpecName: "kube-api-access-vvsvh") pod "8b7ffcd9-bba6-432d-ab3a-7d4f62130602" (UID: "8b7ffcd9-bba6-432d-ab3a-7d4f62130602"). InnerVolumeSpecName "kube-api-access-vvsvh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:46:52 crc kubenswrapper[4799]: I1010 16:46:52.305547 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vvsvh\" (UniqueName: \"kubernetes.io/projected/8b7ffcd9-bba6-432d-ab3a-7d4f62130602-kube-api-access-vvsvh\") on node \"crc\" DevicePath \"\"" Oct 10 16:46:52 crc kubenswrapper[4799]: I1010 16:46:52.748131 4799 generic.go:334] "Generic (PLEG): container finished" podID="8b7ffcd9-bba6-432d-ab3a-7d4f62130602" containerID="d0be00310ea03a2b2c6cbdf63cd0d69673d7cffd251cd65fd8e914c3746a32c9" exitCode=0 Oct 10 16:46:52 crc kubenswrapper[4799]: I1010 16:46:52.748208 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-g4bfn" Oct 10 16:46:52 crc kubenswrapper[4799]: I1010 16:46:52.748200 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-g4bfn" event={"ID":"8b7ffcd9-bba6-432d-ab3a-7d4f62130602","Type":"ContainerDied","Data":"d0be00310ea03a2b2c6cbdf63cd0d69673d7cffd251cd65fd8e914c3746a32c9"} Oct 10 16:46:52 crc kubenswrapper[4799]: I1010 16:46:52.748433 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-g4bfn" event={"ID":"8b7ffcd9-bba6-432d-ab3a-7d4f62130602","Type":"ContainerDied","Data":"eafa4b9b4d37bf59553049bb24e747a24e42dc4d5850940479a322ad25e863e2"} Oct 10 16:46:52 crc kubenswrapper[4799]: I1010 16:46:52.748471 4799 scope.go:117] "RemoveContainer" containerID="d0be00310ea03a2b2c6cbdf63cd0d69673d7cffd251cd65fd8e914c3746a32c9" Oct 10 16:46:52 crc kubenswrapper[4799]: I1010 16:46:52.751864 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-j2rtf" event={"ID":"79620bb9-c574-4dfb-ac62-62804f00ee08","Type":"ContainerStarted","Data":"81b3aa1620efe04f8f8d3c9272b9f7904c6e5bdf059579298d2cbe69a4f43f06"} Oct 10 16:46:52 crc kubenswrapper[4799]: I1010 16:46:52.778467 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-j2rtf" podStartSLOduration=2.080647835 podStartE2EDuration="2.778441642s" podCreationTimestamp="2025-10-10 16:46:50 +0000 UTC" firstStartedPulling="2025-10-10 16:46:50.965952884 +0000 UTC m=+904.474277039" lastFinishedPulling="2025-10-10 16:46:51.663746731 +0000 UTC m=+905.172070846" observedRunningTime="2025-10-10 16:46:52.775148641 +0000 UTC m=+906.283472806" watchObservedRunningTime="2025-10-10 16:46:52.778441642 +0000 UTC m=+906.286765787" Oct 10 16:46:52 crc kubenswrapper[4799]: I1010 16:46:52.787152 4799 scope.go:117] "RemoveContainer" containerID="d0be00310ea03a2b2c6cbdf63cd0d69673d7cffd251cd65fd8e914c3746a32c9" Oct 10 16:46:52 crc kubenswrapper[4799]: E1010 16:46:52.790313 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0be00310ea03a2b2c6cbdf63cd0d69673d7cffd251cd65fd8e914c3746a32c9\": container with ID starting with d0be00310ea03a2b2c6cbdf63cd0d69673d7cffd251cd65fd8e914c3746a32c9 not found: ID does not exist" containerID="d0be00310ea03a2b2c6cbdf63cd0d69673d7cffd251cd65fd8e914c3746a32c9" Oct 10 16:46:52 crc kubenswrapper[4799]: I1010 16:46:52.790422 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0be00310ea03a2b2c6cbdf63cd0d69673d7cffd251cd65fd8e914c3746a32c9"} err="failed to get container status 
\"d0be00310ea03a2b2c6cbdf63cd0d69673d7cffd251cd65fd8e914c3746a32c9\": rpc error: code = NotFound desc = could not find container \"d0be00310ea03a2b2c6cbdf63cd0d69673d7cffd251cd65fd8e914c3746a32c9\": container with ID starting with d0be00310ea03a2b2c6cbdf63cd0d69673d7cffd251cd65fd8e914c3746a32c9 not found: ID does not exist" Oct 10 16:46:52 crc kubenswrapper[4799]: I1010 16:46:52.802375 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-g4bfn"] Oct 10 16:46:52 crc kubenswrapper[4799]: I1010 16:46:52.807300 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-g4bfn"] Oct 10 16:46:53 crc kubenswrapper[4799]: I1010 16:46:53.416325 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b7ffcd9-bba6-432d-ab3a-7d4f62130602" path="/var/lib/kubelet/pods/8b7ffcd9-bba6-432d-ab3a-7d4f62130602/volumes" Oct 10 16:47:00 crc kubenswrapper[4799]: I1010 16:47:00.599926 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-j2rtf" Oct 10 16:47:00 crc kubenswrapper[4799]: I1010 16:47:00.600661 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-j2rtf" Oct 10 16:47:00 crc kubenswrapper[4799]: I1010 16:47:00.639237 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-j2rtf" Oct 10 16:47:00 crc kubenswrapper[4799]: I1010 16:47:00.857870 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-j2rtf" Oct 10 16:47:07 crc kubenswrapper[4799]: I1010 16:47:07.269084 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b"] Oct 10 16:47:07 crc kubenswrapper[4799]: E1010 16:47:07.269870 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b7ffcd9-bba6-432d-ab3a-7d4f62130602" containerName="registry-server" Oct 10 16:47:07 crc kubenswrapper[4799]: I1010 16:47:07.269895 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b7ffcd9-bba6-432d-ab3a-7d4f62130602" containerName="registry-server" Oct 10 16:47:07 crc kubenswrapper[4799]: I1010 16:47:07.270143 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b7ffcd9-bba6-432d-ab3a-7d4f62130602" containerName="registry-server" Oct 10 16:47:07 crc kubenswrapper[4799]: I1010 16:47:07.272987 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b" Oct 10 16:47:07 crc kubenswrapper[4799]: I1010 16:47:07.275819 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-pk5j8" Oct 10 16:47:07 crc kubenswrapper[4799]: I1010 16:47:07.329603 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5-bundle\") pod \"bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b\" (UID: \"d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5\") " pod="openstack-operators/bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b" Oct 10 16:47:07 crc kubenswrapper[4799]: I1010 16:47:07.329655 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5-util\") pod \"bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b\" (UID: \"d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5\") " pod="openstack-operators/bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b" Oct 10 16:47:07 crc kubenswrapper[4799]: I1010 16:47:07.330005 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrpnt\" (UniqueName: \"kubernetes.io/projected/d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5-kube-api-access-xrpnt\") pod \"bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b\" (UID: \"d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5\") " pod="openstack-operators/bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b" Oct 10 16:47:07 crc kubenswrapper[4799]: I1010 16:47:07.330927 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b"] Oct 10 16:47:07 crc kubenswrapper[4799]: I1010 16:47:07.431267 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5-bundle\") pod \"bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b\" (UID: \"d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5\") " pod="openstack-operators/bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b" Oct 10 16:47:07 crc kubenswrapper[4799]: I1010 16:47:07.431351 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5-util\") pod \"bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b\" (UID: \"d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5\") " pod="openstack-operators/bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b" Oct 10 16:47:07 crc kubenswrapper[4799]: I1010 16:47:07.431473 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrpnt\" (UniqueName: \"kubernetes.io/projected/d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5-kube-api-access-xrpnt\") pod \"bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b\" (UID: \"d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5\") " pod="openstack-operators/bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b" Oct 10 16:47:07 crc kubenswrapper[4799]: I1010 16:47:07.432821 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5-bundle\") pod \"bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b\" (UID: \"d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5\") " pod="openstack-operators/bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b" Oct 10 16:47:07 crc kubenswrapper[4799]: I1010 16:47:07.433121 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5-util\") pod \"bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b\" (UID: \"d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5\") " pod="openstack-operators/bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b" Oct 10 16:47:07 crc kubenswrapper[4799]: I1010 16:47:07.459510 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrpnt\" (UniqueName: \"kubernetes.io/projected/d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5-kube-api-access-xrpnt\") pod \"bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b\" (UID: \"d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5\") " pod="openstack-operators/bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b" Oct 10 16:47:07 crc kubenswrapper[4799]: I1010 16:47:07.643230 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b" Oct 10 16:47:08 crc kubenswrapper[4799]: I1010 16:47:08.036378 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b"] Oct 10 16:47:08 crc kubenswrapper[4799]: W1010 16:47:08.052883 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd8eb7f5e_d8d8_4f95_ad7d_d591d0df2ee5.slice/crio-9304d3447606f029cc3f91313c2f446566144b10e83b53d49dc4bfb1fd08b11a WatchSource:0}: Error finding container 9304d3447606f029cc3f91313c2f446566144b10e83b53d49dc4bfb1fd08b11a: Status 404 returned error can't find the container with id 9304d3447606f029cc3f91313c2f446566144b10e83b53d49dc4bfb1fd08b11a Oct 10 16:47:08 crc kubenswrapper[4799]: I1010 16:47:08.875044 4799 generic.go:334] "Generic (PLEG): container finished" podID="d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5" containerID="e38ed0c9fadf8d6e0c0abf6f1af9eb704fdd3870e42f3f36fa49695b29823b22" exitCode=0 Oct 10 16:47:08 crc kubenswrapper[4799]: I1010 16:47:08.875178 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b" event={"ID":"d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5","Type":"ContainerDied","Data":"e38ed0c9fadf8d6e0c0abf6f1af9eb704fdd3870e42f3f36fa49695b29823b22"} Oct 10 16:47:08 crc kubenswrapper[4799]: I1010 16:47:08.875534 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b" event={"ID":"d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5","Type":"ContainerStarted","Data":"9304d3447606f029cc3f91313c2f446566144b10e83b53d49dc4bfb1fd08b11a"} Oct 10 16:47:09 crc kubenswrapper[4799]: I1010 16:47:09.900180 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b" event={"ID":"d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5","Type":"ContainerStarted","Data":"a07a2bec5eaa357aa5402867f200114428159fcb945331b867f5afaf291cf6d4"} Oct 10 16:47:10 crc kubenswrapper[4799]: 
I1010 16:47:10.910510 4799 generic.go:334] "Generic (PLEG): container finished" podID="d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5" containerID="a07a2bec5eaa357aa5402867f200114428159fcb945331b867f5afaf291cf6d4" exitCode=0 Oct 10 16:47:10 crc kubenswrapper[4799]: I1010 16:47:10.910570 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b" event={"ID":"d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5","Type":"ContainerDied","Data":"a07a2bec5eaa357aa5402867f200114428159fcb945331b867f5afaf291cf6d4"} Oct 10 16:47:11 crc kubenswrapper[4799]: I1010 16:47:11.920177 4799 generic.go:334] "Generic (PLEG): container finished" podID="d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5" containerID="c66c0fa25c199680ade6b7af0e15ae0484096f84075601c597ce2d75a7a6f78f" exitCode=0 Oct 10 16:47:11 crc kubenswrapper[4799]: I1010 16:47:11.920269 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b" event={"ID":"d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5","Type":"ContainerDied","Data":"c66c0fa25c199680ade6b7af0e15ae0484096f84075601c597ce2d75a7a6f78f"} Oct 10 16:47:13 crc kubenswrapper[4799]: I1010 16:47:13.204734 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b" Oct 10 16:47:13 crc kubenswrapper[4799]: I1010 16:47:13.315458 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xrpnt\" (UniqueName: \"kubernetes.io/projected/d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5-kube-api-access-xrpnt\") pod \"d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5\" (UID: \"d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5\") " Oct 10 16:47:13 crc kubenswrapper[4799]: I1010 16:47:13.315556 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5-bundle\") pod \"d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5\" (UID: \"d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5\") " Oct 10 16:47:13 crc kubenswrapper[4799]: I1010 16:47:13.315775 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5-util\") pod \"d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5\" (UID: \"d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5\") " Oct 10 16:47:13 crc kubenswrapper[4799]: I1010 16:47:13.316971 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5-bundle" (OuterVolumeSpecName: "bundle") pod "d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5" (UID: "d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:47:13 crc kubenswrapper[4799]: I1010 16:47:13.321061 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5-kube-api-access-xrpnt" (OuterVolumeSpecName: "kube-api-access-xrpnt") pod "d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5" (UID: "d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5"). InnerVolumeSpecName "kube-api-access-xrpnt". 
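The bbf55...q86b pod above is an operator-bundle unpack pod: its pull, extract, and util containers each exit 0 in sequence, sharing emptyDir volumes named bundle and util. A rough sketch of that pod shape in Go; the images and pod name are hypothetical placeholders, and only the volume and container names come from the log:

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

func main() {
	emptyDir := corev1.VolumeSource{EmptyDir: &corev1.EmptyDirVolumeSource{}}
	pod := corev1.Pod{
		ObjectMeta: metav1.ObjectMeta{
			Name:      "bundle-unpack-sketch", // hypothetical name
			Namespace: "openstack-operators",
		},
		Spec: corev1.PodSpec{
			RestartPolicy: corev1.RestartPolicyNever,
			Volumes: []corev1.Volume{
				{Name: "bundle", VolumeSource: emptyDir},
				{Name: "util", VolumeSource: emptyDir},
			},
			InitContainers: []corev1.Container{
				{Name: "util", Image: "example.com/util:latest", // hypothetical image
					VolumeMounts: []corev1.VolumeMount{{Name: "util", MountPath: "/util"}}},
				{Name: "pull", Image: "example.com/bundle:latest", // hypothetical image
					VolumeMounts: []corev1.VolumeMount{{Name: "bundle", MountPath: "/bundle"}}},
			},
			Containers: []corev1.Container{
				{Name: "extract", Image: "example.com/opm:latest", // hypothetical image
					VolumeMounts: []corev1.VolumeMount{{Name: "bundle", MountPath: "/bundle"}}},
			},
		},
	}
	fmt.Println(pod.Name, "with", len(pod.Spec.Volumes), "emptyDir volumes")
}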
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:47:13 crc kubenswrapper[4799]: I1010 16:47:13.346366 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5-util" (OuterVolumeSpecName: "util") pod "d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5" (UID: "d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:47:13 crc kubenswrapper[4799]: I1010 16:47:13.417649 4799 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5-util\") on node \"crc\" DevicePath \"\"" Oct 10 16:47:13 crc kubenswrapper[4799]: I1010 16:47:13.417683 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xrpnt\" (UniqueName: \"kubernetes.io/projected/d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5-kube-api-access-xrpnt\") on node \"crc\" DevicePath \"\"" Oct 10 16:47:13 crc kubenswrapper[4799]: I1010 16:47:13.417695 4799 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:47:13 crc kubenswrapper[4799]: I1010 16:47:13.935018 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b" event={"ID":"d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5","Type":"ContainerDied","Data":"9304d3447606f029cc3f91313c2f446566144b10e83b53d49dc4bfb1fd08b11a"} Oct 10 16:47:13 crc kubenswrapper[4799]: I1010 16:47:13.935059 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9304d3447606f029cc3f91313c2f446566144b10e83b53d49dc4bfb1fd08b11a" Oct 10 16:47:13 crc kubenswrapper[4799]: I1010 16:47:13.935076 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b" Oct 10 16:47:15 crc kubenswrapper[4799]: I1010 16:47:15.248631 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 16:47:15 crc kubenswrapper[4799]: I1010 16:47:15.249023 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 16:47:25 crc kubenswrapper[4799]: I1010 16:47:25.475141 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-688d597459-zvstz"] Oct 10 16:47:25 crc kubenswrapper[4799]: E1010 16:47:25.476123 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5" containerName="extract" Oct 10 16:47:25 crc kubenswrapper[4799]: I1010 16:47:25.476255 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5" containerName="extract" Oct 10 16:47:25 crc kubenswrapper[4799]: E1010 16:47:25.476276 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5" containerName="pull" Oct 10 16:47:25 crc kubenswrapper[4799]: I1010 16:47:25.476287 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5" containerName="pull" Oct 10 16:47:25 crc kubenswrapper[4799]: E1010 16:47:25.476302 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5" containerName="util" Oct 10 16:47:25 crc kubenswrapper[4799]: I1010 16:47:25.476314 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5" containerName="util" Oct 10 16:47:25 crc kubenswrapper[4799]: I1010 16:47:25.476512 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5" containerName="extract" Oct 10 16:47:25 crc kubenswrapper[4799]: I1010 16:47:25.477570 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-688d597459-zvstz" Oct 10 16:47:25 crc kubenswrapper[4799]: I1010 16:47:25.479832 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-vkwpf" Oct 10 16:47:25 crc kubenswrapper[4799]: I1010 16:47:25.545798 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-688d597459-zvstz"] Oct 10 16:47:25 crc kubenswrapper[4799]: I1010 16:47:25.586518 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tg7nh\" (UniqueName: \"kubernetes.io/projected/d917bfc2-cb7d-4628-9a34-3747e15e6fbf-kube-api-access-tg7nh\") pod \"openstack-operator-controller-operator-688d597459-zvstz\" (UID: \"d917bfc2-cb7d-4628-9a34-3747e15e6fbf\") " pod="openstack-operators/openstack-operator-controller-operator-688d597459-zvstz" Oct 10 16:47:25 crc kubenswrapper[4799]: I1010 16:47:25.688150 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tg7nh\" (UniqueName: \"kubernetes.io/projected/d917bfc2-cb7d-4628-9a34-3747e15e6fbf-kube-api-access-tg7nh\") pod \"openstack-operator-controller-operator-688d597459-zvstz\" (UID: \"d917bfc2-cb7d-4628-9a34-3747e15e6fbf\") " pod="openstack-operators/openstack-operator-controller-operator-688d597459-zvstz" Oct 10 16:47:25 crc kubenswrapper[4799]: I1010 16:47:25.708735 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tg7nh\" (UniqueName: \"kubernetes.io/projected/d917bfc2-cb7d-4628-9a34-3747e15e6fbf-kube-api-access-tg7nh\") pod \"openstack-operator-controller-operator-688d597459-zvstz\" (UID: \"d917bfc2-cb7d-4628-9a34-3747e15e6fbf\") " pod="openstack-operators/openstack-operator-controller-operator-688d597459-zvstz" Oct 10 16:47:25 crc kubenswrapper[4799]: I1010 16:47:25.798070 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-688d597459-zvstz" Oct 10 16:47:26 crc kubenswrapper[4799]: I1010 16:47:26.022001 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-688d597459-zvstz"] Oct 10 16:47:27 crc kubenswrapper[4799]: I1010 16:47:27.036128 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-688d597459-zvstz" event={"ID":"d917bfc2-cb7d-4628-9a34-3747e15e6fbf","Type":"ContainerStarted","Data":"8955028aef84fdeaeffb7514ce0b84536609b06f3888a30c408f491ee97706c2"} Oct 10 16:47:31 crc kubenswrapper[4799]: I1010 16:47:31.067135 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-688d597459-zvstz" event={"ID":"d917bfc2-cb7d-4628-9a34-3747e15e6fbf","Type":"ContainerStarted","Data":"b50f5f079481523a8cad9e94f17ef9ed1a3e5b83303f054f0ac1bf5e54071990"} Oct 10 16:47:34 crc kubenswrapper[4799]: I1010 16:47:34.090027 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-688d597459-zvstz" event={"ID":"d917bfc2-cb7d-4628-9a34-3747e15e6fbf","Type":"ContainerStarted","Data":"bd4382237b0004bbb79e9d323ea50e3aa94e79620673daccafad1dc62c2b6f4c"} Oct 10 16:47:34 crc kubenswrapper[4799]: I1010 16:47:34.091076 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-688d597459-zvstz" Oct 10 16:47:34 crc kubenswrapper[4799]: I1010 16:47:34.137807 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-688d597459-zvstz" podStartSLOduration=1.649772509 podStartE2EDuration="9.137780767s" podCreationTimestamp="2025-10-10 16:47:25 +0000 UTC" firstStartedPulling="2025-10-10 16:47:26.031129732 +0000 UTC m=+939.539453847" lastFinishedPulling="2025-10-10 16:47:33.51913794 +0000 UTC m=+947.027462105" observedRunningTime="2025-10-10 16:47:34.131304467 +0000 UTC m=+947.639628622" watchObservedRunningTime="2025-10-10 16:47:34.137780767 +0000 UTC m=+947.646104912" Oct 10 16:47:35 crc kubenswrapper[4799]: I1010 16:47:35.102250 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-688d597459-zvstz" Oct 10 16:47:45 crc kubenswrapper[4799]: I1010 16:47:45.249202 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 16:47:45 crc kubenswrapper[4799]: I1010 16:47:45.249869 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 16:47:45 crc kubenswrapper[4799]: I1010 16:47:45.249934 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 16:47:45 crc kubenswrapper[4799]: I1010 16:47:45.250766 4799 kuberuntime_manager.go:1027] "Message for Container of pod" 
containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5d0c20be696163127fb1361e7edc7eadb541b7fccbd83cd240ae6b5f02af5dd5"} pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 10 16:47:45 crc kubenswrapper[4799]: I1010 16:47:45.250877 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" containerID="cri-o://5d0c20be696163127fb1361e7edc7eadb541b7fccbd83cd240ae6b5f02af5dd5" gracePeriod=600 Oct 10 16:47:46 crc kubenswrapper[4799]: I1010 16:47:46.179373 4799 generic.go:334] "Generic (PLEG): container finished" podID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerID="5d0c20be696163127fb1361e7edc7eadb541b7fccbd83cd240ae6b5f02af5dd5" exitCode=0 Oct 10 16:47:46 crc kubenswrapper[4799]: I1010 16:47:46.179508 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerDied","Data":"5d0c20be696163127fb1361e7edc7eadb541b7fccbd83cd240ae6b5f02af5dd5"} Oct 10 16:47:46 crc kubenswrapper[4799]: I1010 16:47:46.180187 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"7660328ebc7154335d94320ea1d630296da5d0b7a601ee21c41b533b20ba0a49"} Oct 10 16:47:46 crc kubenswrapper[4799]: I1010 16:47:46.180263 4799 scope.go:117] "RemoveContainer" containerID="ff017f427623f1c99da82aa1f76b3d32ffeae8d4ca8e7ce1e98dc285ba08fb9c" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.388987 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-658bdf4b74-stptz"] Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.391116 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-658bdf4b74-stptz" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.392364 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-7b7fb68549-bl9j6"] Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.392787 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-s5sd8" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.393590 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-bl9j6" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.395117 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-slqdt" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.421594 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-7b7fb68549-bl9j6"] Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.421650 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-658bdf4b74-stptz"] Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.432080 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-85d5d9dd78-w2d2v"] Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.433088 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-85d5d9dd78-w2d2v" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.440445 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-8cp5w" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.445353 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-85d5d9dd78-w2d2v"] Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.453658 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-84b9b84486-dvk5w"] Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.455028 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-84b9b84486-dvk5w" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.461190 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-55js7" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.473618 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-858f76bbdd-tzx89"] Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.474652 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-858f76bbdd-tzx89" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.480811 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-84b9b84486-dvk5w"] Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.483244 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-j89hl" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.501663 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-7ffbcb7588-kv2gv"] Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.502604 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-7ffbcb7588-kv2gv" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.505331 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-q6fk8" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.507868 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-858f76bbdd-tzx89"] Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.510078 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-656bcbd775-nh542"] Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.510966 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-656bcbd775-nh542" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.512739 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-f47h9" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.513430 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.513697 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-7ffbcb7588-kv2gv"] Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.526644 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-9c5c78d49-vjjnj"] Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.527905 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-9c5c78d49-vjjnj" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.527993 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-9c5c78d49-vjjnj"] Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.532162 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-fjdrb" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.539144 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-656bcbd775-nh542"] Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.543302 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-ll94t"] Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.544398 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-ll94t" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.546565 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwlnb\" (UniqueName: \"kubernetes.io/projected/9e711c48-2d32-4933-b13f-a0f9fec33e0d-kube-api-access-gwlnb\") pod \"barbican-operator-controller-manager-658bdf4b74-stptz\" (UID: \"9e711c48-2d32-4933-b13f-a0f9fec33e0d\") " pod="openstack-operators/barbican-operator-controller-manager-658bdf4b74-stptz" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.546612 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4b4mn\" (UniqueName: \"kubernetes.io/projected/98cf31e5-618a-4363-8a3d-1b0d0bc75b48-kube-api-access-4b4mn\") pod \"cinder-operator-controller-manager-7b7fb68549-bl9j6\" (UID: \"98cf31e5-618a-4363-8a3d-1b0d0bc75b48\") " pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-bl9j6" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.546641 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s2df5\" (UniqueName: \"kubernetes.io/projected/324da982-9067-490a-98a5-9ad58296841a-kube-api-access-s2df5\") pod \"designate-operator-controller-manager-85d5d9dd78-w2d2v\" (UID: \"324da982-9067-490a-98a5-9ad58296841a\") " pod="openstack-operators/designate-operator-controller-manager-85d5d9dd78-w2d2v" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.547816 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-tv5zc" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.554513 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-5f67fbc655-2qw6c"] Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.555590 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-5f67fbc655-2qw6c" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.558010 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-67f8c" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.591944 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-ll94t"] Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.604977 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-5f67fbc655-2qw6c"] Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.615429 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-22wxr"] Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.616696 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-22wxr" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.627138 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-9lbn7" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.650330 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xr2dt\" (UniqueName: \"kubernetes.io/projected/37273794-7563-423e-a2d5-86c9e9f957cb-kube-api-access-xr2dt\") pod \"heat-operator-controller-manager-858f76bbdd-tzx89\" (UID: \"37273794-7563-423e-a2d5-86c9e9f957cb\") " pod="openstack-operators/heat-operator-controller-manager-858f76bbdd-tzx89" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.650378 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/62fda0e4-55d3-481f-8da4-66e8f4dd39d4-cert\") pod \"infra-operator-controller-manager-656bcbd775-nh542\" (UID: \"62fda0e4-55d3-481f-8da4-66e8f4dd39d4\") " pod="openstack-operators/infra-operator-controller-manager-656bcbd775-nh542" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.650414 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwlnb\" (UniqueName: \"kubernetes.io/projected/9e711c48-2d32-4933-b13f-a0f9fec33e0d-kube-api-access-gwlnb\") pod \"barbican-operator-controller-manager-658bdf4b74-stptz\" (UID: \"9e711c48-2d32-4933-b13f-a0f9fec33e0d\") " pod="openstack-operators/barbican-operator-controller-manager-658bdf4b74-stptz" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.650440 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jgf5v\" (UniqueName: \"kubernetes.io/projected/b0caa8f4-5c59-402a-9025-a2ba80d70577-kube-api-access-jgf5v\") pod \"horizon-operator-controller-manager-7ffbcb7588-kv2gv\" (UID: \"b0caa8f4-5c59-402a-9025-a2ba80d70577\") " pod="openstack-operators/horizon-operator-controller-manager-7ffbcb7588-kv2gv" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.650471 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4b4mn\" (UniqueName: \"kubernetes.io/projected/98cf31e5-618a-4363-8a3d-1b0d0bc75b48-kube-api-access-4b4mn\") pod \"cinder-operator-controller-manager-7b7fb68549-bl9j6\" (UID: \"98cf31e5-618a-4363-8a3d-1b0d0bc75b48\") " pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-bl9j6" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.650488 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkgzw\" (UniqueName: \"kubernetes.io/projected/a61aa86a-a90e-439b-85e9-15b7a1466785-kube-api-access-bkgzw\") pod \"glance-operator-controller-manager-84b9b84486-dvk5w\" (UID: \"a61aa86a-a90e-439b-85e9-15b7a1466785\") " pod="openstack-operators/glance-operator-controller-manager-84b9b84486-dvk5w" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.650509 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2df5\" (UniqueName: \"kubernetes.io/projected/324da982-9067-490a-98a5-9ad58296841a-kube-api-access-s2df5\") pod \"designate-operator-controller-manager-85d5d9dd78-w2d2v\" (UID: \"324da982-9067-490a-98a5-9ad58296841a\") " 
pod="openstack-operators/designate-operator-controller-manager-85d5d9dd78-w2d2v" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.650527 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vtgbl\" (UniqueName: \"kubernetes.io/projected/62fda0e4-55d3-481f-8da4-66e8f4dd39d4-kube-api-access-vtgbl\") pod \"infra-operator-controller-manager-656bcbd775-nh542\" (UID: \"62fda0e4-55d3-481f-8da4-66e8f4dd39d4\") " pod="openstack-operators/infra-operator-controller-manager-656bcbd775-nh542" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.650570 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrvq9\" (UniqueName: \"kubernetes.io/projected/490a3592-9d71-4182-8b1b-6f8c55a01bde-kube-api-access-xrvq9\") pod \"keystone-operator-controller-manager-55b6b7c7b8-ll94t\" (UID: \"490a3592-9d71-4182-8b1b-6f8c55a01bde\") " pod="openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-ll94t" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.650590 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-znph9\" (UniqueName: \"kubernetes.io/projected/c89f9acf-7ee6-4600-9331-635eb7fce931-kube-api-access-znph9\") pod \"ironic-operator-controller-manager-9c5c78d49-vjjnj\" (UID: \"c89f9acf-7ee6-4600-9331-635eb7fce931\") " pod="openstack-operators/ironic-operator-controller-manager-9c5c78d49-vjjnj" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.655275 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-79d585cb66-45997"] Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.656322 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-45997" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.661304 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-4m2kb" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.688624 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2df5\" (UniqueName: \"kubernetes.io/projected/324da982-9067-490a-98a5-9ad58296841a-kube-api-access-s2df5\") pod \"designate-operator-controller-manager-85d5d9dd78-w2d2v\" (UID: \"324da982-9067-490a-98a5-9ad58296841a\") " pod="openstack-operators/designate-operator-controller-manager-85d5d9dd78-w2d2v" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.695422 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-22wxr"] Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.703394 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4b4mn\" (UniqueName: \"kubernetes.io/projected/98cf31e5-618a-4363-8a3d-1b0d0bc75b48-kube-api-access-4b4mn\") pod \"cinder-operator-controller-manager-7b7fb68549-bl9j6\" (UID: \"98cf31e5-618a-4363-8a3d-1b0d0bc75b48\") " pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-bl9j6" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.709918 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwlnb\" (UniqueName: \"kubernetes.io/projected/9e711c48-2d32-4933-b13f-a0f9fec33e0d-kube-api-access-gwlnb\") pod \"barbican-operator-controller-manager-658bdf4b74-stptz\" (UID: \"9e711c48-2d32-4933-b13f-a0f9fec33e0d\") " pod="openstack-operators/barbican-operator-controller-manager-658bdf4b74-stptz" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.721124 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-658bdf4b74-stptz" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.735341 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-bl9j6" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.739819 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-5df598886f-rcld5"] Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.741006 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-5df598886f-rcld5" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.756464 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-x6x4g" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.769151 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jgf5v\" (UniqueName: \"kubernetes.io/projected/b0caa8f4-5c59-402a-9025-a2ba80d70577-kube-api-access-jgf5v\") pod \"horizon-operator-controller-manager-7ffbcb7588-kv2gv\" (UID: \"b0caa8f4-5c59-402a-9025-a2ba80d70577\") " pod="openstack-operators/horizon-operator-controller-manager-7ffbcb7588-kv2gv" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.770932 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkgzw\" (UniqueName: \"kubernetes.io/projected/a61aa86a-a90e-439b-85e9-15b7a1466785-kube-api-access-bkgzw\") pod \"glance-operator-controller-manager-84b9b84486-dvk5w\" (UID: \"a61aa86a-a90e-439b-85e9-15b7a1466785\") " pod="openstack-operators/glance-operator-controller-manager-84b9b84486-dvk5w" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.771402 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vtgbl\" (UniqueName: \"kubernetes.io/projected/62fda0e4-55d3-481f-8da4-66e8f4dd39d4-kube-api-access-vtgbl\") pod \"infra-operator-controller-manager-656bcbd775-nh542\" (UID: \"62fda0e4-55d3-481f-8da4-66e8f4dd39d4\") " pod="openstack-operators/infra-operator-controller-manager-656bcbd775-nh542" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.771455 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpbk7\" (UniqueName: \"kubernetes.io/projected/118111a7-9601-4a05-94b9-79601cb47623-kube-api-access-zpbk7\") pod \"mariadb-operator-controller-manager-f9fb45f8f-22wxr\" (UID: \"118111a7-9601-4a05-94b9-79601cb47623\") " pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-22wxr" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.771529 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pbsxb\" (UniqueName: \"kubernetes.io/projected/92c8227d-0d33-41f9-b186-2f17c2753fa2-kube-api-access-pbsxb\") pod \"manila-operator-controller-manager-5f67fbc655-2qw6c\" (UID: \"92c8227d-0d33-41f9-b186-2f17c2753fa2\") " pod="openstack-operators/manila-operator-controller-manager-5f67fbc655-2qw6c" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.771556 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrvq9\" (UniqueName: \"kubernetes.io/projected/490a3592-9d71-4182-8b1b-6f8c55a01bde-kube-api-access-xrvq9\") pod \"keystone-operator-controller-manager-55b6b7c7b8-ll94t\" (UID: \"490a3592-9d71-4182-8b1b-6f8c55a01bde\") " pod="openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-ll94t" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.771579 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-znph9\" (UniqueName: \"kubernetes.io/projected/c89f9acf-7ee6-4600-9331-635eb7fce931-kube-api-access-znph9\") pod \"ironic-operator-controller-manager-9c5c78d49-vjjnj\" (UID: \"c89f9acf-7ee6-4600-9331-635eb7fce931\") " 
pod="openstack-operators/ironic-operator-controller-manager-9c5c78d49-vjjnj" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.771810 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xr2dt\" (UniqueName: \"kubernetes.io/projected/37273794-7563-423e-a2d5-86c9e9f957cb-kube-api-access-xr2dt\") pod \"heat-operator-controller-manager-858f76bbdd-tzx89\" (UID: \"37273794-7563-423e-a2d5-86c9e9f957cb\") " pod="openstack-operators/heat-operator-controller-manager-858f76bbdd-tzx89" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.771848 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/62fda0e4-55d3-481f-8da4-66e8f4dd39d4-cert\") pod \"infra-operator-controller-manager-656bcbd775-nh542\" (UID: \"62fda0e4-55d3-481f-8da4-66e8f4dd39d4\") " pod="openstack-operators/infra-operator-controller-manager-656bcbd775-nh542" Oct 10 16:48:05 crc kubenswrapper[4799]: E1010 16:48:05.771950 4799 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Oct 10 16:48:05 crc kubenswrapper[4799]: E1010 16:48:05.771988 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/62fda0e4-55d3-481f-8da4-66e8f4dd39d4-cert podName:62fda0e4-55d3-481f-8da4-66e8f4dd39d4 nodeName:}" failed. No retries permitted until 2025-10-10 16:48:06.271973545 +0000 UTC m=+979.780297660 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/62fda0e4-55d3-481f-8da4-66e8f4dd39d4-cert") pod "infra-operator-controller-manager-656bcbd775-nh542" (UID: "62fda0e4-55d3-481f-8da4-66e8f4dd39d4") : secret "infra-operator-webhook-server-cert" not found Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.784323 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-85d5d9dd78-w2d2v" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.789844 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-79d585cb66-45997"] Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.801794 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-5df598886f-rcld5"] Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.814504 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vtgbl\" (UniqueName: \"kubernetes.io/projected/62fda0e4-55d3-481f-8da4-66e8f4dd39d4-kube-api-access-vtgbl\") pod \"infra-operator-controller-manager-656bcbd775-nh542\" (UID: \"62fda0e4-55d3-481f-8da4-66e8f4dd39d4\") " pod="openstack-operators/infra-operator-controller-manager-656bcbd775-nh542" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.823919 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrvq9\" (UniqueName: \"kubernetes.io/projected/490a3592-9d71-4182-8b1b-6f8c55a01bde-kube-api-access-xrvq9\") pod \"keystone-operator-controller-manager-55b6b7c7b8-ll94t\" (UID: \"490a3592-9d71-4182-8b1b-6f8c55a01bde\") " pod="openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-ll94t" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.824403 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jgf5v\" (UniqueName: \"kubernetes.io/projected/b0caa8f4-5c59-402a-9025-a2ba80d70577-kube-api-access-jgf5v\") pod \"horizon-operator-controller-manager-7ffbcb7588-kv2gv\" (UID: \"b0caa8f4-5c59-402a-9025-a2ba80d70577\") " pod="openstack-operators/horizon-operator-controller-manager-7ffbcb7588-kv2gv" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.830521 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-7ffbcb7588-kv2gv" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.834266 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-znph9\" (UniqueName: \"kubernetes.io/projected/c89f9acf-7ee6-4600-9331-635eb7fce931-kube-api-access-znph9\") pod \"ironic-operator-controller-manager-9c5c78d49-vjjnj\" (UID: \"c89f9acf-7ee6-4600-9331-635eb7fce931\") " pod="openstack-operators/ironic-operator-controller-manager-9c5c78d49-vjjnj" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.834673 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xr2dt\" (UniqueName: \"kubernetes.io/projected/37273794-7563-423e-a2d5-86c9e9f957cb-kube-api-access-xr2dt\") pod \"heat-operator-controller-manager-858f76bbdd-tzx89\" (UID: \"37273794-7563-423e-a2d5-86c9e9f957cb\") " pod="openstack-operators/heat-operator-controller-manager-858f76bbdd-tzx89" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.844990 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkgzw\" (UniqueName: \"kubernetes.io/projected/a61aa86a-a90e-439b-85e9-15b7a1466785-kube-api-access-bkgzw\") pod \"glance-operator-controller-manager-84b9b84486-dvk5w\" (UID: \"a61aa86a-a90e-439b-85e9-15b7a1466785\") " pod="openstack-operators/glance-operator-controller-manager-84b9b84486-dvk5w" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.849725 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-6thbh"] Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.850774 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-6thbh" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.857581 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-6thbh"] Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.871216 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-9c5c78d49-vjjnj" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.873933 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpbk7\" (UniqueName: \"kubernetes.io/projected/118111a7-9601-4a05-94b9-79601cb47623-kube-api-access-zpbk7\") pod \"mariadb-operator-controller-manager-f9fb45f8f-22wxr\" (UID: \"118111a7-9601-4a05-94b9-79601cb47623\") " pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-22wxr" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.873981 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pbsxb\" (UniqueName: \"kubernetes.io/projected/92c8227d-0d33-41f9-b186-2f17c2753fa2-kube-api-access-pbsxb\") pod \"manila-operator-controller-manager-5f67fbc655-2qw6c\" (UID: \"92c8227d-0d33-41f9-b186-2f17c2753fa2\") " pod="openstack-operators/manila-operator-controller-manager-5f67fbc655-2qw6c" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.874009 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mp69f\" (UniqueName: \"kubernetes.io/projected/71551afb-1aa4-4cdf-9cb4-a136ec3cadf5-kube-api-access-mp69f\") pod \"octavia-operator-controller-manager-69fdcfc5f5-6thbh\" (UID: \"71551afb-1aa4-4cdf-9cb4-a136ec3cadf5\") " pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-6thbh" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.874070 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fc9qw\" (UniqueName: \"kubernetes.io/projected/4367b146-2ac4-497e-b15a-c35615498938-kube-api-access-fc9qw\") pod \"neutron-operator-controller-manager-79d585cb66-45997\" (UID: \"4367b146-2ac4-497e-b15a-c35615498938\") " pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-45997" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.874096 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2p9hj\" (UniqueName: \"kubernetes.io/projected/f9c815da-6e31-4ac7-a019-037983b9a9fd-kube-api-access-2p9hj\") pod \"nova-operator-controller-manager-5df598886f-rcld5\" (UID: \"f9c815da-6e31-4ac7-a019-037983b9a9fd\") " pod="openstack-operators/nova-operator-controller-manager-5df598886f-rcld5" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.881866 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-zrldx" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.895125 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5956dffb7b725zs"] Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.896386 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5956dffb7b725zs" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.902635 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-hhdkd" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.902849 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.911236 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpbk7\" (UniqueName: \"kubernetes.io/projected/118111a7-9601-4a05-94b9-79601cb47623-kube-api-access-zpbk7\") pod \"mariadb-operator-controller-manager-f9fb45f8f-22wxr\" (UID: \"118111a7-9601-4a05-94b9-79601cb47623\") " pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-22wxr" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.915626 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-79df5fb58c-g57bz"] Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.916881 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-79df5fb58c-g57bz" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.918470 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-ll94t" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.918898 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-2hvxl" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.925188 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pbsxb\" (UniqueName: \"kubernetes.io/projected/92c8227d-0d33-41f9-b186-2f17c2753fa2-kube-api-access-pbsxb\") pod \"manila-operator-controller-manager-5f67fbc655-2qw6c\" (UID: \"92c8227d-0d33-41f9-b186-2f17c2753fa2\") " pod="openstack-operators/manila-operator-controller-manager-5f67fbc655-2qw6c" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.925257 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5956dffb7b725zs"] Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.934473 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-79df5fb58c-g57bz"] Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.939739 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-68b6c87b68-wxv94"] Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.940676 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-68b6c87b68-wxv94" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.942591 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-h8pds" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.960746 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-68b6c87b68-wxv94"] Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.961028 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-5f67fbc655-2qw6c" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.965800 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-22wxr" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.974887 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mp69f\" (UniqueName: \"kubernetes.io/projected/71551afb-1aa4-4cdf-9cb4-a136ec3cadf5-kube-api-access-mp69f\") pod \"octavia-operator-controller-manager-69fdcfc5f5-6thbh\" (UID: \"71551afb-1aa4-4cdf-9cb4-a136ec3cadf5\") " pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-6thbh" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.975055 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fc9qw\" (UniqueName: \"kubernetes.io/projected/4367b146-2ac4-497e-b15a-c35615498938-kube-api-access-fc9qw\") pod \"neutron-operator-controller-manager-79d585cb66-45997\" (UID: \"4367b146-2ac4-497e-b15a-c35615498938\") " pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-45997" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.975150 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2p9hj\" (UniqueName: \"kubernetes.io/projected/f9c815da-6e31-4ac7-a019-037983b9a9fd-kube-api-access-2p9hj\") pod \"nova-operator-controller-manager-5df598886f-rcld5\" (UID: \"f9c815da-6e31-4ac7-a019-037983b9a9fd\") " pod="openstack-operators/nova-operator-controller-manager-5df598886f-rcld5" Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.994810 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-db6d7f97b-pcn6t"] Oct 10 16:48:05 crc kubenswrapper[4799]: I1010 16:48:05.995837 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-pcn6t" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.000239 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-2vgs4" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.009084 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mp69f\" (UniqueName: \"kubernetes.io/projected/71551afb-1aa4-4cdf-9cb4-a136ec3cadf5-kube-api-access-mp69f\") pod \"octavia-operator-controller-manager-69fdcfc5f5-6thbh\" (UID: \"71551afb-1aa4-4cdf-9cb4-a136ec3cadf5\") " pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-6thbh" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.015219 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fc9qw\" (UniqueName: \"kubernetes.io/projected/4367b146-2ac4-497e-b15a-c35615498938-kube-api-access-fc9qw\") pod \"neutron-operator-controller-manager-79d585cb66-45997\" (UID: \"4367b146-2ac4-497e-b15a-c35615498938\") " pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-45997" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.019413 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-db6d7f97b-pcn6t"] Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.029943 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2p9hj\" (UniqueName: \"kubernetes.io/projected/f9c815da-6e31-4ac7-a019-037983b9a9fd-kube-api-access-2p9hj\") pod \"nova-operator-controller-manager-5df598886f-rcld5\" (UID: \"f9c815da-6e31-4ac7-a019-037983b9a9fd\") " pod="openstack-operators/nova-operator-controller-manager-5df598886f-rcld5" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.042432 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-67cfc6749b-vbq52"] Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.043904 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-vbq52" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.054427 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-xrnmv" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.063172 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5458f77c4-k5cjx"] Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.065686 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5458f77c4-k5cjx" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.070237 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-hx566" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.076104 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-67cfc6749b-vbq52"] Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.083252 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ca5131fc-12e4-41b5-a4e2-6e71ed4049e1-cert\") pod \"openstack-baremetal-operator-controller-manager-5956dffb7b725zs\" (UID: \"ca5131fc-12e4-41b5-a4e2-6e71ed4049e1\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5956dffb7b725zs" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.083333 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94t6l\" (UniqueName: \"kubernetes.io/projected/2ac4be00-5d25-4bb4-8f98-599d7d637d38-kube-api-access-94t6l\") pod \"ovn-operator-controller-manager-79df5fb58c-g57bz\" (UID: \"2ac4be00-5d25-4bb4-8f98-599d7d637d38\") " pod="openstack-operators/ovn-operator-controller-manager-79df5fb58c-g57bz" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.083375 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7sz9\" (UniqueName: \"kubernetes.io/projected/ca5131fc-12e4-41b5-a4e2-6e71ed4049e1-kube-api-access-x7sz9\") pod \"openstack-baremetal-operator-controller-manager-5956dffb7b725zs\" (UID: \"ca5131fc-12e4-41b5-a4e2-6e71ed4049e1\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5956dffb7b725zs" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.083431 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmfbp\" (UniqueName: \"kubernetes.io/projected/18c5ac5f-08f2-431b-9aaf-0b2e5c3f9bbf-kube-api-access-xmfbp\") pod \"placement-operator-controller-manager-68b6c87b68-wxv94\" (UID: \"18c5ac5f-08f2-431b-9aaf-0b2e5c3f9bbf\") " pod="openstack-operators/placement-operator-controller-manager-68b6c87b68-wxv94" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.092546 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5458f77c4-k5cjx"] Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.102871 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-84b9b84486-dvk5w" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.109549 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-7f554bff7b-dq9x7"] Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.110825 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-dq9x7" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.112942 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-9vmc4" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.117022 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-7f554bff7b-dq9x7"] Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.117545 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-858f76bbdd-tzx89" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.161460 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-45997" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.187230 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7qgzr\" (UniqueName: \"kubernetes.io/projected/e669c0e2-564c-4391-b2d0-5ab8cc0f38cd-kube-api-access-7qgzr\") pod \"telemetry-operator-controller-manager-67cfc6749b-vbq52\" (UID: \"e669c0e2-564c-4391-b2d0-5ab8cc0f38cd\") " pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-vbq52" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.187303 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tdmsh\" (UniqueName: \"kubernetes.io/projected/e9f4174f-ea12-45c8-840c-ccdce9dd4c1f-kube-api-access-tdmsh\") pod \"swift-operator-controller-manager-db6d7f97b-pcn6t\" (UID: \"e9f4174f-ea12-45c8-840c-ccdce9dd4c1f\") " pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-pcn6t" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.187404 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75s9r\" (UniqueName: \"kubernetes.io/projected/176cdbb9-4289-4322-8112-7ffae6a8efe8-kube-api-access-75s9r\") pod \"test-operator-controller-manager-5458f77c4-k5cjx\" (UID: \"176cdbb9-4289-4322-8112-7ffae6a8efe8\") " pod="openstack-operators/test-operator-controller-manager-5458f77c4-k5cjx" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.188137 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ca5131fc-12e4-41b5-a4e2-6e71ed4049e1-cert\") pod \"openstack-baremetal-operator-controller-manager-5956dffb7b725zs\" (UID: \"ca5131fc-12e4-41b5-a4e2-6e71ed4049e1\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5956dffb7b725zs" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.188399 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94t6l\" (UniqueName: \"kubernetes.io/projected/2ac4be00-5d25-4bb4-8f98-599d7d637d38-kube-api-access-94t6l\") pod \"ovn-operator-controller-manager-79df5fb58c-g57bz\" (UID: \"2ac4be00-5d25-4bb4-8f98-599d7d637d38\") " pod="openstack-operators/ovn-operator-controller-manager-79df5fb58c-g57bz" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.188446 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7sz9\" (UniqueName: \"kubernetes.io/projected/ca5131fc-12e4-41b5-a4e2-6e71ed4049e1-kube-api-access-x7sz9\") pod 
\"openstack-baremetal-operator-controller-manager-5956dffb7b725zs\" (UID: \"ca5131fc-12e4-41b5-a4e2-6e71ed4049e1\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5956dffb7b725zs" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.188482 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmfbp\" (UniqueName: \"kubernetes.io/projected/18c5ac5f-08f2-431b-9aaf-0b2e5c3f9bbf-kube-api-access-xmfbp\") pod \"placement-operator-controller-manager-68b6c87b68-wxv94\" (UID: \"18c5ac5f-08f2-431b-9aaf-0b2e5c3f9bbf\") " pod="openstack-operators/placement-operator-controller-manager-68b6c87b68-wxv94" Oct 10 16:48:06 crc kubenswrapper[4799]: E1010 16:48:06.188857 4799 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 10 16:48:06 crc kubenswrapper[4799]: E1010 16:48:06.188934 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ca5131fc-12e4-41b5-a4e2-6e71ed4049e1-cert podName:ca5131fc-12e4-41b5-a4e2-6e71ed4049e1 nodeName:}" failed. No retries permitted until 2025-10-10 16:48:06.688908133 +0000 UTC m=+980.197232238 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/ca5131fc-12e4-41b5-a4e2-6e71ed4049e1-cert") pod "openstack-baremetal-operator-controller-manager-5956dffb7b725zs" (UID: "ca5131fc-12e4-41b5-a4e2-6e71ed4049e1") : secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.203270 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-5b95c8954b-nfst6"] Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.204687 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-5b95c8954b-nfst6" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.207857 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-5df598886f-rcld5" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.209041 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-5b95c8954b-nfst6"] Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.210826 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94t6l\" (UniqueName: \"kubernetes.io/projected/2ac4be00-5d25-4bb4-8f98-599d7d637d38-kube-api-access-94t6l\") pod \"ovn-operator-controller-manager-79df5fb58c-g57bz\" (UID: \"2ac4be00-5d25-4bb4-8f98-599d7d637d38\") " pod="openstack-operators/ovn-operator-controller-manager-79df5fb58c-g57bz" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.214828 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.215695 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-96jcc" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.216561 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmfbp\" (UniqueName: \"kubernetes.io/projected/18c5ac5f-08f2-431b-9aaf-0b2e5c3f9bbf-kube-api-access-xmfbp\") pod \"placement-operator-controller-manager-68b6c87b68-wxv94\" (UID: \"18c5ac5f-08f2-431b-9aaf-0b2e5c3f9bbf\") " pod="openstack-operators/placement-operator-controller-manager-68b6c87b68-wxv94" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.224271 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7sz9\" (UniqueName: \"kubernetes.io/projected/ca5131fc-12e4-41b5-a4e2-6e71ed4049e1-kube-api-access-x7sz9\") pod \"openstack-baremetal-operator-controller-manager-5956dffb7b725zs\" (UID: \"ca5131fc-12e4-41b5-a4e2-6e71ed4049e1\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5956dffb7b725zs" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.225808 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-z59wk"] Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.246808 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-6thbh" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.264020 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-z59wk"] Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.264145 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-z59wk" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.266927 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-zfpqv" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.291316 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-79df5fb58c-g57bz" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.293244 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9267d3c3-fe67-41e8-9240-81955432d822-cert\") pod \"openstack-operator-controller-manager-5b95c8954b-nfst6\" (UID: \"9267d3c3-fe67-41e8-9240-81955432d822\") " pod="openstack-operators/openstack-operator-controller-manager-5b95c8954b-nfst6" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.293315 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7qgzr\" (UniqueName: \"kubernetes.io/projected/e669c0e2-564c-4391-b2d0-5ab8cc0f38cd-kube-api-access-7qgzr\") pod \"telemetry-operator-controller-manager-67cfc6749b-vbq52\" (UID: \"e669c0e2-564c-4391-b2d0-5ab8cc0f38cd\") " pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-vbq52" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.293346 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/62fda0e4-55d3-481f-8da4-66e8f4dd39d4-cert\") pod \"infra-operator-controller-manager-656bcbd775-nh542\" (UID: \"62fda0e4-55d3-481f-8da4-66e8f4dd39d4\") " pod="openstack-operators/infra-operator-controller-manager-656bcbd775-nh542" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.293371 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tdmsh\" (UniqueName: \"kubernetes.io/projected/e9f4174f-ea12-45c8-840c-ccdce9dd4c1f-kube-api-access-tdmsh\") pod \"swift-operator-controller-manager-db6d7f97b-pcn6t\" (UID: \"e9f4174f-ea12-45c8-840c-ccdce9dd4c1f\") " pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-pcn6t" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.293388 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75s9r\" (UniqueName: \"kubernetes.io/projected/176cdbb9-4289-4322-8112-7ffae6a8efe8-kube-api-access-75s9r\") pod \"test-operator-controller-manager-5458f77c4-k5cjx\" (UID: \"176cdbb9-4289-4322-8112-7ffae6a8efe8\") " pod="openstack-operators/test-operator-controller-manager-5458f77c4-k5cjx" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.293416 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzmnd\" (UniqueName: \"kubernetes.io/projected/9267d3c3-fe67-41e8-9240-81955432d822-kube-api-access-bzmnd\") pod \"openstack-operator-controller-manager-5b95c8954b-nfst6\" (UID: \"9267d3c3-fe67-41e8-9240-81955432d822\") " pod="openstack-operators/openstack-operator-controller-manager-5b95c8954b-nfst6" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.293449 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8ghq\" (UniqueName: \"kubernetes.io/projected/98a13753-0658-4a22-ba99-aaebe22b5746-kube-api-access-b8ghq\") pod \"watcher-operator-controller-manager-7f554bff7b-dq9x7\" (UID: \"98a13753-0658-4a22-ba99-aaebe22b5746\") " pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-dq9x7" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.330868 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-68b6c87b68-wxv94" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.338244 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7qgzr\" (UniqueName: \"kubernetes.io/projected/e669c0e2-564c-4391-b2d0-5ab8cc0f38cd-kube-api-access-7qgzr\") pod \"telemetry-operator-controller-manager-67cfc6749b-vbq52\" (UID: \"e669c0e2-564c-4391-b2d0-5ab8cc0f38cd\") " pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-vbq52" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.338410 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75s9r\" (UniqueName: \"kubernetes.io/projected/176cdbb9-4289-4322-8112-7ffae6a8efe8-kube-api-access-75s9r\") pod \"test-operator-controller-manager-5458f77c4-k5cjx\" (UID: \"176cdbb9-4289-4322-8112-7ffae6a8efe8\") " pod="openstack-operators/test-operator-controller-manager-5458f77c4-k5cjx" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.338578 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tdmsh\" (UniqueName: \"kubernetes.io/projected/e9f4174f-ea12-45c8-840c-ccdce9dd4c1f-kube-api-access-tdmsh\") pod \"swift-operator-controller-manager-db6d7f97b-pcn6t\" (UID: \"e9f4174f-ea12-45c8-840c-ccdce9dd4c1f\") " pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-pcn6t" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.345510 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/62fda0e4-55d3-481f-8da4-66e8f4dd39d4-cert\") pod \"infra-operator-controller-manager-656bcbd775-nh542\" (UID: \"62fda0e4-55d3-481f-8da4-66e8f4dd39d4\") " pod="openstack-operators/infra-operator-controller-manager-656bcbd775-nh542" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.394882 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzmnd\" (UniqueName: \"kubernetes.io/projected/9267d3c3-fe67-41e8-9240-81955432d822-kube-api-access-bzmnd\") pod \"openstack-operator-controller-manager-5b95c8954b-nfst6\" (UID: \"9267d3c3-fe67-41e8-9240-81955432d822\") " pod="openstack-operators/openstack-operator-controller-manager-5b95c8954b-nfst6" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.394954 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8ghq\" (UniqueName: \"kubernetes.io/projected/98a13753-0658-4a22-ba99-aaebe22b5746-kube-api-access-b8ghq\") pod \"watcher-operator-controller-manager-7f554bff7b-dq9x7\" (UID: \"98a13753-0658-4a22-ba99-aaebe22b5746\") " pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-dq9x7" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.395083 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9267d3c3-fe67-41e8-9240-81955432d822-cert\") pod \"openstack-operator-controller-manager-5b95c8954b-nfst6\" (UID: \"9267d3c3-fe67-41e8-9240-81955432d822\") " pod="openstack-operators/openstack-operator-controller-manager-5b95c8954b-nfst6" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.395168 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5x9z6\" (UniqueName: \"kubernetes.io/projected/bd9e6a7c-702e-4424-9cbb-f9832e91d4a3-kube-api-access-5x9z6\") pod 
\"rabbitmq-cluster-operator-manager-5f97d8c699-z59wk\" (UID: \"bd9e6a7c-702e-4424-9cbb-f9832e91d4a3\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-z59wk" Oct 10 16:48:06 crc kubenswrapper[4799]: E1010 16:48:06.396208 4799 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Oct 10 16:48:06 crc kubenswrapper[4799]: E1010 16:48:06.396358 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9267d3c3-fe67-41e8-9240-81955432d822-cert podName:9267d3c3-fe67-41e8-9240-81955432d822 nodeName:}" failed. No retries permitted until 2025-10-10 16:48:06.896337306 +0000 UTC m=+980.404661421 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/9267d3c3-fe67-41e8-9240-81955432d822-cert") pod "openstack-operator-controller-manager-5b95c8954b-nfst6" (UID: "9267d3c3-fe67-41e8-9240-81955432d822") : secret "webhook-server-cert" not found Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.413444 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bzmnd\" (UniqueName: \"kubernetes.io/projected/9267d3c3-fe67-41e8-9240-81955432d822-kube-api-access-bzmnd\") pod \"openstack-operator-controller-manager-5b95c8954b-nfst6\" (UID: \"9267d3c3-fe67-41e8-9240-81955432d822\") " pod="openstack-operators/openstack-operator-controller-manager-5b95c8954b-nfst6" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.417343 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8ghq\" (UniqueName: \"kubernetes.io/projected/98a13753-0658-4a22-ba99-aaebe22b5746-kube-api-access-b8ghq\") pod \"watcher-operator-controller-manager-7f554bff7b-dq9x7\" (UID: \"98a13753-0658-4a22-ba99-aaebe22b5746\") " pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-dq9x7" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.448812 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-656bcbd775-nh542" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.476014 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-pcn6t" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.496138 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5x9z6\" (UniqueName: \"kubernetes.io/projected/bd9e6a7c-702e-4424-9cbb-f9832e91d4a3-kube-api-access-5x9z6\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-z59wk\" (UID: \"bd9e6a7c-702e-4424-9cbb-f9832e91d4a3\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-z59wk" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.513538 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5x9z6\" (UniqueName: \"kubernetes.io/projected/bd9e6a7c-702e-4424-9cbb-f9832e91d4a3-kube-api-access-5x9z6\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-z59wk\" (UID: \"bd9e6a7c-702e-4424-9cbb-f9832e91d4a3\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-z59wk" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.517249 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-vbq52" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.533355 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5458f77c4-k5cjx" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.562423 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-dq9x7" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.699251 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-z59wk" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.700258 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ca5131fc-12e4-41b5-a4e2-6e71ed4049e1-cert\") pod \"openstack-baremetal-operator-controller-manager-5956dffb7b725zs\" (UID: \"ca5131fc-12e4-41b5-a4e2-6e71ed4049e1\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5956dffb7b725zs" Oct 10 16:48:06 crc kubenswrapper[4799]: E1010 16:48:06.700420 4799 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 10 16:48:06 crc kubenswrapper[4799]: E1010 16:48:06.700463 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ca5131fc-12e4-41b5-a4e2-6e71ed4049e1-cert podName:ca5131fc-12e4-41b5-a4e2-6e71ed4049e1 nodeName:}" failed. No retries permitted until 2025-10-10 16:48:07.700447738 +0000 UTC m=+981.208771853 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/ca5131fc-12e4-41b5-a4e2-6e71ed4049e1-cert") pod "openstack-baremetal-operator-controller-manager-5956dffb7b725zs" (UID: "ca5131fc-12e4-41b5-a4e2-6e71ed4049e1") : secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.835718 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-85d5d9dd78-w2d2v"] Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.845593 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-7b7fb68549-bl9j6"] Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.903799 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9267d3c3-fe67-41e8-9240-81955432d822-cert\") pod \"openstack-operator-controller-manager-5b95c8954b-nfst6\" (UID: \"9267d3c3-fe67-41e8-9240-81955432d822\") " pod="openstack-operators/openstack-operator-controller-manager-5b95c8954b-nfst6" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.907436 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9267d3c3-fe67-41e8-9240-81955432d822-cert\") pod \"openstack-operator-controller-manager-5b95c8954b-nfst6\" (UID: \"9267d3c3-fe67-41e8-9240-81955432d822\") " pod="openstack-operators/openstack-operator-controller-manager-5b95c8954b-nfst6" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.976172 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-5b95c8954b-nfst6" Oct 10 16:48:06 crc kubenswrapper[4799]: I1010 16:48:06.999623 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-ll94t"] Oct 10 16:48:07 crc kubenswrapper[4799]: I1010 16:48:07.007377 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-7ffbcb7588-kv2gv"] Oct 10 16:48:07 crc kubenswrapper[4799]: I1010 16:48:07.015355 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-658bdf4b74-stptz"] Oct 10 16:48:07 crc kubenswrapper[4799]: I1010 16:48:07.018870 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-9c5c78d49-vjjnj"] Oct 10 16:48:07 crc kubenswrapper[4799]: W1010 16:48:07.025887 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod490a3592_9d71_4182_8b1b_6f8c55a01bde.slice/crio-fd0e0cea226a59cc3b7d8b1532c948c9f60a1938763019e2d6a45977d67c1c44 WatchSource:0}: Error finding container fd0e0cea226a59cc3b7d8b1532c948c9f60a1938763019e2d6a45977d67c1c44: Status 404 returned error can't find the container with id fd0e0cea226a59cc3b7d8b1532c948c9f60a1938763019e2d6a45977d67c1c44 Oct 10 16:48:07 crc kubenswrapper[4799]: I1010 16:48:07.099959 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-68b6c87b68-wxv94"] Oct 10 16:48:07 crc kubenswrapper[4799]: I1010 16:48:07.106847 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-5f67fbc655-2qw6c"] Oct 10 16:48:07 crc kubenswrapper[4799]: W1010 16:48:07.109838 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod18c5ac5f_08f2_431b_9aaf_0b2e5c3f9bbf.slice/crio-72e84e00c215a1eaaafdd40214bb496a950ba9696fdd3968761ec815e5cf1514 WatchSource:0}: Error finding container 72e84e00c215a1eaaafdd40214bb496a950ba9696fdd3968761ec815e5cf1514: Status 404 returned error can't find the container with id 72e84e00c215a1eaaafdd40214bb496a950ba9696fdd3968761ec815e5cf1514 Oct 10 16:48:07 crc kubenswrapper[4799]: W1010 16:48:07.117538 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod92c8227d_0d33_41f9_b186_2f17c2753fa2.slice/crio-efd02bf1babd52161d37a2f828a87a0fa07e558388caf321234589cf0ec29fcb WatchSource:0}: Error finding container efd02bf1babd52161d37a2f828a87a0fa07e558388caf321234589cf0ec29fcb: Status 404 returned error can't find the container with id efd02bf1babd52161d37a2f828a87a0fa07e558388caf321234589cf0ec29fcb Oct 10 16:48:07 crc kubenswrapper[4799]: I1010 16:48:07.306226 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-67cfc6749b-vbq52"] Oct 10 16:48:07 crc kubenswrapper[4799]: I1010 16:48:07.323290 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-84b9b84486-dvk5w"] Oct 10 16:48:07 crc kubenswrapper[4799]: I1010 16:48:07.326373 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-85d5d9dd78-w2d2v" 
event={"ID":"324da982-9067-490a-98a5-9ad58296841a","Type":"ContainerStarted","Data":"c43e1d0c26551b276b45c9706588be2f78eb722f54b7510bd59885a91fded835"} Oct 10 16:48:07 crc kubenswrapper[4799]: I1010 16:48:07.328078 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-db6d7f97b-pcn6t"] Oct 10 16:48:07 crc kubenswrapper[4799]: W1010 16:48:07.329897 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode669c0e2_564c_4391_b2d0_5ab8cc0f38cd.slice/crio-3e7114936e96683c6e5149680d5a66d3b9ad21a5ae43d0cc80c1c789238539e7 WatchSource:0}: Error finding container 3e7114936e96683c6e5149680d5a66d3b9ad21a5ae43d0cc80c1c789238539e7: Status 404 returned error can't find the container with id 3e7114936e96683c6e5149680d5a66d3b9ad21a5ae43d0cc80c1c789238539e7 Oct 10 16:48:07 crc kubenswrapper[4799]: I1010 16:48:07.339748 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-bl9j6" event={"ID":"98cf31e5-618a-4363-8a3d-1b0d0bc75b48","Type":"ContainerStarted","Data":"57f8efbd3067eb6ef0a2c0ca1ab829ea337be48e45f629ecc0e9e98de3893397"} Oct 10 16:48:07 crc kubenswrapper[4799]: I1010 16:48:07.345886 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-9c5c78d49-vjjnj" event={"ID":"c89f9acf-7ee6-4600-9331-635eb7fce931","Type":"ContainerStarted","Data":"98ba0d186c55b3872a3569fb785bfd7c3c2b524ce8808979b4de91500f6f81fa"} Oct 10 16:48:07 crc kubenswrapper[4799]: I1010 16:48:07.349434 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-68b6c87b68-wxv94" event={"ID":"18c5ac5f-08f2-431b-9aaf-0b2e5c3f9bbf","Type":"ContainerStarted","Data":"72e84e00c215a1eaaafdd40214bb496a950ba9696fdd3968761ec815e5cf1514"} Oct 10 16:48:07 crc kubenswrapper[4799]: I1010 16:48:07.351486 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-5df598886f-rcld5"] Oct 10 16:48:07 crc kubenswrapper[4799]: I1010 16:48:07.357604 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-858f76bbdd-tzx89"] Oct 10 16:48:07 crc kubenswrapper[4799]: I1010 16:48:07.358478 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-7ffbcb7588-kv2gv" event={"ID":"b0caa8f4-5c59-402a-9025-a2ba80d70577","Type":"ContainerStarted","Data":"70f88bc061b5779024b177a063b86326dd0a1adb3a0fe84dccd56d794227f80c"} Oct 10 16:48:07 crc kubenswrapper[4799]: I1010 16:48:07.361043 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5f67fbc655-2qw6c" event={"ID":"92c8227d-0d33-41f9-b186-2f17c2753fa2","Type":"ContainerStarted","Data":"efd02bf1babd52161d37a2f828a87a0fa07e558388caf321234589cf0ec29fcb"} Oct 10 16:48:07 crc kubenswrapper[4799]: I1010 16:48:07.363716 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-79df5fb58c-g57bz"] Oct 10 16:48:07 crc kubenswrapper[4799]: I1010 16:48:07.364070 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-ll94t" 
event={"ID":"490a3592-9d71-4182-8b1b-6f8c55a01bde","Type":"ContainerStarted","Data":"fd0e0cea226a59cc3b7d8b1532c948c9f60a1938763019e2d6a45977d67c1c44"} Oct 10 16:48:07 crc kubenswrapper[4799]: I1010 16:48:07.365548 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-658bdf4b74-stptz" event={"ID":"9e711c48-2d32-4933-b13f-a0f9fec33e0d","Type":"ContainerStarted","Data":"934ff938c06f3b51f46727e2d78c91cd811dbd8d7a0e003a46bed6c6faa30483"} Oct 10 16:48:07 crc kubenswrapper[4799]: E1010 16:48:07.367602 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:33652e75a03a058769019fe8d8c51585a6eeefef5e1ecb96f9965434117954f2,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fc9qw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-79d585cb66-45997_openstack-operators(4367b146-2ac4-497e-b15a-c35615498938): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 10 16:48:07 crc kubenswrapper[4799]: I1010 16:48:07.370723 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-79d585cb66-45997"] Oct 10 16:48:07 crc kubenswrapper[4799]: W1010 16:48:07.375821 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71551afb_1aa4_4cdf_9cb4_a136ec3cadf5.slice/crio-7a5bcef3f6c80e707d6dd63c5e7132e5278910882207de79723d6ceb495e055b WatchSource:0}: Error finding container 
7a5bcef3f6c80e707d6dd63c5e7132e5278910882207de79723d6ceb495e055b: Status 404 returned error can't find the container with id 7a5bcef3f6c80e707d6dd63c5e7132e5278910882207de79723d6ceb495e055b Oct 10 16:48:07 crc kubenswrapper[4799]: I1010 16:48:07.376022 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-6thbh"] Oct 10 16:48:07 crc kubenswrapper[4799]: W1010 16:48:07.376199 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2ac4be00_5d25_4bb4_8f98_599d7d637d38.slice/crio-8b4ec4397cd3ba0e68a1cb2d42413f972534b378ed8c0a0030a0003269db9f7c WatchSource:0}: Error finding container 8b4ec4397cd3ba0e68a1cb2d42413f972534b378ed8c0a0030a0003269db9f7c: Status 404 returned error can't find the container with id 8b4ec4397cd3ba0e68a1cb2d42413f972534b378ed8c0a0030a0003269db9f7c Oct 10 16:48:07 crc kubenswrapper[4799]: W1010 16:48:07.377931 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod118111a7_9601_4a05_94b9_79601cb47623.slice/crio-0f0a04fadfb6ba79d287a680f4b17e32c28bf4a5a73a32b5ff317a5c862eba02 WatchSource:0}: Error finding container 0f0a04fadfb6ba79d287a680f4b17e32c28bf4a5a73a32b5ff317a5c862eba02: Status 404 returned error can't find the container with id 0f0a04fadfb6ba79d287a680f4b17e32c28bf4a5a73a32b5ff317a5c862eba02 Oct 10 16:48:07 crc kubenswrapper[4799]: E1010 16:48:07.379655 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:315e558023b41ac1aa215082096995a03810c5b42910a33b00427ffcac9c6a14,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-94t6l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-79df5fb58c-g57bz_openstack-operators(2ac4be00-5d25-4bb4-8f98-599d7d637d38): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 10 16:48:07 crc kubenswrapper[4799]: I1010 16:48:07.383905 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-22wxr"] Oct 10 16:48:07 crc kubenswrapper[4799]: E1010 16:48:07.393174 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:4b4a17fe08ce00e375afaaec6a28835f5c1784f03d11c4558376ac04130f3a9e,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tdmsh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
swift-operator-controller-manager-db6d7f97b-pcn6t_openstack-operators(e9f4174f-ea12-45c8-840c-ccdce9dd4c1f): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 10 16:48:07 crc kubenswrapper[4799]: E1010 16:48:07.393284 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:47278ed28e02df00892f941763aa0d69547327318e8a983e07f4577acd288167,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zpbk7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-f9fb45f8f-22wxr_openstack-operators(118111a7-9601-4a05-94b9-79601cb47623): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 10 16:48:07 crc kubenswrapper[4799]: E1010 16:48:07.393672 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:09deecf840d38ff6af3c924729cf0a9444bc985848bfbe7c918019b88a6bc4d7,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mp69f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-69fdcfc5f5-6thbh_openstack-operators(71551afb-1aa4-4cdf-9cb4-a136ec3cadf5): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 10 16:48:07 crc kubenswrapper[4799]: I1010 16:48:07.527281 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-7f554bff7b-dq9x7"] Oct 10 16:48:07 crc kubenswrapper[4799]: I1010 16:48:07.566923 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-z59wk"] Oct 10 16:48:07 crc kubenswrapper[4799]: I1010 16:48:07.580151 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5458f77c4-k5cjx"] Oct 10 16:48:07 crc kubenswrapper[4799]: W1010 16:48:07.580347 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod98a13753_0658_4a22_ba99_aaebe22b5746.slice/crio-0bbf5c0c0d5dc744805d64e27b5b01350469ff7297bea295cacc3f9eb6b1b94a WatchSource:0}: Error finding container 0bbf5c0c0d5dc744805d64e27b5b01350469ff7297bea295cacc3f9eb6b1b94a: Status 404 returned error can't find the container with id 0bbf5c0c0d5dc744805d64e27b5b01350469ff7297bea295cacc3f9eb6b1b94a Oct 10 16:48:07 crc kubenswrapper[4799]: I1010 16:48:07.586783 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-656bcbd775-nh542"] Oct 10 16:48:07 crc kubenswrapper[4799]: W1010 16:48:07.590784 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod176cdbb9_4289_4322_8112_7ffae6a8efe8.slice/crio-5731e5ca032863a394aa05447f606579443da390cfc1cada1e231e6f5dead030 WatchSource:0}: Error finding container 5731e5ca032863a394aa05447f606579443da390cfc1cada1e231e6f5dead030: Status 404 returned error can't find the container with id 5731e5ca032863a394aa05447f606579443da390cfc1cada1e231e6f5dead030 Oct 10 16:48:07 crc kubenswrapper[4799]: E1010 
16:48:07.600843 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:7e584b1c430441c8b6591dadeff32e065de8a185ad37ef90d2e08d37e59aab4a,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-75s9r,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5458f77c4-k5cjx_openstack-operators(176cdbb9-4289-4322-8112-7ffae6a8efe8): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 10 16:48:07 crc kubenswrapper[4799]: W1010 16:48:07.602454 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod62fda0e4_55d3_481f_8da4_66e8f4dd39d4.slice/crio-bd55822953110555d639b3e0de1e9182b8581ccad23e86ba5d3b99e6ab6c0c10 WatchSource:0}: Error finding container bd55822953110555d639b3e0de1e9182b8581ccad23e86ba5d3b99e6ab6c0c10: Status 404 returned error can't find the container with id bd55822953110555d639b3e0de1e9182b8581ccad23e86ba5d3b99e6ab6c0c10 Oct 10 16:48:07 crc kubenswrapper[4799]: E1010 16:48:07.603970 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5x9z6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-5f97d8c699-z59wk_openstack-operators(bd9e6a7c-702e-4424-9cbb-f9832e91d4a3): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 10 16:48:07 crc kubenswrapper[4799]: E1010 16:48:07.605048 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-z59wk" podUID="bd9e6a7c-702e-4424-9cbb-f9832e91d4a3" Oct 10 16:48:07 crc kubenswrapper[4799]: E1010 16:48:07.609945 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/infra-operator@sha256:5cfb2ae1092445950b39dd59caa9a8c9367f42fb8353a8c3848d3bc729f24492,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{600 -3} {} 600m DecimalSI},memory: {{2147483648 0} {} 2Gi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{536870912 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vtgbl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod infra-operator-controller-manager-656bcbd775-nh542_openstack-operators(62fda0e4-55d3-481f-8da4-66e8f4dd39d4): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 10 16:48:07 crc kubenswrapper[4799]: W1010 16:48:07.612644 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9267d3c3_fe67_41e8_9240_81955432d822.slice/crio-966f59acb0c9a7259ba5d477689e5d3cfa3df02170932838a638225e51b6645a WatchSource:0}: Error finding container 966f59acb0c9a7259ba5d477689e5d3cfa3df02170932838a638225e51b6645a: Status 404 returned error can't find the container with id 966f59acb0c9a7259ba5d477689e5d3cfa3df02170932838a638225e51b6645a Oct 10 16:48:07 crc kubenswrapper[4799]: E1010 16:48:07.625898 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-45997" podUID="4367b146-2ac4-497e-b15a-c35615498938" Oct 10 16:48:07 crc kubenswrapper[4799]: I1010 16:48:07.628409 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-5b95c8954b-nfst6"] Oct 10 16:48:07 crc kubenswrapper[4799]: E1010 16:48:07.706317 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/ovn-operator-controller-manager-79df5fb58c-g57bz" podUID="2ac4be00-5d25-4bb4-8f98-599d7d637d38" Oct 10 16:48:07 crc kubenswrapper[4799]: I1010 16:48:07.714009 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ca5131fc-12e4-41b5-a4e2-6e71ed4049e1-cert\") pod \"openstack-baremetal-operator-controller-manager-5956dffb7b725zs\" (UID: \"ca5131fc-12e4-41b5-a4e2-6e71ed4049e1\") " 
pod="openstack-operators/openstack-baremetal-operator-controller-manager-5956dffb7b725zs" Oct 10 16:48:07 crc kubenswrapper[4799]: I1010 16:48:07.743158 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ca5131fc-12e4-41b5-a4e2-6e71ed4049e1-cert\") pod \"openstack-baremetal-operator-controller-manager-5956dffb7b725zs\" (UID: \"ca5131fc-12e4-41b5-a4e2-6e71ed4049e1\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5956dffb7b725zs" Oct 10 16:48:07 crc kubenswrapper[4799]: E1010 16:48:07.743378 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-22wxr" podUID="118111a7-9601-4a05-94b9-79601cb47623" Oct 10 16:48:07 crc kubenswrapper[4799]: E1010 16:48:07.763571 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-pcn6t" podUID="e9f4174f-ea12-45c8-840c-ccdce9dd4c1f" Oct 10 16:48:07 crc kubenswrapper[4799]: I1010 16:48:07.768093 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5956dffb7b725zs" Oct 10 16:48:07 crc kubenswrapper[4799]: E1010 16:48:07.768837 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-6thbh" podUID="71551afb-1aa4-4cdf-9cb4-a136ec3cadf5" Oct 10 16:48:07 crc kubenswrapper[4799]: E1010 16:48:07.998429 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-5458f77c4-k5cjx" podUID="176cdbb9-4289-4322-8112-7ffae6a8efe8" Oct 10 16:48:08 crc kubenswrapper[4799]: E1010 16:48:08.041359 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/infra-operator-controller-manager-656bcbd775-nh542" podUID="62fda0e4-55d3-481f-8da4-66e8f4dd39d4" Oct 10 16:48:08 crc kubenswrapper[4799]: I1010 16:48:08.063347 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5956dffb7b725zs"] Oct 10 16:48:08 crc kubenswrapper[4799]: I1010 16:48:08.434963 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5458f77c4-k5cjx" event={"ID":"176cdbb9-4289-4322-8112-7ffae6a8efe8","Type":"ContainerStarted","Data":"5ece0c58d791c7de79ff0abdf145af4570be9ceda9d58132b9081f166ef19c76"} Oct 10 16:48:08 crc kubenswrapper[4799]: I1010 16:48:08.435014 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5458f77c4-k5cjx" event={"ID":"176cdbb9-4289-4322-8112-7ffae6a8efe8","Type":"ContainerStarted","Data":"5731e5ca032863a394aa05447f606579443da390cfc1cada1e231e6f5dead030"} Oct 10 16:48:08 crc kubenswrapper[4799]: E1010 16:48:08.441986 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/test-operator@sha256:7e584b1c430441c8b6591dadeff32e065de8a185ad37ef90d2e08d37e59aab4a\\\"\"" pod="openstack-operators/test-operator-controller-manager-5458f77c4-k5cjx" podUID="176cdbb9-4289-4322-8112-7ffae6a8efe8" Oct 10 16:48:08 crc kubenswrapper[4799]: I1010 16:48:08.444862 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-5b95c8954b-nfst6" event={"ID":"9267d3c3-fe67-41e8-9240-81955432d822","Type":"ContainerStarted","Data":"00178e02a6936e1f7b0839f3456f8ae6276ba940d52a8708b5179c6da236e386"} Oct 10 16:48:08 crc kubenswrapper[4799]: I1010 16:48:08.444892 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-5b95c8954b-nfst6" event={"ID":"9267d3c3-fe67-41e8-9240-81955432d822","Type":"ContainerStarted","Data":"39f303fc7d1fa6aac240b4de36f262056f740145b8a2d19fe62faf32d014689c"} Oct 10 16:48:08 crc kubenswrapper[4799]: I1010 16:48:08.444901 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-5b95c8954b-nfst6" event={"ID":"9267d3c3-fe67-41e8-9240-81955432d822","Type":"ContainerStarted","Data":"966f59acb0c9a7259ba5d477689e5d3cfa3df02170932838a638225e51b6645a"} Oct 10 16:48:08 crc kubenswrapper[4799]: I1010 16:48:08.445143 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-5b95c8954b-nfst6" Oct 10 16:48:08 crc kubenswrapper[4799]: I1010 16:48:08.448378 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-pcn6t" event={"ID":"e9f4174f-ea12-45c8-840c-ccdce9dd4c1f","Type":"ContainerStarted","Data":"4c5ba17834251a39c474f5f4ee837f5715ffc6c27855451325db688fc011aad5"} Oct 10 16:48:08 crc kubenswrapper[4799]: I1010 16:48:08.448444 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-pcn6t" event={"ID":"e9f4174f-ea12-45c8-840c-ccdce9dd4c1f","Type":"ContainerStarted","Data":"1b754c19131dbd1c47a5fc555d271b457976f18b44dcc88e16d839446adfa49b"} Oct 10 16:48:08 crc kubenswrapper[4799]: E1010 16:48:08.452471 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:4b4a17fe08ce00e375afaaec6a28835f5c1784f03d11c4558376ac04130f3a9e\\\"\"" pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-pcn6t" podUID="e9f4174f-ea12-45c8-840c-ccdce9dd4c1f" Oct 10 16:48:08 crc kubenswrapper[4799]: I1010 16:48:08.453451 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-vbq52" event={"ID":"e669c0e2-564c-4391-b2d0-5ab8cc0f38cd","Type":"ContainerStarted","Data":"3e7114936e96683c6e5149680d5a66d3b9ad21a5ae43d0cc80c1c789238539e7"} Oct 10 16:48:08 crc kubenswrapper[4799]: I1010 16:48:08.473140 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-6thbh" event={"ID":"71551afb-1aa4-4cdf-9cb4-a136ec3cadf5","Type":"ContainerStarted","Data":"2fd80d3274021ef9c5173aa22404d3163b62dd33f2022e099835c75673c831ee"} Oct 10 16:48:08 crc kubenswrapper[4799]: I1010 16:48:08.473200 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-6thbh" event={"ID":"71551afb-1aa4-4cdf-9cb4-a136ec3cadf5","Type":"ContainerStarted","Data":"7a5bcef3f6c80e707d6dd63c5e7132e5278910882207de79723d6ceb495e055b"} Oct 10 16:48:08 crc kubenswrapper[4799]: E1010 16:48:08.478499 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:09deecf840d38ff6af3c924729cf0a9444bc985848bfbe7c918019b88a6bc4d7\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-6thbh" podUID="71551afb-1aa4-4cdf-9cb4-a136ec3cadf5" Oct 10 16:48:08 crc kubenswrapper[4799]: I1010 16:48:08.487389 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-858f76bbdd-tzx89" event={"ID":"37273794-7563-423e-a2d5-86c9e9f957cb","Type":"ContainerStarted","Data":"d1365d5a94dac1316bcaf28d0c3a94a8c04aeffcaa570c2adffec4b03f36789a"} Oct 10 16:48:08 crc kubenswrapper[4799]: I1010 16:48:08.491384 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5956dffb7b725zs" event={"ID":"ca5131fc-12e4-41b5-a4e2-6e71ed4049e1","Type":"ContainerStarted","Data":"2b994a0003052ca925d2a34c9a2f8f503218ad5afc37aef9f3fafd659fc8e28b"} Oct 10 16:48:08 crc kubenswrapper[4799]: I1010 16:48:08.515631 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-22wxr" event={"ID":"118111a7-9601-4a05-94b9-79601cb47623","Type":"ContainerStarted","Data":"12fb9c49c7f25fecf833e1047cfb5e5bf0f3781e31230c27f1ae535ef8a537ab"} Oct 10 16:48:08 crc kubenswrapper[4799]: I1010 16:48:08.515672 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-22wxr" event={"ID":"118111a7-9601-4a05-94b9-79601cb47623","Type":"ContainerStarted","Data":"0f0a04fadfb6ba79d287a680f4b17e32c28bf4a5a73a32b5ff317a5c862eba02"} Oct 10 16:48:08 crc kubenswrapper[4799]: E1010 16:48:08.517381 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:47278ed28e02df00892f941763aa0d69547327318e8a983e07f4577acd288167\\\"\"" pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-22wxr" podUID="118111a7-9601-4a05-94b9-79601cb47623" Oct 10 16:48:08 crc kubenswrapper[4799]: I1010 16:48:08.524831 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-5b95c8954b-nfst6" podStartSLOduration=2.52481741 podStartE2EDuration="2.52481741s" podCreationTimestamp="2025-10-10 16:48:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:48:08.516844263 +0000 UTC m=+982.025168378" watchObservedRunningTime="2025-10-10 16:48:08.52481741 +0000 UTC m=+982.033141525" Oct 10 16:48:08 crc kubenswrapper[4799]: I1010 16:48:08.531215 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84b9b84486-dvk5w" event={"ID":"a61aa86a-a90e-439b-85e9-15b7a1466785","Type":"ContainerStarted","Data":"2d5f00300eb0c902fc420cc8536eafc0d7a89a0829de1d7785853a76a4fc84bd"} Oct 10 16:48:08 crc 
kubenswrapper[4799]: I1010 16:48:08.541927 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-z59wk" event={"ID":"bd9e6a7c-702e-4424-9cbb-f9832e91d4a3","Type":"ContainerStarted","Data":"6f2ab328bf7b6a4afb0c4f8de773bccbfb8da7e7a5216d68e35f2c8d52db14e1"} Oct 10 16:48:08 crc kubenswrapper[4799]: E1010 16:48:08.546097 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-z59wk" podUID="bd9e6a7c-702e-4424-9cbb-f9832e91d4a3" Oct 10 16:48:08 crc kubenswrapper[4799]: I1010 16:48:08.556014 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-79df5fb58c-g57bz" event={"ID":"2ac4be00-5d25-4bb4-8f98-599d7d637d38","Type":"ContainerStarted","Data":"b1cac6937052df84d66ba0ed9cad98f5e4c453c1c6bf14aa4191abcd7b1beb0f"} Oct 10 16:48:08 crc kubenswrapper[4799]: I1010 16:48:08.556056 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-79df5fb58c-g57bz" event={"ID":"2ac4be00-5d25-4bb4-8f98-599d7d637d38","Type":"ContainerStarted","Data":"8b4ec4397cd3ba0e68a1cb2d42413f972534b378ed8c0a0030a0003269db9f7c"} Oct 10 16:48:08 crc kubenswrapper[4799]: E1010 16:48:08.559676 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:315e558023b41ac1aa215082096995a03810c5b42910a33b00427ffcac9c6a14\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-79df5fb58c-g57bz" podUID="2ac4be00-5d25-4bb4-8f98-599d7d637d38" Oct 10 16:48:08 crc kubenswrapper[4799]: I1010 16:48:08.578992 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-45997" event={"ID":"4367b146-2ac4-497e-b15a-c35615498938","Type":"ContainerStarted","Data":"a93eb5e4721299ad09f3f03042db0c70189345d64a701a2a6ad707e3739bc4a4"} Oct 10 16:48:08 crc kubenswrapper[4799]: I1010 16:48:08.579256 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-45997" event={"ID":"4367b146-2ac4-497e-b15a-c35615498938","Type":"ContainerStarted","Data":"d7849e8d5087d47f73f181638736ddabdc9a8feabfa6000a77aa61dff152e959"} Oct 10 16:48:08 crc kubenswrapper[4799]: E1010 16:48:08.581780 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:33652e75a03a058769019fe8d8c51585a6eeefef5e1ecb96f9965434117954f2\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-45997" podUID="4367b146-2ac4-497e-b15a-c35615498938" Oct 10 16:48:08 crc kubenswrapper[4799]: I1010 16:48:08.582072 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-656bcbd775-nh542" event={"ID":"62fda0e4-55d3-481f-8da4-66e8f4dd39d4","Type":"ContainerStarted","Data":"ff941abddd1841a139516fdc69f5e8de8ad53b6245f4f5211a327ebcaf45cdf7"} Oct 10 16:48:08 crc kubenswrapper[4799]: I1010 16:48:08.582163 4799 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-656bcbd775-nh542" event={"ID":"62fda0e4-55d3-481f-8da4-66e8f4dd39d4","Type":"ContainerStarted","Data":"bd55822953110555d639b3e0de1e9182b8581ccad23e86ba5d3b99e6ab6c0c10"} Oct 10 16:48:08 crc kubenswrapper[4799]: E1010 16:48:08.585324 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:5cfb2ae1092445950b39dd59caa9a8c9367f42fb8353a8c3848d3bc729f24492\\\"\"" pod="openstack-operators/infra-operator-controller-manager-656bcbd775-nh542" podUID="62fda0e4-55d3-481f-8da4-66e8f4dd39d4" Oct 10 16:48:08 crc kubenswrapper[4799]: I1010 16:48:08.606714 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-dq9x7" event={"ID":"98a13753-0658-4a22-ba99-aaebe22b5746","Type":"ContainerStarted","Data":"0bbf5c0c0d5dc744805d64e27b5b01350469ff7297bea295cacc3f9eb6b1b94a"} Oct 10 16:48:08 crc kubenswrapper[4799]: I1010 16:48:08.614296 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-5df598886f-rcld5" event={"ID":"f9c815da-6e31-4ac7-a019-037983b9a9fd","Type":"ContainerStarted","Data":"e1f162f816fdf8d3ebd37aae684c0a1acef814ffb77e25b890cd6af74551d749"} Oct 10 16:48:09 crc kubenswrapper[4799]: E1010 16:48:09.623326 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:315e558023b41ac1aa215082096995a03810c5b42910a33b00427ffcac9c6a14\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-79df5fb58c-g57bz" podUID="2ac4be00-5d25-4bb4-8f98-599d7d637d38" Oct 10 16:48:09 crc kubenswrapper[4799]: E1010 16:48:09.623455 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:47278ed28e02df00892f941763aa0d69547327318e8a983e07f4577acd288167\\\"\"" pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-22wxr" podUID="118111a7-9601-4a05-94b9-79601cb47623" Oct 10 16:48:09 crc kubenswrapper[4799]: E1010 16:48:09.624167 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:5cfb2ae1092445950b39dd59caa9a8c9367f42fb8353a8c3848d3bc729f24492\\\"\"" pod="openstack-operators/infra-operator-controller-manager-656bcbd775-nh542" podUID="62fda0e4-55d3-481f-8da4-66e8f4dd39d4" Oct 10 16:48:09 crc kubenswrapper[4799]: E1010 16:48:09.624252 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:4b4a17fe08ce00e375afaaec6a28835f5c1784f03d11c4558376ac04130f3a9e\\\"\"" pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-pcn6t" podUID="e9f4174f-ea12-45c8-840c-ccdce9dd4c1f" Oct 10 16:48:09 crc kubenswrapper[4799]: E1010 16:48:09.624290 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-z59wk" podUID="bd9e6a7c-702e-4424-9cbb-f9832e91d4a3" Oct 10 16:48:09 crc kubenswrapper[4799]: E1010 16:48:09.626861 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:33652e75a03a058769019fe8d8c51585a6eeefef5e1ecb96f9965434117954f2\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-45997" podUID="4367b146-2ac4-497e-b15a-c35615498938" Oct 10 16:48:09 crc kubenswrapper[4799]: E1010 16:48:09.626993 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:7e584b1c430441c8b6591dadeff32e065de8a185ad37ef90d2e08d37e59aab4a\\\"\"" pod="openstack-operators/test-operator-controller-manager-5458f77c4-k5cjx" podUID="176cdbb9-4289-4322-8112-7ffae6a8efe8" Oct 10 16:48:09 crc kubenswrapper[4799]: E1010 16:48:09.627325 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:09deecf840d38ff6af3c924729cf0a9444bc985848bfbe7c918019b88a6bc4d7\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-6thbh" podUID="71551afb-1aa4-4cdf-9cb4-a136ec3cadf5" Oct 10 16:48:16 crc kubenswrapper[4799]: I1010 16:48:16.983027 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-5b95c8954b-nfst6" Oct 10 16:48:19 crc kubenswrapper[4799]: I1010 16:48:19.712469 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-7ffbcb7588-kv2gv" event={"ID":"b0caa8f4-5c59-402a-9025-a2ba80d70577","Type":"ContainerStarted","Data":"c4786ac482af30ed33adfccc1b0642274f8e7c6237dbea0ee109aefac091f91b"} Oct 10 16:48:19 crc kubenswrapper[4799]: I1010 16:48:19.718216 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-858f76bbdd-tzx89" event={"ID":"37273794-7563-423e-a2d5-86c9e9f957cb","Type":"ContainerStarted","Data":"e046e37b5c28aa3dc7f6cb3a15ae07efbc220ad373da8499056f955f2c9a619d"} Oct 10 16:48:19 crc kubenswrapper[4799]: I1010 16:48:19.722100 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-dq9x7" event={"ID":"98a13753-0658-4a22-ba99-aaebe22b5746","Type":"ContainerStarted","Data":"d917c7b4fe6f4ec22c235182b4a7e4514b9a63968652da34e6bf6abb716b8a6b"} Oct 10 16:48:19 crc kubenswrapper[4799]: I1010 16:48:19.727189 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-bl9j6" event={"ID":"98cf31e5-618a-4363-8a3d-1b0d0bc75b48","Type":"ContainerStarted","Data":"503cca5628daa22ccf80301acae06c83800d28b1db098ee51c7c6ec3546ae97b"} Oct 10 16:48:19 crc kubenswrapper[4799]: I1010 16:48:19.732158 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-9c5c78d49-vjjnj" 
event={"ID":"c89f9acf-7ee6-4600-9331-635eb7fce931","Type":"ContainerStarted","Data":"68b46f137f2785e6704766a61a34eda592bdc910f6ac861d38e6f493edfbb9dd"} Oct 10 16:48:19 crc kubenswrapper[4799]: I1010 16:48:19.734247 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-68b6c87b68-wxv94" event={"ID":"18c5ac5f-08f2-431b-9aaf-0b2e5c3f9bbf","Type":"ContainerStarted","Data":"7ddb6f528aa1edbecdf73dd29c16e3e2dd36297226a4e03f5caed4bcee12e7b3"} Oct 10 16:48:19 crc kubenswrapper[4799]: I1010 16:48:19.735772 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-vbq52" event={"ID":"e669c0e2-564c-4391-b2d0-5ab8cc0f38cd","Type":"ContainerStarted","Data":"72e82b4c4fea79f1a88089abefa6cde45fc5f10be0673f28843bcb0140acac64"} Oct 10 16:48:19 crc kubenswrapper[4799]: I1010 16:48:19.738889 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-85d5d9dd78-w2d2v" event={"ID":"324da982-9067-490a-98a5-9ad58296841a","Type":"ContainerStarted","Data":"2f2dbc3172488e77168ff7551c175ac435b186686cd204e703313f1f21329e29"} Oct 10 16:48:19 crc kubenswrapper[4799]: I1010 16:48:19.741508 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5956dffb7b725zs" event={"ID":"ca5131fc-12e4-41b5-a4e2-6e71ed4049e1","Type":"ContainerStarted","Data":"0f920c72770c20154254b52e351029281215cf2dac5ac40208e86232a04b33c5"} Oct 10 16:48:19 crc kubenswrapper[4799]: I1010 16:48:19.742796 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-5df598886f-rcld5" event={"ID":"f9c815da-6e31-4ac7-a019-037983b9a9fd","Type":"ContainerStarted","Data":"39ff5bdf6d3552688d9ac912f2ba369bf269a2fc6d27df1fe3c30a939683b0dd"} Oct 10 16:48:19 crc kubenswrapper[4799]: I1010 16:48:19.743596 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84b9b84486-dvk5w" event={"ID":"a61aa86a-a90e-439b-85e9-15b7a1466785","Type":"ContainerStarted","Data":"9b84bc3b9e0f99296de1f10bfd8d371c8a6e2e02fa49396b5f1bcb40a5443a5e"} Oct 10 16:48:19 crc kubenswrapper[4799]: I1010 16:48:19.744473 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5f67fbc655-2qw6c" event={"ID":"92c8227d-0d33-41f9-b186-2f17c2753fa2","Type":"ContainerStarted","Data":"df8e3444b20597d378c01cee8797aaebe54726ca76a97b4281457ea3ef95ee89"} Oct 10 16:48:19 crc kubenswrapper[4799]: I1010 16:48:19.745310 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-ll94t" event={"ID":"490a3592-9d71-4182-8b1b-6f8c55a01bde","Type":"ContainerStarted","Data":"dc40c2534348f45eb3f61363c74512206dc01fbc63cc529d95e6a43e3764cc42"} Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.753203 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5f67fbc655-2qw6c" event={"ID":"92c8227d-0d33-41f9-b186-2f17c2753fa2","Type":"ContainerStarted","Data":"0eead99b5e20c9ce269f6208b2e066a95174e0b36729ffbaa57662ec81d4b01c"} Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.753497 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-5f67fbc655-2qw6c" Oct 10 16:48:20 crc 
kubenswrapper[4799]: I1010 16:48:20.755041 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-ll94t" event={"ID":"490a3592-9d71-4182-8b1b-6f8c55a01bde","Type":"ContainerStarted","Data":"387659e3f2fef82939e4c5dce7fd226a078f0732e3daba211915df4569b77239"} Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.755186 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-ll94t" Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.757148 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-85d5d9dd78-w2d2v" event={"ID":"324da982-9067-490a-98a5-9ad58296841a","Type":"ContainerStarted","Data":"c9b1894ca5d8789ebc693e900d437af84f41f3a124e925f174a22f32c617d058"} Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.757313 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-85d5d9dd78-w2d2v" Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.758919 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5956dffb7b725zs" event={"ID":"ca5131fc-12e4-41b5-a4e2-6e71ed4049e1","Type":"ContainerStarted","Data":"f8b563fd4fb6a2df837185d7dac9985a683e185813ffd930490904480ef2c347"} Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.759039 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5956dffb7b725zs" Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.760579 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-bl9j6" event={"ID":"98cf31e5-618a-4363-8a3d-1b0d0bc75b48","Type":"ContainerStarted","Data":"c46cb1df787c2cd17f60848f483a217c3ed7b08f6804f195212f4e0a0e968d2b"} Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.760718 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-bl9j6" Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.761951 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84b9b84486-dvk5w" event={"ID":"a61aa86a-a90e-439b-85e9-15b7a1466785","Type":"ContainerStarted","Data":"414e1d0378f041307d051d67710dad90789132da58bf2cf2c1f685610d786477"} Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.762084 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-84b9b84486-dvk5w" Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.763563 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-9c5c78d49-vjjnj" event={"ID":"c89f9acf-7ee6-4600-9331-635eb7fce931","Type":"ContainerStarted","Data":"38f9578d72af5d8007020b42291a4026f295bd388f42c27d032c8d30ee810b10"} Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.763649 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-9c5c78d49-vjjnj" Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.765948 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-5df598886f-rcld5" 
event={"ID":"f9c815da-6e31-4ac7-a019-037983b9a9fd","Type":"ContainerStarted","Data":"952125383b915f5db84329e489cbc49d028df72fdaa89fc37148136c35ab4dce"} Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.766081 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-5df598886f-rcld5" Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.767452 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-68b6c87b68-wxv94" event={"ID":"18c5ac5f-08f2-431b-9aaf-0b2e5c3f9bbf","Type":"ContainerStarted","Data":"8b3e9c7e356fc8d83aed9cf7748094f3595008810e18803ae9c3a1c5c8ad2e52"} Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.767585 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-68b6c87b68-wxv94" Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.768734 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-7ffbcb7588-kv2gv" event={"ID":"b0caa8f4-5c59-402a-9025-a2ba80d70577","Type":"ContainerStarted","Data":"191123bea5a2c6d07ae48befbd6c8aa780d5c48d72067471daca9c180f0937ab"} Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.768887 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-7ffbcb7588-kv2gv" Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.769773 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-658bdf4b74-stptz" event={"ID":"9e711c48-2d32-4933-b13f-a0f9fec33e0d","Type":"ContainerStarted","Data":"fc501bb26b235e1302e11ad261c977572f85a5f2ecc7cd37842a58e257572157"} Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.769851 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-658bdf4b74-stptz" event={"ID":"9e711c48-2d32-4933-b13f-a0f9fec33e0d","Type":"ContainerStarted","Data":"7e8f480fcbc8664dcbaf5430ae2d21cf0cc14bdbeca066b732cfcb535de348f7"} Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.769913 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-658bdf4b74-stptz" Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.770952 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-858f76bbdd-tzx89" event={"ID":"37273794-7563-423e-a2d5-86c9e9f957cb","Type":"ContainerStarted","Data":"ab9a39f48074e4060abaccac2fb3808a92be2a03ac887cf2e2b3579b4d92a81d"} Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.771030 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-858f76bbdd-tzx89" Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.772157 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-dq9x7" event={"ID":"98a13753-0658-4a22-ba99-aaebe22b5746","Type":"ContainerStarted","Data":"a789e349baf6436427ea0afe068e5b0065892e580f115c90711a27a40f4831f0"} Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.772276 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-dq9x7" Oct 10 16:48:20 crc kubenswrapper[4799]: 
I1010 16:48:20.773329 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-vbq52" event={"ID":"e669c0e2-564c-4391-b2d0-5ab8cc0f38cd","Type":"ContainerStarted","Data":"665c82ab0f170c2826da1905af85719ba04aff676a515eaf3fdf0ef89b1e2b50"} Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.773486 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-vbq52" Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.780611 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-5f67fbc655-2qw6c" podStartSLOduration=3.942325292 podStartE2EDuration="15.780588233s" podCreationTimestamp="2025-10-10 16:48:05 +0000 UTC" firstStartedPulling="2025-10-10 16:48:07.120084593 +0000 UTC m=+980.628408708" lastFinishedPulling="2025-10-10 16:48:18.958347534 +0000 UTC m=+992.466671649" observedRunningTime="2025-10-10 16:48:20.776646405 +0000 UTC m=+994.284970530" watchObservedRunningTime="2025-10-10 16:48:20.780588233 +0000 UTC m=+994.288912348" Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.796247 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-dq9x7" podStartSLOduration=4.444113176 podStartE2EDuration="15.796230029s" podCreationTimestamp="2025-10-10 16:48:05 +0000 UTC" firstStartedPulling="2025-10-10 16:48:07.583032718 +0000 UTC m=+981.091356833" lastFinishedPulling="2025-10-10 16:48:18.935149571 +0000 UTC m=+992.443473686" observedRunningTime="2025-10-10 16:48:20.792663951 +0000 UTC m=+994.300988076" watchObservedRunningTime="2025-10-10 16:48:20.796230029 +0000 UTC m=+994.304554144" Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.840180 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-858f76bbdd-tzx89" podStartSLOduration=4.239688836 podStartE2EDuration="15.840162634s" podCreationTimestamp="2025-10-10 16:48:05 +0000 UTC" firstStartedPulling="2025-10-10 16:48:07.360495331 +0000 UTC m=+980.868819446" lastFinishedPulling="2025-10-10 16:48:18.960969089 +0000 UTC m=+992.469293244" observedRunningTime="2025-10-10 16:48:20.835686473 +0000 UTC m=+994.344010588" watchObservedRunningTime="2025-10-10 16:48:20.840162634 +0000 UTC m=+994.348486749" Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.858787 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5956dffb7b725zs" podStartSLOduration=5.010943547 podStartE2EDuration="15.858769644s" podCreationTimestamp="2025-10-10 16:48:05 +0000 UTC" firstStartedPulling="2025-10-10 16:48:08.10943283 +0000 UTC m=+981.617756945" lastFinishedPulling="2025-10-10 16:48:18.957258917 +0000 UTC m=+992.465583042" observedRunningTime="2025-10-10 16:48:20.857190355 +0000 UTC m=+994.365514470" watchObservedRunningTime="2025-10-10 16:48:20.858769644 +0000 UTC m=+994.367093759" Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.880420 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-658bdf4b74-stptz" podStartSLOduration=3.906822214 podStartE2EDuration="15.880386157s" podCreationTimestamp="2025-10-10 16:48:05 +0000 UTC" firstStartedPulling="2025-10-10 16:48:07.032437088 +0000 UTC 
m=+980.540761203" lastFinishedPulling="2025-10-10 16:48:19.006001001 +0000 UTC m=+992.514325146" observedRunningTime="2025-10-10 16:48:20.876533392 +0000 UTC m=+994.384857527" watchObservedRunningTime="2025-10-10 16:48:20.880386157 +0000 UTC m=+994.388710282" Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.898661 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-85d5d9dd78-w2d2v" podStartSLOduration=3.823877865 podStartE2EDuration="15.898639258s" podCreationTimestamp="2025-10-10 16:48:05 +0000 UTC" firstStartedPulling="2025-10-10 16:48:06.863923706 +0000 UTC m=+980.372247821" lastFinishedPulling="2025-10-10 16:48:18.938685099 +0000 UTC m=+992.447009214" observedRunningTime="2025-10-10 16:48:20.892200839 +0000 UTC m=+994.400524954" watchObservedRunningTime="2025-10-10 16:48:20.898639258 +0000 UTC m=+994.406963373" Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.919662 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-5df598886f-rcld5" podStartSLOduration=4.319547008 podStartE2EDuration="15.919643927s" podCreationTimestamp="2025-10-10 16:48:05 +0000 UTC" firstStartedPulling="2025-10-10 16:48:07.360731607 +0000 UTC m=+980.869055722" lastFinishedPulling="2025-10-10 16:48:18.960828516 +0000 UTC m=+992.469152641" observedRunningTime="2025-10-10 16:48:20.910133842 +0000 UTC m=+994.418457957" watchObservedRunningTime="2025-10-10 16:48:20.919643927 +0000 UTC m=+994.427968042" Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.943675 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-68b6c87b68-wxv94" podStartSLOduration=4.052671146 podStartE2EDuration="15.94365941s" podCreationTimestamp="2025-10-10 16:48:05 +0000 UTC" firstStartedPulling="2025-10-10 16:48:07.115021078 +0000 UTC m=+980.623345193" lastFinishedPulling="2025-10-10 16:48:19.006009342 +0000 UTC m=+992.514333457" observedRunningTime="2025-10-10 16:48:20.938060332 +0000 UTC m=+994.446384447" watchObservedRunningTime="2025-10-10 16:48:20.94365941 +0000 UTC m=+994.451983515" Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.967845 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-9c5c78d49-vjjnj" podStartSLOduration=4.078997686 podStartE2EDuration="15.967749585s" podCreationTimestamp="2025-10-10 16:48:05 +0000 UTC" firstStartedPulling="2025-10-10 16:48:07.032692384 +0000 UTC m=+980.541016499" lastFinishedPulling="2025-10-10 16:48:18.921444273 +0000 UTC m=+992.429768398" observedRunningTime="2025-10-10 16:48:20.961091661 +0000 UTC m=+994.469415776" watchObservedRunningTime="2025-10-10 16:48:20.967749585 +0000 UTC m=+994.476073700" Oct 10 16:48:20 crc kubenswrapper[4799]: I1010 16:48:20.980565 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-84b9b84486-dvk5w" podStartSLOduration=4.365074133 podStartE2EDuration="15.980551372s" podCreationTimestamp="2025-10-10 16:48:05 +0000 UTC" firstStartedPulling="2025-10-10 16:48:07.340023435 +0000 UTC m=+980.848347560" lastFinishedPulling="2025-10-10 16:48:18.955500684 +0000 UTC m=+992.463824799" observedRunningTime="2025-10-10 16:48:20.979971407 +0000 UTC m=+994.488295522" watchObservedRunningTime="2025-10-10 16:48:20.980551372 +0000 UTC m=+994.488875487" Oct 10 
16:48:21 crc kubenswrapper[4799]: I1010 16:48:21.015579 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-bl9j6" podStartSLOduration=3.9001495090000002 podStartE2EDuration="16.015561896s" podCreationTimestamp="2025-10-10 16:48:05 +0000 UTC" firstStartedPulling="2025-10-10 16:48:06.863584107 +0000 UTC m=+980.371908222" lastFinishedPulling="2025-10-10 16:48:18.978996494 +0000 UTC m=+992.487320609" observedRunningTime="2025-10-10 16:48:21.004165995 +0000 UTC m=+994.512490110" watchObservedRunningTime="2025-10-10 16:48:21.015561896 +0000 UTC m=+994.523886011" Oct 10 16:48:21 crc kubenswrapper[4799]: I1010 16:48:21.051353 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-ll94t" podStartSLOduration=4.158389508 podStartE2EDuration="16.05133406s" podCreationTimestamp="2025-10-10 16:48:05 +0000 UTC" firstStartedPulling="2025-10-10 16:48:07.02969635 +0000 UTC m=+980.538020475" lastFinishedPulling="2025-10-10 16:48:18.922640912 +0000 UTC m=+992.430965027" observedRunningTime="2025-10-10 16:48:21.039964719 +0000 UTC m=+994.548288844" watchObservedRunningTime="2025-10-10 16:48:21.05133406 +0000 UTC m=+994.559658165" Oct 10 16:48:21 crc kubenswrapper[4799]: I1010 16:48:21.066156 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-vbq52" podStartSLOduration=4.432475588 podStartE2EDuration="16.064653539s" podCreationTimestamp="2025-10-10 16:48:05 +0000 UTC" firstStartedPulling="2025-10-10 16:48:07.339658656 +0000 UTC m=+980.847982771" lastFinishedPulling="2025-10-10 16:48:18.971836607 +0000 UTC m=+992.480160722" observedRunningTime="2025-10-10 16:48:21.064557646 +0000 UTC m=+994.572881771" watchObservedRunningTime="2025-10-10 16:48:21.064653539 +0000 UTC m=+994.572977654" Oct 10 16:48:22 crc kubenswrapper[4799]: I1010 16:48:22.793764 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5458f77c4-k5cjx" event={"ID":"176cdbb9-4289-4322-8112-7ffae6a8efe8","Type":"ContainerStarted","Data":"02a19075b0b7a4d44f446e30cf7058f6291edb6626122b8d6efed2f41c30c7d7"} Oct 10 16:48:22 crc kubenswrapper[4799]: I1010 16:48:22.794236 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5458f77c4-k5cjx" Oct 10 16:48:22 crc kubenswrapper[4799]: I1010 16:48:22.812063 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5458f77c4-k5cjx" podStartSLOduration=3.360030639 podStartE2EDuration="17.812046529s" podCreationTimestamp="2025-10-10 16:48:05 +0000 UTC" firstStartedPulling="2025-10-10 16:48:07.600660433 +0000 UTC m=+981.108984548" lastFinishedPulling="2025-10-10 16:48:22.052676323 +0000 UTC m=+995.561000438" observedRunningTime="2025-10-10 16:48:22.811131217 +0000 UTC m=+996.319455342" watchObservedRunningTime="2025-10-10 16:48:22.812046529 +0000 UTC m=+996.320370644" Oct 10 16:48:22 crc kubenswrapper[4799]: I1010 16:48:22.813767 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-7ffbcb7588-kv2gv" podStartSLOduration=5.878702369 podStartE2EDuration="17.813734621s" podCreationTimestamp="2025-10-10 16:48:05 +0000 UTC" firstStartedPulling="2025-10-10 
16:48:07.025887496 +0000 UTC m=+980.534211621" lastFinishedPulling="2025-10-10 16:48:18.960919758 +0000 UTC m=+992.469243873" observedRunningTime="2025-10-10 16:48:21.089121623 +0000 UTC m=+994.597445738" watchObservedRunningTime="2025-10-10 16:48:22.813734621 +0000 UTC m=+996.322058736" Oct 10 16:48:23 crc kubenswrapper[4799]: I1010 16:48:23.802711 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-z59wk" event={"ID":"bd9e6a7c-702e-4424-9cbb-f9832e91d4a3","Type":"ContainerStarted","Data":"dbd9faf3259f6c24a658f02b4edf5d4c6653f4f21765e792f4cfb27f56b20852"} Oct 10 16:48:23 crc kubenswrapper[4799]: I1010 16:48:23.819332 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-z59wk" podStartSLOduration=2.426170973 podStartE2EDuration="17.819311918s" podCreationTimestamp="2025-10-10 16:48:06 +0000 UTC" firstStartedPulling="2025-10-10 16:48:07.603852952 +0000 UTC m=+981.112177067" lastFinishedPulling="2025-10-10 16:48:22.996993867 +0000 UTC m=+996.505318012" observedRunningTime="2025-10-10 16:48:23.815730549 +0000 UTC m=+997.324054704" watchObservedRunningTime="2025-10-10 16:48:23.819311918 +0000 UTC m=+997.327636043" Oct 10 16:48:24 crc kubenswrapper[4799]: I1010 16:48:24.839330 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-45997" event={"ID":"4367b146-2ac4-497e-b15a-c35615498938","Type":"ContainerStarted","Data":"77931fe4d809b5ea7f93db851ead37f64a136287fcce04f08eed5cfae3c036e2"} Oct 10 16:48:24 crc kubenswrapper[4799]: I1010 16:48:24.840124 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-45997" Oct 10 16:48:24 crc kubenswrapper[4799]: I1010 16:48:24.856455 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-45997" podStartSLOduration=3.327733822 podStartE2EDuration="19.856436285s" podCreationTimestamp="2025-10-10 16:48:05 +0000 UTC" firstStartedPulling="2025-10-10 16:48:07.36734747 +0000 UTC m=+980.875671585" lastFinishedPulling="2025-10-10 16:48:23.896049933 +0000 UTC m=+997.404374048" observedRunningTime="2025-10-10 16:48:24.854889096 +0000 UTC m=+998.363213231" watchObservedRunningTime="2025-10-10 16:48:24.856436285 +0000 UTC m=+998.364760400" Oct 10 16:48:25 crc kubenswrapper[4799]: I1010 16:48:25.725502 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-658bdf4b74-stptz" Oct 10 16:48:25 crc kubenswrapper[4799]: I1010 16:48:25.740808 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-bl9j6" Oct 10 16:48:25 crc kubenswrapper[4799]: I1010 16:48:25.788114 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-85d5d9dd78-w2d2v" Oct 10 16:48:25 crc kubenswrapper[4799]: I1010 16:48:25.833469 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-7ffbcb7588-kv2gv" Oct 10 16:48:25 crc kubenswrapper[4799]: I1010 16:48:25.905415 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/ironic-operator-controller-manager-9c5c78d49-vjjnj" Oct 10 16:48:25 crc kubenswrapper[4799]: I1010 16:48:25.924397 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-ll94t" Oct 10 16:48:25 crc kubenswrapper[4799]: I1010 16:48:25.976453 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-5f67fbc655-2qw6c" Oct 10 16:48:26 crc kubenswrapper[4799]: I1010 16:48:26.105680 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-84b9b84486-dvk5w" Oct 10 16:48:26 crc kubenswrapper[4799]: I1010 16:48:26.120783 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-858f76bbdd-tzx89" Oct 10 16:48:26 crc kubenswrapper[4799]: I1010 16:48:26.211331 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-5df598886f-rcld5" Oct 10 16:48:26 crc kubenswrapper[4799]: I1010 16:48:26.333939 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-68b6c87b68-wxv94" Oct 10 16:48:26 crc kubenswrapper[4799]: I1010 16:48:26.519608 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-vbq52" Oct 10 16:48:26 crc kubenswrapper[4799]: I1010 16:48:26.565138 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-dq9x7" Oct 10 16:48:27 crc kubenswrapper[4799]: I1010 16:48:27.776254 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5956dffb7b725zs" Oct 10 16:48:28 crc kubenswrapper[4799]: I1010 16:48:28.876218 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-656bcbd775-nh542" event={"ID":"62fda0e4-55d3-481f-8da4-66e8f4dd39d4","Type":"ContainerStarted","Data":"f347724f8414daf08e4eb1f7557732853aabe9920499deec49af62415946d27c"} Oct 10 16:48:28 crc kubenswrapper[4799]: I1010 16:48:28.876841 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-656bcbd775-nh542" Oct 10 16:48:28 crc kubenswrapper[4799]: I1010 16:48:28.878972 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-22wxr" event={"ID":"118111a7-9601-4a05-94b9-79601cb47623","Type":"ContainerStarted","Data":"038967d301be04007c337d14b87dacc649e3895daf5d74d0dca1783292eb8e34"} Oct 10 16:48:28 crc kubenswrapper[4799]: I1010 16:48:28.879243 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-22wxr" Oct 10 16:48:28 crc kubenswrapper[4799]: I1010 16:48:28.881191 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-pcn6t" event={"ID":"e9f4174f-ea12-45c8-840c-ccdce9dd4c1f","Type":"ContainerStarted","Data":"c1446a7b429a4595ca7d7a9f36462b87f3ea26d57e20ca52e4ffd4aa7873f50d"} Oct 10 16:48:28 crc kubenswrapper[4799]: I1010 16:48:28.881421 
4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-pcn6t" Oct 10 16:48:28 crc kubenswrapper[4799]: I1010 16:48:28.886204 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-79df5fb58c-g57bz" event={"ID":"2ac4be00-5d25-4bb4-8f98-599d7d637d38","Type":"ContainerStarted","Data":"e50b01ef18786d25667ac6bdeb788c6ad2c23df606d635928100516ea5d9660a"} Oct 10 16:48:28 crc kubenswrapper[4799]: I1010 16:48:28.886573 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-79df5fb58c-g57bz" Oct 10 16:48:28 crc kubenswrapper[4799]: I1010 16:48:28.889908 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-6thbh" event={"ID":"71551afb-1aa4-4cdf-9cb4-a136ec3cadf5","Type":"ContainerStarted","Data":"ea3b3e43d85dbde6870f79a6948ef392fecb75d1ac8c063adf5c7aeea3868a6a"} Oct 10 16:48:28 crc kubenswrapper[4799]: I1010 16:48:28.890244 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-6thbh" Oct 10 16:48:28 crc kubenswrapper[4799]: I1010 16:48:28.906036 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-656bcbd775-nh542" podStartSLOduration=3.919654372 podStartE2EDuration="23.906009588s" podCreationTimestamp="2025-10-10 16:48:05 +0000 UTC" firstStartedPulling="2025-10-10 16:48:07.609813589 +0000 UTC m=+981.118137704" lastFinishedPulling="2025-10-10 16:48:27.596168805 +0000 UTC m=+1001.104492920" observedRunningTime="2025-10-10 16:48:28.902431399 +0000 UTC m=+1002.410755514" watchObservedRunningTime="2025-10-10 16:48:28.906009588 +0000 UTC m=+1002.414333713" Oct 10 16:48:28 crc kubenswrapper[4799]: I1010 16:48:28.924169 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-22wxr" podStartSLOduration=3.352255437 podStartE2EDuration="23.924139875s" podCreationTimestamp="2025-10-10 16:48:05 +0000 UTC" firstStartedPulling="2025-10-10 16:48:07.39323345 +0000 UTC m=+980.901557555" lastFinishedPulling="2025-10-10 16:48:27.965117878 +0000 UTC m=+1001.473441993" observedRunningTime="2025-10-10 16:48:28.91947074 +0000 UTC m=+1002.427794845" watchObservedRunningTime="2025-10-10 16:48:28.924139875 +0000 UTC m=+1002.432463990" Oct 10 16:48:28 crc kubenswrapper[4799]: I1010 16:48:28.940134 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-79df5fb58c-g57bz" podStartSLOduration=3.300513569 podStartE2EDuration="23.94011781s" podCreationTimestamp="2025-10-10 16:48:05 +0000 UTC" firstStartedPulling="2025-10-10 16:48:07.37949348 +0000 UTC m=+980.887817595" lastFinishedPulling="2025-10-10 16:48:28.019097711 +0000 UTC m=+1001.527421836" observedRunningTime="2025-10-10 16:48:28.939045173 +0000 UTC m=+1002.447369298" watchObservedRunningTime="2025-10-10 16:48:28.94011781 +0000 UTC m=+1002.448441925" Oct 10 16:48:28 crc kubenswrapper[4799]: I1010 16:48:28.960476 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-pcn6t" podStartSLOduration=3.75722161 podStartE2EDuration="23.960453812s" podCreationTimestamp="2025-10-10 16:48:05 +0000 
UTC" firstStartedPulling="2025-10-10 16:48:07.393029545 +0000 UTC m=+980.901353660" lastFinishedPulling="2025-10-10 16:48:27.596261737 +0000 UTC m=+1001.104585862" observedRunningTime="2025-10-10 16:48:28.95751038 +0000 UTC m=+1002.465834525" watchObservedRunningTime="2025-10-10 16:48:28.960453812 +0000 UTC m=+1002.468777937" Oct 10 16:48:28 crc kubenswrapper[4799]: I1010 16:48:28.979031 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-6thbh" podStartSLOduration=3.429523056 podStartE2EDuration="23.979007721s" podCreationTimestamp="2025-10-10 16:48:05 +0000 UTC" firstStartedPulling="2025-10-10 16:48:07.393509256 +0000 UTC m=+980.901833371" lastFinishedPulling="2025-10-10 16:48:27.942993921 +0000 UTC m=+1001.451318036" observedRunningTime="2025-10-10 16:48:28.973390352 +0000 UTC m=+1002.481714457" watchObservedRunningTime="2025-10-10 16:48:28.979007721 +0000 UTC m=+1002.487331846" Oct 10 16:48:35 crc kubenswrapper[4799]: I1010 16:48:35.970050 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-22wxr" Oct 10 16:48:36 crc kubenswrapper[4799]: I1010 16:48:36.166369 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-45997" Oct 10 16:48:36 crc kubenswrapper[4799]: I1010 16:48:36.249705 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-6thbh" Oct 10 16:48:36 crc kubenswrapper[4799]: I1010 16:48:36.294400 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-79df5fb58c-g57bz" Oct 10 16:48:36 crc kubenswrapper[4799]: I1010 16:48:36.455740 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-656bcbd775-nh542" Oct 10 16:48:36 crc kubenswrapper[4799]: I1010 16:48:36.483423 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-pcn6t" Oct 10 16:48:36 crc kubenswrapper[4799]: I1010 16:48:36.535602 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5458f77c4-k5cjx" Oct 10 16:48:52 crc kubenswrapper[4799]: I1010 16:48:52.470889 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7bfcb9d745-x7l2v"] Oct 10 16:48:52 crc kubenswrapper[4799]: I1010 16:48:52.472614 4799 util.go:30] "No sandbox for pod can be found. 
Oct 10 16:48:52 crc kubenswrapper[4799]: I1010 16:48:52.475518 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-pkljw"
Oct 10 16:48:52 crc kubenswrapper[4799]: I1010 16:48:52.476877 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt"
Oct 10 16:48:52 crc kubenswrapper[4799]: I1010 16:48:52.476922 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns"
Oct 10 16:48:52 crc kubenswrapper[4799]: I1010 16:48:52.476944 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt"
Oct 10 16:48:52 crc kubenswrapper[4799]: I1010 16:48:52.497924 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7bfcb9d745-x7l2v"]
Oct 10 16:48:52 crc kubenswrapper[4799]: I1010 16:48:52.561280 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51ebfb45-0683-4095-9ce4-1224edf571f3-config\") pod \"dnsmasq-dns-7bfcb9d745-x7l2v\" (UID: \"51ebfb45-0683-4095-9ce4-1224edf571f3\") " pod="openstack/dnsmasq-dns-7bfcb9d745-x7l2v"
Oct 10 16:48:52 crc kubenswrapper[4799]: I1010 16:48:52.561408 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nztnk\" (UniqueName: \"kubernetes.io/projected/51ebfb45-0683-4095-9ce4-1224edf571f3-kube-api-access-nztnk\") pod \"dnsmasq-dns-7bfcb9d745-x7l2v\" (UID: \"51ebfb45-0683-4095-9ce4-1224edf571f3\") " pod="openstack/dnsmasq-dns-7bfcb9d745-x7l2v"
Oct 10 16:48:52 crc kubenswrapper[4799]: I1010 16:48:52.577343 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-758b79db4c-wzjph"]
Oct 10 16:48:52 crc kubenswrapper[4799]: I1010 16:48:52.581973 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-758b79db4c-wzjph"
Oct 10 16:48:52 crc kubenswrapper[4799]: I1010 16:48:52.587898 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc"
Oct 10 16:48:52 crc kubenswrapper[4799]: I1010 16:48:52.594609 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-758b79db4c-wzjph"]
Oct 10 16:48:52 crc kubenswrapper[4799]: I1010 16:48:52.662908 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d31fd219-0ef3-47e3-9106-9546b0829c53-dns-svc\") pod \"dnsmasq-dns-758b79db4c-wzjph\" (UID: \"d31fd219-0ef3-47e3-9106-9546b0829c53\") " pod="openstack/dnsmasq-dns-758b79db4c-wzjph"
Oct 10 16:48:52 crc kubenswrapper[4799]: I1010 16:48:52.662987 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nztnk\" (UniqueName: \"kubernetes.io/projected/51ebfb45-0683-4095-9ce4-1224edf571f3-kube-api-access-nztnk\") pod \"dnsmasq-dns-7bfcb9d745-x7l2v\" (UID: \"51ebfb45-0683-4095-9ce4-1224edf571f3\") " pod="openstack/dnsmasq-dns-7bfcb9d745-x7l2v"
Oct 10 16:48:52 crc kubenswrapper[4799]: I1010 16:48:52.663116 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhdrk\" (UniqueName: \"kubernetes.io/projected/d31fd219-0ef3-47e3-9106-9546b0829c53-kube-api-access-dhdrk\") pod \"dnsmasq-dns-758b79db4c-wzjph\" (UID: \"d31fd219-0ef3-47e3-9106-9546b0829c53\") " pod="openstack/dnsmasq-dns-758b79db4c-wzjph"
Oct 10 16:48:52 crc kubenswrapper[4799]: I1010 16:48:52.663215 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d31fd219-0ef3-47e3-9106-9546b0829c53-config\") pod \"dnsmasq-dns-758b79db4c-wzjph\" (UID: \"d31fd219-0ef3-47e3-9106-9546b0829c53\") " pod="openstack/dnsmasq-dns-758b79db4c-wzjph"
Oct 10 16:48:52 crc kubenswrapper[4799]: I1010 16:48:52.663276 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51ebfb45-0683-4095-9ce4-1224edf571f3-config\") pod \"dnsmasq-dns-7bfcb9d745-x7l2v\" (UID: \"51ebfb45-0683-4095-9ce4-1224edf571f3\") " pod="openstack/dnsmasq-dns-7bfcb9d745-x7l2v"
Oct 10 16:48:52 crc kubenswrapper[4799]: I1010 16:48:52.664106 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51ebfb45-0683-4095-9ce4-1224edf571f3-config\") pod \"dnsmasq-dns-7bfcb9d745-x7l2v\" (UID: \"51ebfb45-0683-4095-9ce4-1224edf571f3\") " pod="openstack/dnsmasq-dns-7bfcb9d745-x7l2v"
Oct 10 16:48:52 crc kubenswrapper[4799]: I1010 16:48:52.690978 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nztnk\" (UniqueName: \"kubernetes.io/projected/51ebfb45-0683-4095-9ce4-1224edf571f3-kube-api-access-nztnk\") pod \"dnsmasq-dns-7bfcb9d745-x7l2v\" (UID: \"51ebfb45-0683-4095-9ce4-1224edf571f3\") " pod="openstack/dnsmasq-dns-7bfcb9d745-x7l2v"
Oct 10 16:48:52 crc kubenswrapper[4799]: I1010 16:48:52.764614 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dhdrk\" (UniqueName: \"kubernetes.io/projected/d31fd219-0ef3-47e3-9106-9546b0829c53-kube-api-access-dhdrk\") pod \"dnsmasq-dns-758b79db4c-wzjph\" (UID: \"d31fd219-0ef3-47e3-9106-9546b0829c53\") " pod="openstack/dnsmasq-dns-758b79db4c-wzjph"
Oct 10 16:48:52 crc kubenswrapper[4799]: I1010 16:48:52.764719 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d31fd219-0ef3-47e3-9106-9546b0829c53-config\") pod \"dnsmasq-dns-758b79db4c-wzjph\" (UID: \"d31fd219-0ef3-47e3-9106-9546b0829c53\") " pod="openstack/dnsmasq-dns-758b79db4c-wzjph"
Oct 10 16:48:52 crc kubenswrapper[4799]: I1010 16:48:52.765837 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d31fd219-0ef3-47e3-9106-9546b0829c53-config\") pod \"dnsmasq-dns-758b79db4c-wzjph\" (UID: \"d31fd219-0ef3-47e3-9106-9546b0829c53\") " pod="openstack/dnsmasq-dns-758b79db4c-wzjph"
Oct 10 16:48:52 crc kubenswrapper[4799]: I1010 16:48:52.765978 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d31fd219-0ef3-47e3-9106-9546b0829c53-dns-svc\") pod \"dnsmasq-dns-758b79db4c-wzjph\" (UID: \"d31fd219-0ef3-47e3-9106-9546b0829c53\") " pod="openstack/dnsmasq-dns-758b79db4c-wzjph"
Oct 10 16:48:52 crc kubenswrapper[4799]: I1010 16:48:52.766576 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d31fd219-0ef3-47e3-9106-9546b0829c53-dns-svc\") pod \"dnsmasq-dns-758b79db4c-wzjph\" (UID: \"d31fd219-0ef3-47e3-9106-9546b0829c53\") " pod="openstack/dnsmasq-dns-758b79db4c-wzjph"
Oct 10 16:48:52 crc kubenswrapper[4799]: I1010 16:48:52.782624 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dhdrk\" (UniqueName: \"kubernetes.io/projected/d31fd219-0ef3-47e3-9106-9546b0829c53-kube-api-access-dhdrk\") pod \"dnsmasq-dns-758b79db4c-wzjph\" (UID: \"d31fd219-0ef3-47e3-9106-9546b0829c53\") " pod="openstack/dnsmasq-dns-758b79db4c-wzjph"
Oct 10 16:48:52 crc kubenswrapper[4799]: I1010 16:48:52.795493 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bfcb9d745-x7l2v"
Oct 10 16:48:52 crc kubenswrapper[4799]: I1010 16:48:52.904143 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-758b79db4c-wzjph"
Oct 10 16:48:53 crc kubenswrapper[4799]: I1010 16:48:53.138905 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-758b79db4c-wzjph"]
Oct 10 16:48:53 crc kubenswrapper[4799]: I1010 16:48:53.145652 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 10 16:48:53 crc kubenswrapper[4799]: W1010 16:48:53.253336 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod51ebfb45_0683_4095_9ce4_1224edf571f3.slice/crio-3add22576b5449e37798a2b755c666bbb81322f70e190bcc062cc4912061da9a WatchSource:0}: Error finding container 3add22576b5449e37798a2b755c666bbb81322f70e190bcc062cc4912061da9a: Status 404 returned error can't find the container with id 3add22576b5449e37798a2b755c666bbb81322f70e190bcc062cc4912061da9a
Oct 10 16:48:53 crc kubenswrapper[4799]: I1010 16:48:53.253367 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7bfcb9d745-x7l2v"]
Oct 10 16:48:54 crc kubenswrapper[4799]: I1010 16:48:54.108977 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-758b79db4c-wzjph" event={"ID":"d31fd219-0ef3-47e3-9106-9546b0829c53","Type":"ContainerStarted","Data":"cc52994ad214f36320d6ac84b508183d2d5174f09eab2830575d8f1f30a6d95a"}
Oct 10 16:48:54 crc kubenswrapper[4799]: I1010 16:48:54.110225 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bfcb9d745-x7l2v" event={"ID":"51ebfb45-0683-4095-9ce4-1224edf571f3","Type":"ContainerStarted","Data":"3add22576b5449e37798a2b755c666bbb81322f70e190bcc062cc4912061da9a"}
Oct 10 16:48:54 crc kubenswrapper[4799]: I1010 16:48:54.639835 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-758b79db4c-wzjph"]
Oct 10 16:48:54 crc kubenswrapper[4799]: I1010 16:48:54.661096 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-644597f84c-zx44x"]
Oct 10 16:48:54 crc kubenswrapper[4799]: I1010 16:48:54.665037 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-644597f84c-zx44x"
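The manager.go:1169 warning above is cadvisor losing a benign race: the cgroup watch event for crio-3add2257... arrives before the runtime can answer a lookup for that ID, so the inspection 404s, yet the same container ID shows up moments later in the ContainerStarted PLEG event at 16:48:54.110225. A small Go sketch of that cross-check over a log like this one; the scanning code is ours, only the two quoted patterns come from the entries:

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

// Sketch: pair each cadvisor "can't find the container" warning with a
// later PLEG ContainerStarted event for the same 64-hex container ID.
func main() {
	notFound := regexp.MustCompile(`Error finding container ([0-9a-f]{64})`)
	started := regexp.MustCompile(`"ContainerStarted","Data":"([0-9a-f]{64})"`)
	missing := map[string]bool{}
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // log lines are long
	for sc.Scan() {
		line := sc.Text()
		if m := notFound.FindStringSubmatch(line); m != nil {
			missing[m[1]] = true
		}
		if m := started.FindStringSubmatch(line); m != nil && missing[m[1]] {
			fmt.Println("benign race, container started:", m[1][:12])
		}
	}
}

Fed this section on stdin, the same pairing holds for the later 404 warnings here as well (31e7e4f1..., 9797a4e0..., 25e32e81... each precede a matching ContainerStarted event), which is why these warnings read as noise rather than failures.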
Oct 10 16:48:54 crc kubenswrapper[4799]: I1010 16:48:54.675513 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-644597f84c-zx44x"]
Oct 10 16:48:54 crc kubenswrapper[4799]: I1010 16:48:54.698157 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/49436e14-c65e-4c80-aaf9-189e76f5a90c-dns-svc\") pod \"dnsmasq-dns-644597f84c-zx44x\" (UID: \"49436e14-c65e-4c80-aaf9-189e76f5a90c\") " pod="openstack/dnsmasq-dns-644597f84c-zx44x"
Oct 10 16:48:54 crc kubenswrapper[4799]: I1010 16:48:54.698196 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49436e14-c65e-4c80-aaf9-189e76f5a90c-config\") pod \"dnsmasq-dns-644597f84c-zx44x\" (UID: \"49436e14-c65e-4c80-aaf9-189e76f5a90c\") " pod="openstack/dnsmasq-dns-644597f84c-zx44x"
Oct 10 16:48:54 crc kubenswrapper[4799]: I1010 16:48:54.698246 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pffhn\" (UniqueName: \"kubernetes.io/projected/49436e14-c65e-4c80-aaf9-189e76f5a90c-kube-api-access-pffhn\") pod \"dnsmasq-dns-644597f84c-zx44x\" (UID: \"49436e14-c65e-4c80-aaf9-189e76f5a90c\") " pod="openstack/dnsmasq-dns-644597f84c-zx44x"
Oct 10 16:48:54 crc kubenswrapper[4799]: I1010 16:48:54.800396 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/49436e14-c65e-4c80-aaf9-189e76f5a90c-dns-svc\") pod \"dnsmasq-dns-644597f84c-zx44x\" (UID: \"49436e14-c65e-4c80-aaf9-189e76f5a90c\") " pod="openstack/dnsmasq-dns-644597f84c-zx44x"
Oct 10 16:48:54 crc kubenswrapper[4799]: I1010 16:48:54.800439 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49436e14-c65e-4c80-aaf9-189e76f5a90c-config\") pod \"dnsmasq-dns-644597f84c-zx44x\" (UID: \"49436e14-c65e-4c80-aaf9-189e76f5a90c\") " pod="openstack/dnsmasq-dns-644597f84c-zx44x"
Oct 10 16:48:54 crc kubenswrapper[4799]: I1010 16:48:54.800493 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pffhn\" (UniqueName: \"kubernetes.io/projected/49436e14-c65e-4c80-aaf9-189e76f5a90c-kube-api-access-pffhn\") pod \"dnsmasq-dns-644597f84c-zx44x\" (UID: \"49436e14-c65e-4c80-aaf9-189e76f5a90c\") " pod="openstack/dnsmasq-dns-644597f84c-zx44x"
Oct 10 16:48:54 crc kubenswrapper[4799]: I1010 16:48:54.801644 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/49436e14-c65e-4c80-aaf9-189e76f5a90c-dns-svc\") pod \"dnsmasq-dns-644597f84c-zx44x\" (UID: \"49436e14-c65e-4c80-aaf9-189e76f5a90c\") " pod="openstack/dnsmasq-dns-644597f84c-zx44x"
Oct 10 16:48:54 crc kubenswrapper[4799]: I1010 16:48:54.806969 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49436e14-c65e-4c80-aaf9-189e76f5a90c-config\") pod \"dnsmasq-dns-644597f84c-zx44x\" (UID: \"49436e14-c65e-4c80-aaf9-189e76f5a90c\") " pod="openstack/dnsmasq-dns-644597f84c-zx44x"
Oct 10 16:48:54 crc kubenswrapper[4799]: I1010 16:48:54.840043 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pffhn\" (UniqueName: \"kubernetes.io/projected/49436e14-c65e-4c80-aaf9-189e76f5a90c-kube-api-access-pffhn\") pod \"dnsmasq-dns-644597f84c-zx44x\" (UID: \"49436e14-c65e-4c80-aaf9-189e76f5a90c\") " pod="openstack/dnsmasq-dns-644597f84c-zx44x"
Oct 10 16:48:54 crc kubenswrapper[4799]: I1010 16:48:54.996258 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-644597f84c-zx44x"
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.322445 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7bfcb9d745-x7l2v"]
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.336069 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-644597f84c-zx44x"]
Oct 10 16:48:55 crc kubenswrapper[4799]: W1010 16:48:55.353466 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod49436e14_c65e_4c80_aaf9_189e76f5a90c.slice/crio-31e7e4f1dcff9d956b4cde3f0aaf43761f08f3b4d5f4bd731780bafea61badce WatchSource:0}: Error finding container 31e7e4f1dcff9d956b4cde3f0aaf43761f08f3b4d5f4bd731780bafea61badce: Status 404 returned error can't find the container with id 31e7e4f1dcff9d956b4cde3f0aaf43761f08f3b4d5f4bd731780bafea61badce
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.359866 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-77597f887-p8x7f"]
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.361376 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77597f887-p8x7f"
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.371747 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77597f887-p8x7f"]
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.414198 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/029ee4ad-013d-47f2-9590-ca0a0d85d51d-config\") pod \"dnsmasq-dns-77597f887-p8x7f\" (UID: \"029ee4ad-013d-47f2-9590-ca0a0d85d51d\") " pod="openstack/dnsmasq-dns-77597f887-p8x7f"
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.414320 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/029ee4ad-013d-47f2-9590-ca0a0d85d51d-dns-svc\") pod \"dnsmasq-dns-77597f887-p8x7f\" (UID: \"029ee4ad-013d-47f2-9590-ca0a0d85d51d\") " pod="openstack/dnsmasq-dns-77597f887-p8x7f"
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.414447 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6f7b\" (UniqueName: \"kubernetes.io/projected/029ee4ad-013d-47f2-9590-ca0a0d85d51d-kube-api-access-j6f7b\") pod \"dnsmasq-dns-77597f887-p8x7f\" (UID: \"029ee4ad-013d-47f2-9590-ca0a0d85d51d\") " pod="openstack/dnsmasq-dns-77597f887-p8x7f"
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.515656 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/029ee4ad-013d-47f2-9590-ca0a0d85d51d-dns-svc\") pod \"dnsmasq-dns-77597f887-p8x7f\" (UID: \"029ee4ad-013d-47f2-9590-ca0a0d85d51d\") " pod="openstack/dnsmasq-dns-77597f887-p8x7f"
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.516583 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6f7b\" (UniqueName: \"kubernetes.io/projected/029ee4ad-013d-47f2-9590-ca0a0d85d51d-kube-api-access-j6f7b\") pod \"dnsmasq-dns-77597f887-p8x7f\" (UID: \"029ee4ad-013d-47f2-9590-ca0a0d85d51d\") " pod="openstack/dnsmasq-dns-77597f887-p8x7f"
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.516621 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/029ee4ad-013d-47f2-9590-ca0a0d85d51d-config\") pod \"dnsmasq-dns-77597f887-p8x7f\" (UID: \"029ee4ad-013d-47f2-9590-ca0a0d85d51d\") " pod="openstack/dnsmasq-dns-77597f887-p8x7f"
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.517000 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/029ee4ad-013d-47f2-9590-ca0a0d85d51d-dns-svc\") pod \"dnsmasq-dns-77597f887-p8x7f\" (UID: \"029ee4ad-013d-47f2-9590-ca0a0d85d51d\") " pod="openstack/dnsmasq-dns-77597f887-p8x7f"
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.518190 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/029ee4ad-013d-47f2-9590-ca0a0d85d51d-config\") pod \"dnsmasq-dns-77597f887-p8x7f\" (UID: \"029ee4ad-013d-47f2-9590-ca0a0d85d51d\") " pod="openstack/dnsmasq-dns-77597f887-p8x7f"
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.537135 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6f7b\" (UniqueName: \"kubernetes.io/projected/029ee4ad-013d-47f2-9590-ca0a0d85d51d-kube-api-access-j6f7b\") pod \"dnsmasq-dns-77597f887-p8x7f\" (UID: \"029ee4ad-013d-47f2-9590-ca0a0d85d51d\") " pod="openstack/dnsmasq-dns-77597f887-p8x7f"
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.700063 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77597f887-p8x7f"
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.816979 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.818318 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.820889 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-2jll2"
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.821465 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.821875 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.822393 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.823469 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.825114 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.837487 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.839391 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.921581 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9fd6f03f-abea-4c29-8060-0705bb0af2c7-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") " pod="openstack/rabbitmq-server-0"
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.922022 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9fd6f03f-abea-4c29-8060-0705bb0af2c7-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") " pod="openstack/rabbitmq-server-0"
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.922060 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9fd6f03f-abea-4c29-8060-0705bb0af2c7-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") " pod="openstack/rabbitmq-server-0"
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.922082 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9fd6f03f-abea-4c29-8060-0705bb0af2c7-pod-info\") pod \"rabbitmq-server-0\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") " pod="openstack/rabbitmq-server-0"
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.922102 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9fd6f03f-abea-4c29-8060-0705bb0af2c7-server-conf\") pod \"rabbitmq-server-0\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") " pod="openstack/rabbitmq-server-0"
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.922135 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fx9nd\" (UniqueName: \"kubernetes.io/projected/9fd6f03f-abea-4c29-8060-0705bb0af2c7-kube-api-access-fx9nd\") pod \"rabbitmq-server-0\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") " pod="openstack/rabbitmq-server-0"
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.922166 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9fd6f03f-abea-4c29-8060-0705bb0af2c7-config-data\") pod \"rabbitmq-server-0\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") " pod="openstack/rabbitmq-server-0"
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.922187 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9fd6f03f-abea-4c29-8060-0705bb0af2c7-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") " pod="openstack/rabbitmq-server-0"
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.922203 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9fd6f03f-abea-4c29-8060-0705bb0af2c7-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") " pod="openstack/rabbitmq-server-0"
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.922225 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9fd6f03f-abea-4c29-8060-0705bb0af2c7-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") " pod="openstack/rabbitmq-server-0"
Oct 10 16:48:55 crc kubenswrapper[4799]: I1010 16:48:55.922254 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") " pod="openstack/rabbitmq-server-0"
Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.024384 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fx9nd\" (UniqueName: \"kubernetes.io/projected/9fd6f03f-abea-4c29-8060-0705bb0af2c7-kube-api-access-fx9nd\") pod \"rabbitmq-server-0\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") " pod="openstack/rabbitmq-server-0"
Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.024444 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9fd6f03f-abea-4c29-8060-0705bb0af2c7-config-data\") pod \"rabbitmq-server-0\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") " pod="openstack/rabbitmq-server-0"
Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.024472 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9fd6f03f-abea-4c29-8060-0705bb0af2c7-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") " pod="openstack/rabbitmq-server-0"
Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.024490 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9fd6f03f-abea-4c29-8060-0705bb0af2c7-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") " pod="openstack/rabbitmq-server-0"
Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.024510 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9fd6f03f-abea-4c29-8060-0705bb0af2c7-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") " pod="openstack/rabbitmq-server-0"
Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.024538 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") " pod="openstack/rabbitmq-server-0"
Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.024557 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9fd6f03f-abea-4c29-8060-0705bb0af2c7-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") " pod="openstack/rabbitmq-server-0"
Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.024574 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9fd6f03f-abea-4c29-8060-0705bb0af2c7-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") " pod="openstack/rabbitmq-server-0"
Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.024606 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9fd6f03f-abea-4c29-8060-0705bb0af2c7-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") " pod="openstack/rabbitmq-server-0"
Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.024625 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9fd6f03f-abea-4c29-8060-0705bb0af2c7-pod-info\") pod \"rabbitmq-server-0\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") " pod="openstack/rabbitmq-server-0"
Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.024643 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9fd6f03f-abea-4c29-8060-0705bb0af2c7-server-conf\") pod \"rabbitmq-server-0\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") " pod="openstack/rabbitmq-server-0"
Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.026106 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9fd6f03f-abea-4c29-8060-0705bb0af2c7-server-conf\") pod \"rabbitmq-server-0\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") " pod="openstack/rabbitmq-server-0"
Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.027072 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9fd6f03f-abea-4c29-8060-0705bb0af2c7-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") " pod="openstack/rabbitmq-server-0"
Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.027788 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9fd6f03f-abea-4c29-8060-0705bb0af2c7-config-data\") pod \"rabbitmq-server-0\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") " pod="openstack/rabbitmq-server-0"
Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.028400 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/rabbitmq-server-0"
Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.031152 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9fd6f03f-abea-4c29-8060-0705bb0af2c7-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") " pod="openstack/rabbitmq-server-0"
Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.031475 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9fd6f03f-abea-4c29-8060-0705bb0af2c7-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") " pod="openstack/rabbitmq-server-0"
Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.034882 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9fd6f03f-abea-4c29-8060-0705bb0af2c7-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") " pod="openstack/rabbitmq-server-0"
Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.035014 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9fd6f03f-abea-4c29-8060-0705bb0af2c7-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") " pod="openstack/rabbitmq-server-0"
Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.037565 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9fd6f03f-abea-4c29-8060-0705bb0af2c7-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") " pod="openstack/rabbitmq-server-0"
Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.039425 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9fd6f03f-abea-4c29-8060-0705bb0af2c7-pod-info\") pod \"rabbitmq-server-0\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") " pod="openstack/rabbitmq-server-0"
Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.050605 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fx9nd\" (UniqueName: \"kubernetes.io/projected/9fd6f03f-abea-4c29-8060-0705bb0af2c7-kube-api-access-fx9nd\") pod \"rabbitmq-server-0\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") " pod="openstack/rabbitmq-server-0"
Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.071657 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") " pod="openstack/rabbitmq-server-0"
Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.152117 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-644597f84c-zx44x" event={"ID":"49436e14-c65e-4c80-aaf9-189e76f5a90c","Type":"ContainerStarted","Data":"31e7e4f1dcff9d956b4cde3f0aaf43761f08f3b4d5f4bd731780bafea61badce"}
event for pod" pod="openstack/dnsmasq-dns-644597f84c-zx44x" event={"ID":"49436e14-c65e-4c80-aaf9-189e76f5a90c","Type":"ContainerStarted","Data":"31e7e4f1dcff9d956b4cde3f0aaf43761f08f3b4d5f4bd731780bafea61badce"} Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.176865 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.220287 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77597f887-p8x7f"] Oct 10 16:48:56 crc kubenswrapper[4799]: W1010 16:48:56.223568 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod029ee4ad_013d_47f2_9590_ca0a0d85d51d.slice/crio-9797a4e02da1b48c5d27c4a5f1d8ae05e69979a4982dae41413c4f5bb630eca1 WatchSource:0}: Error finding container 9797a4e02da1b48c5d27c4a5f1d8ae05e69979a4982dae41413c4f5bb630eca1: Status 404 returned error can't find the container with id 9797a4e02da1b48c5d27c4a5f1d8ae05e69979a4982dae41413c4f5bb630eca1 Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.487918 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.489551 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.492854 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.492921 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.493586 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-fvt99" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.493969 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.494231 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.494296 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.496874 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.501702 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.635650 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7858ee88-c7b9-4fb7-b825-569154134201-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.635688 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7858ee88-c7b9-4fb7-b825-569154134201-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"7858ee88-c7b9-4fb7-b825-569154134201\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.635711 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7858ee88-c7b9-4fb7-b825-569154134201-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.635735 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7858ee88-c7b9-4fb7-b825-569154134201-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.635766 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7858ee88-c7b9-4fb7-b825-569154134201-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.635782 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7858ee88-c7b9-4fb7-b825-569154134201-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.635801 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7858ee88-c7b9-4fb7-b825-569154134201-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.635823 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7858ee88-c7b9-4fb7-b825-569154134201-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.635842 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.635868 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7858ee88-c7b9-4fb7-b825-569154134201-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.636953 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rpnw\" (UniqueName: \"kubernetes.io/projected/7858ee88-c7b9-4fb7-b825-569154134201-kube-api-access-4rpnw\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.708275 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 10 16:48:56 crc kubenswrapper[4799]: W1010 16:48:56.730071 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9fd6f03f_abea_4c29_8060_0705bb0af2c7.slice/crio-25e32e81cf2a1229f1f01e83272289d9d568d9b1b40bb8d385017d4faaff08d5 WatchSource:0}: Error finding container 25e32e81cf2a1229f1f01e83272289d9d568d9b1b40bb8d385017d4faaff08d5: Status 404 returned error can't find the container with id 25e32e81cf2a1229f1f01e83272289d9d568d9b1b40bb8d385017d4faaff08d5 Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.737958 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7858ee88-c7b9-4fb7-b825-569154134201-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.737997 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7858ee88-c7b9-4fb7-b825-569154134201-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.738026 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7858ee88-c7b9-4fb7-b825-569154134201-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.738049 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7858ee88-c7b9-4fb7-b825-569154134201-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.738075 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7858ee88-c7b9-4fb7-b825-569154134201-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.738102 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7858ee88-c7b9-4fb7-b825-569154134201-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.738132 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.738169 4799 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7858ee88-c7b9-4fb7-b825-569154134201-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.738208 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rpnw\" (UniqueName: \"kubernetes.io/projected/7858ee88-c7b9-4fb7-b825-569154134201-kube-api-access-4rpnw\") pod \"rabbitmq-cell1-server-0\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.738272 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7858ee88-c7b9-4fb7-b825-569154134201-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.738292 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7858ee88-c7b9-4fb7-b825-569154134201-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.739035 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.739520 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7858ee88-c7b9-4fb7-b825-569154134201-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.739652 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7858ee88-c7b9-4fb7-b825-569154134201-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.740142 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7858ee88-c7b9-4fb7-b825-569154134201-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.740390 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7858ee88-c7b9-4fb7-b825-569154134201-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.742966 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/7858ee88-c7b9-4fb7-b825-569154134201-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.745413 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7858ee88-c7b9-4fb7-b825-569154134201-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.745503 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7858ee88-c7b9-4fb7-b825-569154134201-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.746973 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7858ee88-c7b9-4fb7-b825-569154134201-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.747273 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7858ee88-c7b9-4fb7-b825-569154134201-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.757913 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rpnw\" (UniqueName: \"kubernetes.io/projected/7858ee88-c7b9-4fb7-b825-569154134201-kube-api-access-4rpnw\") pod \"rabbitmq-cell1-server-0\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.780073 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:56 crc kubenswrapper[4799]: I1010 16:48:56.805331 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:48:57 crc kubenswrapper[4799]: I1010 16:48:57.175826 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9fd6f03f-abea-4c29-8060-0705bb0af2c7","Type":"ContainerStarted","Data":"25e32e81cf2a1229f1f01e83272289d9d568d9b1b40bb8d385017d4faaff08d5"} Oct 10 16:48:57 crc kubenswrapper[4799]: I1010 16:48:57.178667 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77597f887-p8x7f" event={"ID":"029ee4ad-013d-47f2-9590-ca0a0d85d51d","Type":"ContainerStarted","Data":"9797a4e02da1b48c5d27c4a5f1d8ae05e69979a4982dae41413c4f5bb630eca1"} Oct 10 16:48:57 crc kubenswrapper[4799]: I1010 16:48:57.292868 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.378268 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.380284 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.387812 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.390005 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.390262 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-wnspl" Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.390451 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.390524 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.391137 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.396102 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.578897 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " pod="openstack/openstack-galera-0" Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.578962 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-secrets\") pod \"openstack-galera-0\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " pod="openstack/openstack-galera-0" Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.578980 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " pod="openstack/openstack-galera-0" Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.578996 4799 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-config-data-default\") pod \"openstack-galera-0\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " pod="openstack/openstack-galera-0" Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.579213 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4v4t\" (UniqueName: \"kubernetes.io/projected/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-kube-api-access-q4v4t\") pod \"openstack-galera-0\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " pod="openstack/openstack-galera-0" Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.579559 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-operator-scripts\") pod \"openstack-galera-0\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " pod="openstack/openstack-galera-0" Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.579716 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " pod="openstack/openstack-galera-0" Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.579808 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-config-data-generated\") pod \"openstack-galera-0\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " pod="openstack/openstack-galera-0" Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.579844 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-kolla-config\") pod \"openstack-galera-0\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " pod="openstack/openstack-galera-0" Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.681620 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-operator-scripts\") pod \"openstack-galera-0\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " pod="openstack/openstack-galera-0" Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.681695 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " pod="openstack/openstack-galera-0" Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.681721 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-config-data-generated\") pod \"openstack-galera-0\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " pod="openstack/openstack-galera-0" Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.681866 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: 
\"kubernetes.io/configmap/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-kolla-config\") pod \"openstack-galera-0\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " pod="openstack/openstack-galera-0" Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.681925 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " pod="openstack/openstack-galera-0" Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.681962 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-secrets\") pod \"openstack-galera-0\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " pod="openstack/openstack-galera-0" Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.681980 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " pod="openstack/openstack-galera-0" Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.682011 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-config-data-default\") pod \"openstack-galera-0\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " pod="openstack/openstack-galera-0" Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.682037 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4v4t\" (UniqueName: \"kubernetes.io/projected/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-kube-api-access-q4v4t\") pod \"openstack-galera-0\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " pod="openstack/openstack-galera-0" Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.682073 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/openstack-galera-0" Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.682444 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-config-data-generated\") pod \"openstack-galera-0\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " pod="openstack/openstack-galera-0" Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.683145 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-kolla-config\") pod \"openstack-galera-0\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " pod="openstack/openstack-galera-0" Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.683348 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-config-data-default\") pod \"openstack-galera-0\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " pod="openstack/openstack-galera-0" Oct 10 16:48:58 
crc kubenswrapper[4799]: I1010 16:48:58.684124 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-operator-scripts\") pod \"openstack-galera-0\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " pod="openstack/openstack-galera-0" Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.688332 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " pod="openstack/openstack-galera-0" Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.689150 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-secrets\") pod \"openstack-galera-0\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " pod="openstack/openstack-galera-0" Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.695931 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " pod="openstack/openstack-galera-0" Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.698311 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4v4t\" (UniqueName: \"kubernetes.io/projected/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-kube-api-access-q4v4t\") pod \"openstack-galera-0\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " pod="openstack/openstack-galera-0" Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.702713 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " pod="openstack/openstack-galera-0" Oct 10 16:48:58 crc kubenswrapper[4799]: I1010 16:48:58.716987 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.281904 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.286069 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.295398 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.335071 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.335712 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-rd48b" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.335802 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.336919 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.394191 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " pod="openstack/openstack-cell1-galera-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.394258 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xbns9\" (UniqueName: \"kubernetes.io/projected/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-kube-api-access-xbns9\") pod \"openstack-cell1-galera-0\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " pod="openstack/openstack-cell1-galera-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.394291 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " pod="openstack/openstack-cell1-galera-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.394305 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " pod="openstack/openstack-cell1-galera-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.394522 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " pod="openstack/openstack-cell1-galera-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.394597 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " pod="openstack/openstack-cell1-galera-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.394628 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" 
(UniqueName: \"kubernetes.io/empty-dir/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " pod="openstack/openstack-cell1-galera-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.394675 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " pod="openstack/openstack-cell1-galera-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.394710 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " pod="openstack/openstack-cell1-galera-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.501710 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " pod="openstack/openstack-cell1-galera-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.501790 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " pod="openstack/openstack-cell1-galera-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.501830 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " pod="openstack/openstack-cell1-galera-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.501855 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " pod="openstack/openstack-cell1-galera-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.501885 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " pod="openstack/openstack-cell1-galera-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.501933 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " pod="openstack/openstack-cell1-galera-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.501957 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xbns9\" (UniqueName: \"kubernetes.io/projected/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-kube-api-access-xbns9\") pod 
\"openstack-cell1-galera-0\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " pod="openstack/openstack-cell1-galera-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.501995 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " pod="openstack/openstack-cell1-galera-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.502010 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " pod="openstack/openstack-cell1-galera-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.502513 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/openstack-cell1-galera-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.503151 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " pod="openstack/openstack-cell1-galera-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.503339 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " pod="openstack/openstack-cell1-galera-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.504295 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " pod="openstack/openstack-cell1-galera-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.504464 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " pod="openstack/openstack-cell1-galera-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.505724 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " pod="openstack/openstack-cell1-galera-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.508985 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " pod="openstack/openstack-cell1-galera-0" Oct 10 
16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.519481 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " pod="openstack/openstack-cell1-galera-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.526104 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xbns9\" (UniqueName: \"kubernetes.io/projected/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-kube-api-access-xbns9\") pod \"openstack-cell1-galera-0\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " pod="openstack/openstack-cell1-galera-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.538788 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " pod="openstack/openstack-cell1-galera-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.660852 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.661586 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.663056 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.665428 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.666158 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.666284 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-tpsgw" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.692460 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.806651 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/985d1485-7054-475b-8e60-85db5dc5afa3-config-data\") pod \"memcached-0\" (UID: \"985d1485-7054-475b-8e60-85db5dc5afa3\") " pod="openstack/memcached-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.806730 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/985d1485-7054-475b-8e60-85db5dc5afa3-kolla-config\") pod \"memcached-0\" (UID: \"985d1485-7054-475b-8e60-85db5dc5afa3\") " pod="openstack/memcached-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.806764 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/985d1485-7054-475b-8e60-85db5dc5afa3-memcached-tls-certs\") pod \"memcached-0\" (UID: \"985d1485-7054-475b-8e60-85db5dc5afa3\") " pod="openstack/memcached-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.806784 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/985d1485-7054-475b-8e60-85db5dc5afa3-combined-ca-bundle\") pod \"memcached-0\" (UID: \"985d1485-7054-475b-8e60-85db5dc5afa3\") " pod="openstack/memcached-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.806821 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tp8sb\" (UniqueName: \"kubernetes.io/projected/985d1485-7054-475b-8e60-85db5dc5afa3-kube-api-access-tp8sb\") pod \"memcached-0\" (UID: \"985d1485-7054-475b-8e60-85db5dc5afa3\") " pod="openstack/memcached-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.927981 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/985d1485-7054-475b-8e60-85db5dc5afa3-kolla-config\") pod \"memcached-0\" (UID: \"985d1485-7054-475b-8e60-85db5dc5afa3\") " pod="openstack/memcached-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.928032 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/985d1485-7054-475b-8e60-85db5dc5afa3-memcached-tls-certs\") pod \"memcached-0\" (UID: \"985d1485-7054-475b-8e60-85db5dc5afa3\") " pod="openstack/memcached-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.928062 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/985d1485-7054-475b-8e60-85db5dc5afa3-combined-ca-bundle\") pod \"memcached-0\" (UID: \"985d1485-7054-475b-8e60-85db5dc5afa3\") " pod="openstack/memcached-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.928131 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tp8sb\" (UniqueName: \"kubernetes.io/projected/985d1485-7054-475b-8e60-85db5dc5afa3-kube-api-access-tp8sb\") pod \"memcached-0\" (UID: \"985d1485-7054-475b-8e60-85db5dc5afa3\") " pod="openstack/memcached-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.928255 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/985d1485-7054-475b-8e60-85db5dc5afa3-config-data\") pod \"memcached-0\" (UID: \"985d1485-7054-475b-8e60-85db5dc5afa3\") " pod="openstack/memcached-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.930315 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/985d1485-7054-475b-8e60-85db5dc5afa3-kolla-config\") pod \"memcached-0\" (UID: \"985d1485-7054-475b-8e60-85db5dc5afa3\") " pod="openstack/memcached-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.932658 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/985d1485-7054-475b-8e60-85db5dc5afa3-config-data\") pod \"memcached-0\" (UID: \"985d1485-7054-475b-8e60-85db5dc5afa3\") " pod="openstack/memcached-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.939564 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/985d1485-7054-475b-8e60-85db5dc5afa3-combined-ca-bundle\") pod \"memcached-0\" (UID: \"985d1485-7054-475b-8e60-85db5dc5afa3\") " pod="openstack/memcached-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.952621 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-tp8sb\" (UniqueName: \"kubernetes.io/projected/985d1485-7054-475b-8e60-85db5dc5afa3-kube-api-access-tp8sb\") pod \"memcached-0\" (UID: \"985d1485-7054-475b-8e60-85db5dc5afa3\") " pod="openstack/memcached-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.954732 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/985d1485-7054-475b-8e60-85db5dc5afa3-memcached-tls-certs\") pod \"memcached-0\" (UID: \"985d1485-7054-475b-8e60-85db5dc5afa3\") " pod="openstack/memcached-0" Oct 10 16:48:59 crc kubenswrapper[4799]: I1010 16:48:59.991334 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Oct 10 16:49:01 crc kubenswrapper[4799]: I1010 16:49:01.154682 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 10 16:49:01 crc kubenswrapper[4799]: I1010 16:49:01.159180 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 10 16:49:01 crc kubenswrapper[4799]: I1010 16:49:01.162956 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 10 16:49:01 crc kubenswrapper[4799]: I1010 16:49:01.167160 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-krq25" Oct 10 16:49:01 crc kubenswrapper[4799]: I1010 16:49:01.254632 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmzzr\" (UniqueName: \"kubernetes.io/projected/55f598f2-d46d-4810-9b39-315e6d90221a-kube-api-access-qmzzr\") pod \"kube-state-metrics-0\" (UID: \"55f598f2-d46d-4810-9b39-315e6d90221a\") " pod="openstack/kube-state-metrics-0" Oct 10 16:49:01 crc kubenswrapper[4799]: W1010 16:49:01.262570 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7858ee88_c7b9_4fb7_b825_569154134201.slice/crio-d86026ff1c29ab1594cd0dff83494d16f80431f3dbdbbe06584ec4d4ea404aea WatchSource:0}: Error finding container d86026ff1c29ab1594cd0dff83494d16f80431f3dbdbbe06584ec4d4ea404aea: Status 404 returned error can't find the container with id d86026ff1c29ab1594cd0dff83494d16f80431f3dbdbbe06584ec4d4ea404aea Oct 10 16:49:01 crc kubenswrapper[4799]: I1010 16:49:01.356596 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmzzr\" (UniqueName: \"kubernetes.io/projected/55f598f2-d46d-4810-9b39-315e6d90221a-kube-api-access-qmzzr\") pod \"kube-state-metrics-0\" (UID: \"55f598f2-d46d-4810-9b39-315e6d90221a\") " pod="openstack/kube-state-metrics-0" Oct 10 16:49:01 crc kubenswrapper[4799]: I1010 16:49:01.381091 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmzzr\" (UniqueName: \"kubernetes.io/projected/55f598f2-d46d-4810-9b39-315e6d90221a-kube-api-access-qmzzr\") pod \"kube-state-metrics-0\" (UID: \"55f598f2-d46d-4810-9b39-315e6d90221a\") " pod="openstack/kube-state-metrics-0" Oct 10 16:49:01 crc kubenswrapper[4799]: I1010 16:49:01.482060 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 10 16:49:02 crc kubenswrapper[4799]: I1010 16:49:02.255477 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7858ee88-c7b9-4fb7-b825-569154134201","Type":"ContainerStarted","Data":"d86026ff1c29ab1594cd0dff83494d16f80431f3dbdbbe06584ec4d4ea404aea"} Oct 10 16:49:02 crc kubenswrapper[4799]: I1010 16:49:02.413543 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 10 16:49:04 crc kubenswrapper[4799]: I1010 16:49:04.956389 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-s8gsd"] Oct 10 16:49:04 crc kubenswrapper[4799]: I1010 16:49:04.958326 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-s8gsd" Oct 10 16:49:04 crc kubenswrapper[4799]: I1010 16:49:04.960578 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-dtplc"] Oct 10 16:49:04 crc kubenswrapper[4799]: I1010 16:49:04.962177 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-dtplc" Oct 10 16:49:04 crc kubenswrapper[4799]: I1010 16:49:04.963141 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-sdd5n" Oct 10 16:49:04 crc kubenswrapper[4799]: I1010 16:49:04.963708 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Oct 10 16:49:04 crc kubenswrapper[4799]: I1010 16:49:04.964457 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Oct 10 16:49:04 crc kubenswrapper[4799]: I1010 16:49:04.980864 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-s8gsd"] Oct 10 16:49:04 crc kubenswrapper[4799]: I1010 16:49:04.988849 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-dtplc"] Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.137476 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/038759ba-4122-4104-8699-81c76590eb2b-combined-ca-bundle\") pod \"ovn-controller-s8gsd\" (UID: \"038759ba-4122-4104-8699-81c76590eb2b\") " pod="openstack/ovn-controller-s8gsd" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.137525 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-var-lib\") pod \"ovn-controller-ovs-dtplc\" (UID: \"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6\") " pod="openstack/ovn-controller-ovs-dtplc" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.137558 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/038759ba-4122-4104-8699-81c76590eb2b-ovn-controller-tls-certs\") pod \"ovn-controller-s8gsd\" (UID: \"038759ba-4122-4104-8699-81c76590eb2b\") " pod="openstack/ovn-controller-s8gsd" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.137606 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8mg29\" (UniqueName: \"kubernetes.io/projected/038759ba-4122-4104-8699-81c76590eb2b-kube-api-access-8mg29\") pod \"ovn-controller-s8gsd\" 
(UID: \"038759ba-4122-4104-8699-81c76590eb2b\") " pod="openstack/ovn-controller-s8gsd" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.137650 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/038759ba-4122-4104-8699-81c76590eb2b-var-run-ovn\") pod \"ovn-controller-s8gsd\" (UID: \"038759ba-4122-4104-8699-81c76590eb2b\") " pod="openstack/ovn-controller-s8gsd" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.137678 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/038759ba-4122-4104-8699-81c76590eb2b-var-run\") pod \"ovn-controller-s8gsd\" (UID: \"038759ba-4122-4104-8699-81c76590eb2b\") " pod="openstack/ovn-controller-s8gsd" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.137697 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/038759ba-4122-4104-8699-81c76590eb2b-var-log-ovn\") pod \"ovn-controller-s8gsd\" (UID: \"038759ba-4122-4104-8699-81c76590eb2b\") " pod="openstack/ovn-controller-s8gsd" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.137716 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-var-run\") pod \"ovn-controller-ovs-dtplc\" (UID: \"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6\") " pod="openstack/ovn-controller-ovs-dtplc" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.137732 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-var-log\") pod \"ovn-controller-ovs-dtplc\" (UID: \"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6\") " pod="openstack/ovn-controller-ovs-dtplc" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.137773 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9sclh\" (UniqueName: \"kubernetes.io/projected/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-kube-api-access-9sclh\") pod \"ovn-controller-ovs-dtplc\" (UID: \"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6\") " pod="openstack/ovn-controller-ovs-dtplc" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.137795 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/038759ba-4122-4104-8699-81c76590eb2b-scripts\") pod \"ovn-controller-s8gsd\" (UID: \"038759ba-4122-4104-8699-81c76590eb2b\") " pod="openstack/ovn-controller-s8gsd" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.137817 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-etc-ovs\") pod \"ovn-controller-ovs-dtplc\" (UID: \"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6\") " pod="openstack/ovn-controller-ovs-dtplc" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.137837 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-scripts\") pod \"ovn-controller-ovs-dtplc\" (UID: \"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6\") " 
pod="openstack/ovn-controller-ovs-dtplc" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.239287 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-scripts\") pod \"ovn-controller-ovs-dtplc\" (UID: \"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6\") " pod="openstack/ovn-controller-ovs-dtplc" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.239667 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/038759ba-4122-4104-8699-81c76590eb2b-combined-ca-bundle\") pod \"ovn-controller-s8gsd\" (UID: \"038759ba-4122-4104-8699-81c76590eb2b\") " pod="openstack/ovn-controller-s8gsd" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.239709 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-var-lib\") pod \"ovn-controller-ovs-dtplc\" (UID: \"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6\") " pod="openstack/ovn-controller-ovs-dtplc" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.239740 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/038759ba-4122-4104-8699-81c76590eb2b-ovn-controller-tls-certs\") pod \"ovn-controller-s8gsd\" (UID: \"038759ba-4122-4104-8699-81c76590eb2b\") " pod="openstack/ovn-controller-s8gsd" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.239783 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8mg29\" (UniqueName: \"kubernetes.io/projected/038759ba-4122-4104-8699-81c76590eb2b-kube-api-access-8mg29\") pod \"ovn-controller-s8gsd\" (UID: \"038759ba-4122-4104-8699-81c76590eb2b\") " pod="openstack/ovn-controller-s8gsd" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.239806 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/038759ba-4122-4104-8699-81c76590eb2b-var-run-ovn\") pod \"ovn-controller-s8gsd\" (UID: \"038759ba-4122-4104-8699-81c76590eb2b\") " pod="openstack/ovn-controller-s8gsd" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.239833 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/038759ba-4122-4104-8699-81c76590eb2b-var-run\") pod \"ovn-controller-s8gsd\" (UID: \"038759ba-4122-4104-8699-81c76590eb2b\") " pod="openstack/ovn-controller-s8gsd" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.239850 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/038759ba-4122-4104-8699-81c76590eb2b-var-log-ovn\") pod \"ovn-controller-s8gsd\" (UID: \"038759ba-4122-4104-8699-81c76590eb2b\") " pod="openstack/ovn-controller-s8gsd" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.239870 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-var-run\") pod \"ovn-controller-ovs-dtplc\" (UID: \"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6\") " pod="openstack/ovn-controller-ovs-dtplc" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.239888 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" 
(UniqueName: \"kubernetes.io/host-path/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-var-log\") pod \"ovn-controller-ovs-dtplc\" (UID: \"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6\") " pod="openstack/ovn-controller-ovs-dtplc" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.239914 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9sclh\" (UniqueName: \"kubernetes.io/projected/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-kube-api-access-9sclh\") pod \"ovn-controller-ovs-dtplc\" (UID: \"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6\") " pod="openstack/ovn-controller-ovs-dtplc" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.239934 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/038759ba-4122-4104-8699-81c76590eb2b-scripts\") pod \"ovn-controller-s8gsd\" (UID: \"038759ba-4122-4104-8699-81c76590eb2b\") " pod="openstack/ovn-controller-s8gsd" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.239958 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-etc-ovs\") pod \"ovn-controller-ovs-dtplc\" (UID: \"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6\") " pod="openstack/ovn-controller-ovs-dtplc" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.241057 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-var-lib\") pod \"ovn-controller-ovs-dtplc\" (UID: \"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6\") " pod="openstack/ovn-controller-ovs-dtplc" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.241220 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-etc-ovs\") pod \"ovn-controller-ovs-dtplc\" (UID: \"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6\") " pod="openstack/ovn-controller-ovs-dtplc" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.242177 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-scripts\") pod \"ovn-controller-ovs-dtplc\" (UID: \"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6\") " pod="openstack/ovn-controller-ovs-dtplc" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.242390 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/038759ba-4122-4104-8699-81c76590eb2b-var-log-ovn\") pod \"ovn-controller-s8gsd\" (UID: \"038759ba-4122-4104-8699-81c76590eb2b\") " pod="openstack/ovn-controller-s8gsd" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.242468 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-var-log\") pod \"ovn-controller-ovs-dtplc\" (UID: \"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6\") " pod="openstack/ovn-controller-ovs-dtplc" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.242664 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/038759ba-4122-4104-8699-81c76590eb2b-var-run-ovn\") pod \"ovn-controller-s8gsd\" (UID: \"038759ba-4122-4104-8699-81c76590eb2b\") " pod="openstack/ovn-controller-s8gsd" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.243897 4799 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-var-run\") pod \"ovn-controller-ovs-dtplc\" (UID: \"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6\") " pod="openstack/ovn-controller-ovs-dtplc" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.244253 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/038759ba-4122-4104-8699-81c76590eb2b-scripts\") pod \"ovn-controller-s8gsd\" (UID: \"038759ba-4122-4104-8699-81c76590eb2b\") " pod="openstack/ovn-controller-s8gsd" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.244267 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/038759ba-4122-4104-8699-81c76590eb2b-var-run\") pod \"ovn-controller-s8gsd\" (UID: \"038759ba-4122-4104-8699-81c76590eb2b\") " pod="openstack/ovn-controller-s8gsd" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.245791 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/038759ba-4122-4104-8699-81c76590eb2b-combined-ca-bundle\") pod \"ovn-controller-s8gsd\" (UID: \"038759ba-4122-4104-8699-81c76590eb2b\") " pod="openstack/ovn-controller-s8gsd" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.246374 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/038759ba-4122-4104-8699-81c76590eb2b-ovn-controller-tls-certs\") pod \"ovn-controller-s8gsd\" (UID: \"038759ba-4122-4104-8699-81c76590eb2b\") " pod="openstack/ovn-controller-s8gsd" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.261779 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8mg29\" (UniqueName: \"kubernetes.io/projected/038759ba-4122-4104-8699-81c76590eb2b-kube-api-access-8mg29\") pod \"ovn-controller-s8gsd\" (UID: \"038759ba-4122-4104-8699-81c76590eb2b\") " pod="openstack/ovn-controller-s8gsd" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.262355 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9sclh\" (UniqueName: \"kubernetes.io/projected/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-kube-api-access-9sclh\") pod \"ovn-controller-ovs-dtplc\" (UID: \"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6\") " pod="openstack/ovn-controller-ovs-dtplc" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.290386 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.292261 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.298047 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.298369 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.299126 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-84xml" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.302331 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.303051 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.303331 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"2a28d2a6-5dfc-414b-9eed-2f412cfc7063","Type":"ContainerStarted","Data":"5b99a6b1fcc73cc34255feadfa95870bdeb8d577bd9cef261035e98f0256eb68"} Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.309801 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.316863 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-s8gsd" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.319803 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-dtplc" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.448541 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37642fb0-1d93-4e14-a176-fea38410097f-config\") pod \"ovsdbserver-nb-0\" (UID: \"37642fb0-1d93-4e14-a176-fea38410097f\") " pod="openstack/ovsdbserver-nb-0" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.448637 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/37642fb0-1d93-4e14-a176-fea38410097f-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"37642fb0-1d93-4e14-a176-fea38410097f\") " pod="openstack/ovsdbserver-nb-0" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.448696 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"37642fb0-1d93-4e14-a176-fea38410097f\") " pod="openstack/ovsdbserver-nb-0" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.448794 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/37642fb0-1d93-4e14-a176-fea38410097f-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"37642fb0-1d93-4e14-a176-fea38410097f\") " pod="openstack/ovsdbserver-nb-0" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.448853 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/37642fb0-1d93-4e14-a176-fea38410097f-scripts\") pod \"ovsdbserver-nb-0\" (UID: 
\"37642fb0-1d93-4e14-a176-fea38410097f\") " pod="openstack/ovsdbserver-nb-0" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.448890 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/37642fb0-1d93-4e14-a176-fea38410097f-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"37642fb0-1d93-4e14-a176-fea38410097f\") " pod="openstack/ovsdbserver-nb-0" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.448937 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zf4qn\" (UniqueName: \"kubernetes.io/projected/37642fb0-1d93-4e14-a176-fea38410097f-kube-api-access-zf4qn\") pod \"ovsdbserver-nb-0\" (UID: \"37642fb0-1d93-4e14-a176-fea38410097f\") " pod="openstack/ovsdbserver-nb-0" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.449026 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37642fb0-1d93-4e14-a176-fea38410097f-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"37642fb0-1d93-4e14-a176-fea38410097f\") " pod="openstack/ovsdbserver-nb-0" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.550441 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/37642fb0-1d93-4e14-a176-fea38410097f-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"37642fb0-1d93-4e14-a176-fea38410097f\") " pod="openstack/ovsdbserver-nb-0" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.550488 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/37642fb0-1d93-4e14-a176-fea38410097f-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"37642fb0-1d93-4e14-a176-fea38410097f\") " pod="openstack/ovsdbserver-nb-0" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.550527 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zf4qn\" (UniqueName: \"kubernetes.io/projected/37642fb0-1d93-4e14-a176-fea38410097f-kube-api-access-zf4qn\") pod \"ovsdbserver-nb-0\" (UID: \"37642fb0-1d93-4e14-a176-fea38410097f\") " pod="openstack/ovsdbserver-nb-0" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.550564 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37642fb0-1d93-4e14-a176-fea38410097f-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"37642fb0-1d93-4e14-a176-fea38410097f\") " pod="openstack/ovsdbserver-nb-0" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.550608 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37642fb0-1d93-4e14-a176-fea38410097f-config\") pod \"ovsdbserver-nb-0\" (UID: \"37642fb0-1d93-4e14-a176-fea38410097f\") " pod="openstack/ovsdbserver-nb-0" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.550625 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/37642fb0-1d93-4e14-a176-fea38410097f-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"37642fb0-1d93-4e14-a176-fea38410097f\") " pod="openstack/ovsdbserver-nb-0" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.550647 4799 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"37642fb0-1d93-4e14-a176-fea38410097f\") " pod="openstack/ovsdbserver-nb-0" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.550690 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/37642fb0-1d93-4e14-a176-fea38410097f-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"37642fb0-1d93-4e14-a176-fea38410097f\") " pod="openstack/ovsdbserver-nb-0" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.551214 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"37642fb0-1d93-4e14-a176-fea38410097f\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/ovsdbserver-nb-0" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.551806 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/37642fb0-1d93-4e14-a176-fea38410097f-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"37642fb0-1d93-4e14-a176-fea38410097f\") " pod="openstack/ovsdbserver-nb-0" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.552032 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37642fb0-1d93-4e14-a176-fea38410097f-config\") pod \"ovsdbserver-nb-0\" (UID: \"37642fb0-1d93-4e14-a176-fea38410097f\") " pod="openstack/ovsdbserver-nb-0" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.552342 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/37642fb0-1d93-4e14-a176-fea38410097f-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"37642fb0-1d93-4e14-a176-fea38410097f\") " pod="openstack/ovsdbserver-nb-0" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.555634 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/37642fb0-1d93-4e14-a176-fea38410097f-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"37642fb0-1d93-4e14-a176-fea38410097f\") " pod="openstack/ovsdbserver-nb-0" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.557183 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37642fb0-1d93-4e14-a176-fea38410097f-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"37642fb0-1d93-4e14-a176-fea38410097f\") " pod="openstack/ovsdbserver-nb-0" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.561502 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/37642fb0-1d93-4e14-a176-fea38410097f-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"37642fb0-1d93-4e14-a176-fea38410097f\") " pod="openstack/ovsdbserver-nb-0" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.566801 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zf4qn\" (UniqueName: \"kubernetes.io/projected/37642fb0-1d93-4e14-a176-fea38410097f-kube-api-access-zf4qn\") pod \"ovsdbserver-nb-0\" (UID: \"37642fb0-1d93-4e14-a176-fea38410097f\") " pod="openstack/ovsdbserver-nb-0" Oct 10 16:49:05 crc 
kubenswrapper[4799]: I1010 16:49:05.586566 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"37642fb0-1d93-4e14-a176-fea38410097f\") " pod="openstack/ovsdbserver-nb-0" Oct 10 16:49:05 crc kubenswrapper[4799]: I1010 16:49:05.631643 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 10 16:49:07 crc kubenswrapper[4799]: I1010 16:49:07.823517 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 10 16:49:07 crc kubenswrapper[4799]: I1010 16:49:07.825782 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 10 16:49:07 crc kubenswrapper[4799]: I1010 16:49:07.829023 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Oct 10 16:49:07 crc kubenswrapper[4799]: I1010 16:49:07.829333 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-9s7kq" Oct 10 16:49:07 crc kubenswrapper[4799]: I1010 16:49:07.829489 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Oct 10 16:49:07 crc kubenswrapper[4799]: I1010 16:49:07.829717 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Oct 10 16:49:07 crc kubenswrapper[4799]: I1010 16:49:07.854932 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 10 16:49:07 crc kubenswrapper[4799]: I1010 16:49:07.994545 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/68f5ed12-8abe-46e0-a60a-086d13b7f038-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"68f5ed12-8abe-46e0-a60a-086d13b7f038\") " pod="openstack/ovsdbserver-sb-0" Oct 10 16:49:07 crc kubenswrapper[4799]: I1010 16:49:07.994601 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/68f5ed12-8abe-46e0-a60a-086d13b7f038-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"68f5ed12-8abe-46e0-a60a-086d13b7f038\") " pod="openstack/ovsdbserver-sb-0" Oct 10 16:49:07 crc kubenswrapper[4799]: I1010 16:49:07.994626 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"68f5ed12-8abe-46e0-a60a-086d13b7f038\") " pod="openstack/ovsdbserver-sb-0" Oct 10 16:49:07 crc kubenswrapper[4799]: I1010 16:49:07.994642 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dl6cb\" (UniqueName: \"kubernetes.io/projected/68f5ed12-8abe-46e0-a60a-086d13b7f038-kube-api-access-dl6cb\") pod \"ovsdbserver-sb-0\" (UID: \"68f5ed12-8abe-46e0-a60a-086d13b7f038\") " pod="openstack/ovsdbserver-sb-0" Oct 10 16:49:07 crc kubenswrapper[4799]: I1010 16:49:07.994669 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/68f5ed12-8abe-46e0-a60a-086d13b7f038-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"68f5ed12-8abe-46e0-a60a-086d13b7f038\") " 
pod="openstack/ovsdbserver-sb-0" Oct 10 16:49:07 crc kubenswrapper[4799]: I1010 16:49:07.994709 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68f5ed12-8abe-46e0-a60a-086d13b7f038-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"68f5ed12-8abe-46e0-a60a-086d13b7f038\") " pod="openstack/ovsdbserver-sb-0" Oct 10 16:49:07 crc kubenswrapper[4799]: I1010 16:49:07.994726 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/68f5ed12-8abe-46e0-a60a-086d13b7f038-config\") pod \"ovsdbserver-sb-0\" (UID: \"68f5ed12-8abe-46e0-a60a-086d13b7f038\") " pod="openstack/ovsdbserver-sb-0" Oct 10 16:49:07 crc kubenswrapper[4799]: I1010 16:49:07.994770 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/68f5ed12-8abe-46e0-a60a-086d13b7f038-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"68f5ed12-8abe-46e0-a60a-086d13b7f038\") " pod="openstack/ovsdbserver-sb-0" Oct 10 16:49:08 crc kubenswrapper[4799]: I1010 16:49:08.096541 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/68f5ed12-8abe-46e0-a60a-086d13b7f038-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"68f5ed12-8abe-46e0-a60a-086d13b7f038\") " pod="openstack/ovsdbserver-sb-0" Oct 10 16:49:08 crc kubenswrapper[4799]: I1010 16:49:08.096612 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68f5ed12-8abe-46e0-a60a-086d13b7f038-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"68f5ed12-8abe-46e0-a60a-086d13b7f038\") " pod="openstack/ovsdbserver-sb-0" Oct 10 16:49:08 crc kubenswrapper[4799]: I1010 16:49:08.096632 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/68f5ed12-8abe-46e0-a60a-086d13b7f038-config\") pod \"ovsdbserver-sb-0\" (UID: \"68f5ed12-8abe-46e0-a60a-086d13b7f038\") " pod="openstack/ovsdbserver-sb-0" Oct 10 16:49:08 crc kubenswrapper[4799]: I1010 16:49:08.096667 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/68f5ed12-8abe-46e0-a60a-086d13b7f038-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"68f5ed12-8abe-46e0-a60a-086d13b7f038\") " pod="openstack/ovsdbserver-sb-0" Oct 10 16:49:08 crc kubenswrapper[4799]: I1010 16:49:08.096732 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/68f5ed12-8abe-46e0-a60a-086d13b7f038-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"68f5ed12-8abe-46e0-a60a-086d13b7f038\") " pod="openstack/ovsdbserver-sb-0" Oct 10 16:49:08 crc kubenswrapper[4799]: I1010 16:49:08.096805 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/68f5ed12-8abe-46e0-a60a-086d13b7f038-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"68f5ed12-8abe-46e0-a60a-086d13b7f038\") " pod="openstack/ovsdbserver-sb-0" Oct 10 16:49:08 crc kubenswrapper[4799]: I1010 16:49:08.096824 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" 
(UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"68f5ed12-8abe-46e0-a60a-086d13b7f038\") " pod="openstack/ovsdbserver-sb-0" Oct 10 16:49:08 crc kubenswrapper[4799]: I1010 16:49:08.096838 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dl6cb\" (UniqueName: \"kubernetes.io/projected/68f5ed12-8abe-46e0-a60a-086d13b7f038-kube-api-access-dl6cb\") pod \"ovsdbserver-sb-0\" (UID: \"68f5ed12-8abe-46e0-a60a-086d13b7f038\") " pod="openstack/ovsdbserver-sb-0" Oct 10 16:49:08 crc kubenswrapper[4799]: I1010 16:49:08.098169 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/68f5ed12-8abe-46e0-a60a-086d13b7f038-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"68f5ed12-8abe-46e0-a60a-086d13b7f038\") " pod="openstack/ovsdbserver-sb-0" Oct 10 16:49:08 crc kubenswrapper[4799]: I1010 16:49:08.099257 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/68f5ed12-8abe-46e0-a60a-086d13b7f038-config\") pod \"ovsdbserver-sb-0\" (UID: \"68f5ed12-8abe-46e0-a60a-086d13b7f038\") " pod="openstack/ovsdbserver-sb-0" Oct 10 16:49:08 crc kubenswrapper[4799]: I1010 16:49:08.099448 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"68f5ed12-8abe-46e0-a60a-086d13b7f038\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/ovsdbserver-sb-0" Oct 10 16:49:08 crc kubenswrapper[4799]: I1010 16:49:08.099530 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/68f5ed12-8abe-46e0-a60a-086d13b7f038-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"68f5ed12-8abe-46e0-a60a-086d13b7f038\") " pod="openstack/ovsdbserver-sb-0" Oct 10 16:49:08 crc kubenswrapper[4799]: I1010 16:49:08.103501 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/68f5ed12-8abe-46e0-a60a-086d13b7f038-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"68f5ed12-8abe-46e0-a60a-086d13b7f038\") " pod="openstack/ovsdbserver-sb-0" Oct 10 16:49:08 crc kubenswrapper[4799]: I1010 16:49:08.107362 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68f5ed12-8abe-46e0-a60a-086d13b7f038-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"68f5ed12-8abe-46e0-a60a-086d13b7f038\") " pod="openstack/ovsdbserver-sb-0" Oct 10 16:49:08 crc kubenswrapper[4799]: I1010 16:49:08.109997 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/68f5ed12-8abe-46e0-a60a-086d13b7f038-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"68f5ed12-8abe-46e0-a60a-086d13b7f038\") " pod="openstack/ovsdbserver-sb-0" Oct 10 16:49:08 crc kubenswrapper[4799]: I1010 16:49:08.112224 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dl6cb\" (UniqueName: \"kubernetes.io/projected/68f5ed12-8abe-46e0-a60a-086d13b7f038-kube-api-access-dl6cb\") pod \"ovsdbserver-sb-0\" (UID: \"68f5ed12-8abe-46e0-a60a-086d13b7f038\") " pod="openstack/ovsdbserver-sb-0" Oct 10 16:49:08 crc kubenswrapper[4799]: I1010 16:49:08.125242 4799 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"68f5ed12-8abe-46e0-a60a-086d13b7f038\") " pod="openstack/ovsdbserver-sb-0" Oct 10 16:49:08 crc kubenswrapper[4799]: I1010 16:49:08.165323 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 10 16:49:11 crc kubenswrapper[4799]: E1010 16:49:11.570164 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:c4e71b2158fd939dad8b8e705273493051d3023273d23b279f2699dce6db33df" Oct 10 16:49:11 crc kubenswrapper[4799]: E1010 16:49:11.571073 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:c4e71b2158fd939dad8b8e705273493051d3023273d23b279f2699dce6db33df,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nztnk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-7bfcb9d745-x7l2v_openstack(51ebfb45-0683-4095-9ce4-1224edf571f3): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 10 16:49:11 crc kubenswrapper[4799]: E1010 16:49:11.573176 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-7bfcb9d745-x7l2v" podUID="51ebfb45-0683-4095-9ce4-1224edf571f3" Oct 10 16:49:12 crc kubenswrapper[4799]: E1010 16:49:12.582659 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled 
desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:c4e71b2158fd939dad8b8e705273493051d3023273d23b279f2699dce6db33df" Oct 10 16:49:12 crc kubenswrapper[4799]: E1010 16:49:12.583217 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:c4e71b2158fd939dad8b8e705273493051d3023273d23b279f2699dce6db33df,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dhdrk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-758b79db4c-wzjph_openstack(d31fd219-0ef3-47e3-9106-9546b0829c53): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 10 16:49:12 crc kubenswrapper[4799]: E1010 16:49:12.584612 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-758b79db4c-wzjph" podUID="d31fd219-0ef3-47e3-9106-9546b0829c53" Oct 10 16:49:12 crc kubenswrapper[4799]: E1010 16:49:12.612108 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:c4e71b2158fd939dad8b8e705273493051d3023273d23b279f2699dce6db33df" Oct 10 16:49:12 crc kubenswrapper[4799]: E1010 16:49:12.612317 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:c4e71b2158fd939dad8b8e705273493051d3023273d23b279f2699dce6db33df,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pffhn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-644597f84c-zx44x_openstack(49436e14-c65e-4c80-aaf9-189e76f5a90c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 10 16:49:12 crc kubenswrapper[4799]: E1010 16:49:12.614221 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-644597f84c-zx44x" podUID="49436e14-c65e-4c80-aaf9-189e76f5a90c" Oct 10 16:49:12 crc kubenswrapper[4799]: E1010 16:49:12.662945 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:c4e71b2158fd939dad8b8e705273493051d3023273d23b279f2699dce6db33df" Oct 10 16:49:12 crc kubenswrapper[4799]: E1010 16:49:12.663155 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:c4e71b2158fd939dad8b8e705273493051d3023273d23b279f2699dce6db33df,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed 
--no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-j6f7b,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-77597f887-p8x7f_openstack(029ee4ad-013d-47f2-9590-ca0a0d85d51d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 10 16:49:12 crc kubenswrapper[4799]: E1010 16:49:12.664851 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-77597f887-p8x7f" podUID="029ee4ad-013d-47f2-9590-ca0a0d85d51d" Oct 10 16:49:12 crc kubenswrapper[4799]: I1010 16:49:12.985402 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 10 16:49:13 crc kubenswrapper[4799]: I1010 16:49:13.052466 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 10 16:49:13 crc kubenswrapper[4799]: E1010 16:49:13.371074 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:c4e71b2158fd939dad8b8e705273493051d3023273d23b279f2699dce6db33df\\\"\"" pod="openstack/dnsmasq-dns-644597f84c-zx44x" podUID="49436e14-c65e-4c80-aaf9-189e76f5a90c" Oct 10 16:49:13 crc kubenswrapper[4799]: E1010 16:49:13.371090 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:c4e71b2158fd939dad8b8e705273493051d3023273d23b279f2699dce6db33df\\\"\"" pod="openstack/dnsmasq-dns-77597f887-p8x7f" podUID="029ee4ad-013d-47f2-9590-ca0a0d85d51d" Oct 10 16:49:16 crc kubenswrapper[4799]: W1010 16:49:16.897127 4799 
manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod985d1485_7054_475b_8e60_85db5dc5afa3.slice/crio-84483c379ac0b0a6a31e4b9cb3e92368a20abf5e0b33eaffc729e36b4500d232 WatchSource:0}: Error finding container 84483c379ac0b0a6a31e4b9cb3e92368a20abf5e0b33eaffc729e36b4500d232: Status 404 returned error can't find the container with id 84483c379ac0b0a6a31e4b9cb3e92368a20abf5e0b33eaffc729e36b4500d232 Oct 10 16:49:17 crc kubenswrapper[4799]: I1010 16:49:17.068209 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bfcb9d745-x7l2v" Oct 10 16:49:17 crc kubenswrapper[4799]: I1010 16:49:17.072105 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-758b79db4c-wzjph" Oct 10 16:49:17 crc kubenswrapper[4799]: I1010 16:49:17.154587 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nztnk\" (UniqueName: \"kubernetes.io/projected/51ebfb45-0683-4095-9ce4-1224edf571f3-kube-api-access-nztnk\") pod \"51ebfb45-0683-4095-9ce4-1224edf571f3\" (UID: \"51ebfb45-0683-4095-9ce4-1224edf571f3\") " Oct 10 16:49:17 crc kubenswrapper[4799]: I1010 16:49:17.154646 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dhdrk\" (UniqueName: \"kubernetes.io/projected/d31fd219-0ef3-47e3-9106-9546b0829c53-kube-api-access-dhdrk\") pod \"d31fd219-0ef3-47e3-9106-9546b0829c53\" (UID: \"d31fd219-0ef3-47e3-9106-9546b0829c53\") " Oct 10 16:49:17 crc kubenswrapper[4799]: I1010 16:49:17.154713 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d31fd219-0ef3-47e3-9106-9546b0829c53-config\") pod \"d31fd219-0ef3-47e3-9106-9546b0829c53\" (UID: \"d31fd219-0ef3-47e3-9106-9546b0829c53\") " Oct 10 16:49:17 crc kubenswrapper[4799]: I1010 16:49:17.154737 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d31fd219-0ef3-47e3-9106-9546b0829c53-dns-svc\") pod \"d31fd219-0ef3-47e3-9106-9546b0829c53\" (UID: \"d31fd219-0ef3-47e3-9106-9546b0829c53\") " Oct 10 16:49:17 crc kubenswrapper[4799]: I1010 16:49:17.154794 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51ebfb45-0683-4095-9ce4-1224edf571f3-config\") pod \"51ebfb45-0683-4095-9ce4-1224edf571f3\" (UID: \"51ebfb45-0683-4095-9ce4-1224edf571f3\") " Oct 10 16:49:17 crc kubenswrapper[4799]: I1010 16:49:17.155500 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51ebfb45-0683-4095-9ce4-1224edf571f3-config" (OuterVolumeSpecName: "config") pod "51ebfb45-0683-4095-9ce4-1224edf571f3" (UID: "51ebfb45-0683-4095-9ce4-1224edf571f3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:49:17 crc kubenswrapper[4799]: I1010 16:49:17.156361 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d31fd219-0ef3-47e3-9106-9546b0829c53-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d31fd219-0ef3-47e3-9106-9546b0829c53" (UID: "d31fd219-0ef3-47e3-9106-9546b0829c53"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:49:17 crc kubenswrapper[4799]: I1010 16:49:17.156399 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d31fd219-0ef3-47e3-9106-9546b0829c53-config" (OuterVolumeSpecName: "config") pod "d31fd219-0ef3-47e3-9106-9546b0829c53" (UID: "d31fd219-0ef3-47e3-9106-9546b0829c53"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:49:17 crc kubenswrapper[4799]: I1010 16:49:17.160548 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d31fd219-0ef3-47e3-9106-9546b0829c53-kube-api-access-dhdrk" (OuterVolumeSpecName: "kube-api-access-dhdrk") pod "d31fd219-0ef3-47e3-9106-9546b0829c53" (UID: "d31fd219-0ef3-47e3-9106-9546b0829c53"). InnerVolumeSpecName "kube-api-access-dhdrk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:49:17 crc kubenswrapper[4799]: I1010 16:49:17.164802 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51ebfb45-0683-4095-9ce4-1224edf571f3-kube-api-access-nztnk" (OuterVolumeSpecName: "kube-api-access-nztnk") pod "51ebfb45-0683-4095-9ce4-1224edf571f3" (UID: "51ebfb45-0683-4095-9ce4-1224edf571f3"). InnerVolumeSpecName "kube-api-access-nztnk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:49:17 crc kubenswrapper[4799]: I1010 16:49:17.256353 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51ebfb45-0683-4095-9ce4-1224edf571f3-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:49:17 crc kubenswrapper[4799]: I1010 16:49:17.256656 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nztnk\" (UniqueName: \"kubernetes.io/projected/51ebfb45-0683-4095-9ce4-1224edf571f3-kube-api-access-nztnk\") on node \"crc\" DevicePath \"\"" Oct 10 16:49:17 crc kubenswrapper[4799]: I1010 16:49:17.256668 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dhdrk\" (UniqueName: \"kubernetes.io/projected/d31fd219-0ef3-47e3-9106-9546b0829c53-kube-api-access-dhdrk\") on node \"crc\" DevicePath \"\"" Oct 10 16:49:17 crc kubenswrapper[4799]: I1010 16:49:17.256679 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d31fd219-0ef3-47e3-9106-9546b0829c53-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:49:17 crc kubenswrapper[4799]: I1010 16:49:17.256687 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d31fd219-0ef3-47e3-9106-9546b0829c53-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 10 16:49:17 crc kubenswrapper[4799]: I1010 16:49:17.420475 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"2a28d2a6-5dfc-414b-9eed-2f412cfc7063","Type":"ContainerStarted","Data":"0c1e78ba8474ec6005a2d9c9475e87357ba32aba07afc86bc8dd92174c72576e"} Oct 10 16:49:17 crc kubenswrapper[4799]: I1010 16:49:17.422366 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-758b79db4c-wzjph" event={"ID":"d31fd219-0ef3-47e3-9106-9546b0829c53","Type":"ContainerDied","Data":"cc52994ad214f36320d6ac84b508183d2d5174f09eab2830575d8f1f30a6d95a"} Oct 10 16:49:17 crc kubenswrapper[4799]: I1010 16:49:17.422384 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-758b79db4c-wzjph" Oct 10 16:49:17 crc kubenswrapper[4799]: I1010 16:49:17.426388 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bfcb9d745-x7l2v" Oct 10 16:49:17 crc kubenswrapper[4799]: I1010 16:49:17.426412 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bfcb9d745-x7l2v" event={"ID":"51ebfb45-0683-4095-9ce4-1224edf571f3","Type":"ContainerDied","Data":"3add22576b5449e37798a2b755c666bbb81322f70e190bcc062cc4912061da9a"} Oct 10 16:49:17 crc kubenswrapper[4799]: I1010 16:49:17.437182 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 10 16:49:17 crc kubenswrapper[4799]: I1010 16:49:17.438452 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"985d1485-7054-475b-8e60-85db5dc5afa3","Type":"ContainerStarted","Data":"84483c379ac0b0a6a31e4b9cb3e92368a20abf5e0b33eaffc729e36b4500d232"} Oct 10 16:49:17 crc kubenswrapper[4799]: I1010 16:49:17.444447 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"fe3f0980-0eb7-4267-953a-3fcfa08a22b3","Type":"ContainerStarted","Data":"5eaca428f78ae2e2d16bc7508311a4fa042608c0c33fe3fc2756c0df348f7aa2"} Oct 10 16:49:17 crc kubenswrapper[4799]: I1010 16:49:17.486174 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-dtplc"] Oct 10 16:49:17 crc kubenswrapper[4799]: I1010 16:49:17.523718 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-s8gsd"] Oct 10 16:49:17 crc kubenswrapper[4799]: I1010 16:49:17.570165 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7bfcb9d745-x7l2v"] Oct 10 16:49:17 crc kubenswrapper[4799]: I1010 16:49:17.576578 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7bfcb9d745-x7l2v"] Oct 10 16:49:17 crc kubenswrapper[4799]: I1010 16:49:17.597660 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-758b79db4c-wzjph"] Oct 10 16:49:17 crc kubenswrapper[4799]: I1010 16:49:17.604067 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-758b79db4c-wzjph"] Oct 10 16:49:17 crc kubenswrapper[4799]: I1010 16:49:17.630987 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 10 16:49:17 crc kubenswrapper[4799]: W1010 16:49:17.787432 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod68f5ed12_8abe_46e0_a60a_086d13b7f038.slice/crio-e84c8665d27eab7d53d5aa48205d379735b8c243dfc311437100e8672795f50e WatchSource:0}: Error finding container e84c8665d27eab7d53d5aa48205d379735b8c243dfc311437100e8672795f50e: Status 404 returned error can't find the container with id e84c8665d27eab7d53d5aa48205d379735b8c243dfc311437100e8672795f50e Oct 10 16:49:18 crc kubenswrapper[4799]: I1010 16:49:18.318480 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 10 16:49:18 crc kubenswrapper[4799]: I1010 16:49:18.453023 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9fd6f03f-abea-4c29-8060-0705bb0af2c7","Type":"ContainerStarted","Data":"adfdbb90972668f2d71dad450618269e6685fe2f84e1846228c2c17d1cd7c04c"} Oct 10 16:49:18 crc kubenswrapper[4799]: I1010 16:49:18.456318 4799 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/ovn-controller-s8gsd" event={"ID":"038759ba-4122-4104-8699-81c76590eb2b","Type":"ContainerStarted","Data":"98d66bf6f4ca25f2a01f7ec13370c35dda5b7a842e7cb12577c912dd8a2b9b48"} Oct 10 16:49:18 crc kubenswrapper[4799]: I1010 16:49:18.459532 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"68f5ed12-8abe-46e0-a60a-086d13b7f038","Type":"ContainerStarted","Data":"e84c8665d27eab7d53d5aa48205d379735b8c243dfc311437100e8672795f50e"} Oct 10 16:49:18 crc kubenswrapper[4799]: I1010 16:49:18.461127 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"55f598f2-d46d-4810-9b39-315e6d90221a","Type":"ContainerStarted","Data":"8d23b1caa185848314227f8c56ea77f53784e096cee997d6152c271a1935c0ed"} Oct 10 16:49:18 crc kubenswrapper[4799]: I1010 16:49:18.462904 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"fe3f0980-0eb7-4267-953a-3fcfa08a22b3","Type":"ContainerStarted","Data":"ea589f2e4da954eafaee6ac906e5b45cec97bf54ed54d2037ffdc855f4a6f323"} Oct 10 16:49:18 crc kubenswrapper[4799]: I1010 16:49:18.467417 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7858ee88-c7b9-4fb7-b825-569154134201","Type":"ContainerStarted","Data":"530054c73abfb931af9932880c554d60aaa19e406d6b80e4c78cfa9e40a7c9a7"} Oct 10 16:49:18 crc kubenswrapper[4799]: I1010 16:49:18.469913 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-dtplc" event={"ID":"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6","Type":"ContainerStarted","Data":"2ac6e83071a616986284e4825a2b05c663588602a39cf3bd68ac0ca47c7c0741"} Oct 10 16:49:18 crc kubenswrapper[4799]: W1010 16:49:18.803392 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod37642fb0_1d93_4e14_a176_fea38410097f.slice/crio-b23b73380c8a03acfdfd5e7a3be56d0bd43f81bcef1d051586f929206650c736 WatchSource:0}: Error finding container b23b73380c8a03acfdfd5e7a3be56d0bd43f81bcef1d051586f929206650c736: Status 404 returned error can't find the container with id b23b73380c8a03acfdfd5e7a3be56d0bd43f81bcef1d051586f929206650c736 Oct 10 16:49:19 crc kubenswrapper[4799]: I1010 16:49:19.412324 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51ebfb45-0683-4095-9ce4-1224edf571f3" path="/var/lib/kubelet/pods/51ebfb45-0683-4095-9ce4-1224edf571f3/volumes" Oct 10 16:49:19 crc kubenswrapper[4799]: I1010 16:49:19.413508 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d31fd219-0ef3-47e3-9106-9546b0829c53" path="/var/lib/kubelet/pods/d31fd219-0ef3-47e3-9106-9546b0829c53/volumes" Oct 10 16:49:19 crc kubenswrapper[4799]: I1010 16:49:19.476551 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"37642fb0-1d93-4e14-a176-fea38410097f","Type":"ContainerStarted","Data":"b23b73380c8a03acfdfd5e7a3be56d0bd43f81bcef1d051586f929206650c736"} Oct 10 16:49:21 crc kubenswrapper[4799]: I1010 16:49:21.497679 4799 generic.go:334] "Generic (PLEG): container finished" podID="fe3f0980-0eb7-4267-953a-3fcfa08a22b3" containerID="ea589f2e4da954eafaee6ac906e5b45cec97bf54ed54d2037ffdc855f4a6f323" exitCode=0 Oct 10 16:49:21 crc kubenswrapper[4799]: I1010 16:49:21.497814 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" 
event={"ID":"fe3f0980-0eb7-4267-953a-3fcfa08a22b3","Type":"ContainerDied","Data":"ea589f2e4da954eafaee6ac906e5b45cec97bf54ed54d2037ffdc855f4a6f323"} Oct 10 16:49:21 crc kubenswrapper[4799]: I1010 16:49:21.500576 4799 generic.go:334] "Generic (PLEG): container finished" podID="2a28d2a6-5dfc-414b-9eed-2f412cfc7063" containerID="0c1e78ba8474ec6005a2d9c9475e87357ba32aba07afc86bc8dd92174c72576e" exitCode=0 Oct 10 16:49:21 crc kubenswrapper[4799]: I1010 16:49:21.500748 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"2a28d2a6-5dfc-414b-9eed-2f412cfc7063","Type":"ContainerDied","Data":"0c1e78ba8474ec6005a2d9c9475e87357ba32aba07afc86bc8dd92174c72576e"} Oct 10 16:49:23 crc kubenswrapper[4799]: I1010 16:49:23.523719 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"37642fb0-1d93-4e14-a176-fea38410097f","Type":"ContainerStarted","Data":"78ebb853c6f1f78b3bad7b9528e8955d736b8b1c888e5a761ed264cd5256d2c8"} Oct 10 16:49:23 crc kubenswrapper[4799]: I1010 16:49:23.525613 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"2a28d2a6-5dfc-414b-9eed-2f412cfc7063","Type":"ContainerStarted","Data":"2252cc52e03872ad264363f4a4f2c8970a681e759e68c07793c60a2df2a41d55"} Oct 10 16:49:23 crc kubenswrapper[4799]: I1010 16:49:23.529609 4799 generic.go:334] "Generic (PLEG): container finished" podID="e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6" containerID="65d844d41d22e8a89359b0ab4a69c944a58fa895f58a1f8c5e6ad153c601d704" exitCode=0 Oct 10 16:49:23 crc kubenswrapper[4799]: I1010 16:49:23.529676 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-dtplc" event={"ID":"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6","Type":"ContainerDied","Data":"65d844d41d22e8a89359b0ab4a69c944a58fa895f58a1f8c5e6ad153c601d704"} Oct 10 16:49:23 crc kubenswrapper[4799]: I1010 16:49:23.534231 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"68f5ed12-8abe-46e0-a60a-086d13b7f038","Type":"ContainerStarted","Data":"9a442c2442efda9014b2e49c109e3fd6db0be3a601326fff77372e592aa5bef9"} Oct 10 16:49:23 crc kubenswrapper[4799]: I1010 16:49:23.537937 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-s8gsd" event={"ID":"038759ba-4122-4104-8699-81c76590eb2b","Type":"ContainerStarted","Data":"b3235f6df91cdc5e70f1254705cb25138102f64441cbd1220edb479a98f3de0a"} Oct 10 16:49:23 crc kubenswrapper[4799]: I1010 16:49:23.538550 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-s8gsd" Oct 10 16:49:23 crc kubenswrapper[4799]: I1010 16:49:23.540444 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"55f598f2-d46d-4810-9b39-315e6d90221a","Type":"ContainerStarted","Data":"7eb5912860b51cfc7a126e352bd00277971d2df409a2d5e241478f27fcf39ec0"} Oct 10 16:49:23 crc kubenswrapper[4799]: I1010 16:49:23.540817 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Oct 10 16:49:23 crc kubenswrapper[4799]: I1010 16:49:23.543275 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"985d1485-7054-475b-8e60-85db5dc5afa3","Type":"ContainerStarted","Data":"d7c80cad377e5693f3e27682ebd24a34de4bbf7f43f72423036babd6bf753968"} Oct 10 16:49:23 crc kubenswrapper[4799]: I1010 16:49:23.543476 4799 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack/memcached-0" Oct 10 16:49:23 crc kubenswrapper[4799]: I1010 16:49:23.551218 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"fe3f0980-0eb7-4267-953a-3fcfa08a22b3","Type":"ContainerStarted","Data":"fad55af2475f84915150c67ad522384e34d6b8041a129873b036bf99434ad863"} Oct 10 16:49:23 crc kubenswrapper[4799]: I1010 16:49:23.597833 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=13.277982781 podStartE2EDuration="25.597814056s" podCreationTimestamp="2025-10-10 16:48:58 +0000 UTC" firstStartedPulling="2025-10-10 16:49:04.691722501 +0000 UTC m=+1038.200046656" lastFinishedPulling="2025-10-10 16:49:17.011553806 +0000 UTC m=+1050.519877931" observedRunningTime="2025-10-10 16:49:23.550338413 +0000 UTC m=+1057.058662538" watchObservedRunningTime="2025-10-10 16:49:23.597814056 +0000 UTC m=+1057.106138171" Oct 10 16:49:23 crc kubenswrapper[4799]: I1010 16:49:23.617728 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=22.532038521 podStartE2EDuration="24.617707497s" podCreationTimestamp="2025-10-10 16:48:59 +0000 UTC" firstStartedPulling="2025-10-10 16:49:16.941571277 +0000 UTC m=+1050.449895402" lastFinishedPulling="2025-10-10 16:49:19.027240263 +0000 UTC m=+1052.535564378" observedRunningTime="2025-10-10 16:49:23.599432206 +0000 UTC m=+1057.107756321" watchObservedRunningTime="2025-10-10 16:49:23.617707497 +0000 UTC m=+1057.126031612" Oct 10 16:49:23 crc kubenswrapper[4799]: I1010 16:49:23.619510 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=17.598749709 podStartE2EDuration="22.619501681s" podCreationTimestamp="2025-10-10 16:49:01 +0000 UTC" firstStartedPulling="2025-10-10 16:49:17.537412984 +0000 UTC m=+1051.045737099" lastFinishedPulling="2025-10-10 16:49:22.558164946 +0000 UTC m=+1056.066489071" observedRunningTime="2025-10-10 16:49:23.614670932 +0000 UTC m=+1057.122995047" watchObservedRunningTime="2025-10-10 16:49:23.619501681 +0000 UTC m=+1057.127825796" Oct 10 16:49:23 crc kubenswrapper[4799]: I1010 16:49:23.640505 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-s8gsd" podStartSLOduration=14.781518624 podStartE2EDuration="19.64048928s" podCreationTimestamp="2025-10-10 16:49:04 +0000 UTC" firstStartedPulling="2025-10-10 16:49:17.548029376 +0000 UTC m=+1051.056353491" lastFinishedPulling="2025-10-10 16:49:22.407000042 +0000 UTC m=+1055.915324147" observedRunningTime="2025-10-10 16:49:23.632919353 +0000 UTC m=+1057.141243478" watchObservedRunningTime="2025-10-10 16:49:23.64048928 +0000 UTC m=+1057.148813395" Oct 10 16:49:24 crc kubenswrapper[4799]: I1010 16:49:24.422726 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=26.965473096 podStartE2EDuration="27.42270843s" podCreationTimestamp="2025-10-10 16:48:57 +0000 UTC" firstStartedPulling="2025-10-10 16:49:16.892742521 +0000 UTC m=+1050.401066686" lastFinishedPulling="2025-10-10 16:49:17.349977905 +0000 UTC m=+1050.858302020" observedRunningTime="2025-10-10 16:49:23.673560017 +0000 UTC m=+1057.181884122" watchObservedRunningTime="2025-10-10 16:49:24.42270843 +0000 UTC m=+1057.931032545" Oct 10 16:49:24 crc kubenswrapper[4799]: I1010 16:49:24.562975 4799 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/ovn-controller-ovs-dtplc" event={"ID":"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6","Type":"ContainerStarted","Data":"387afba31b4e67a9ba9f7f2877d3f3af184a7c60b3843119336ceb5759893e62"} Oct 10 16:49:24 crc kubenswrapper[4799]: I1010 16:49:24.563055 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-dtplc" event={"ID":"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6","Type":"ContainerStarted","Data":"24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1"} Oct 10 16:49:24 crc kubenswrapper[4799]: I1010 16:49:24.563417 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-dtplc" Oct 10 16:49:24 crc kubenswrapper[4799]: I1010 16:49:24.563466 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-dtplc" Oct 10 16:49:24 crc kubenswrapper[4799]: I1010 16:49:24.588250 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-dtplc" podStartSLOduration=15.728998126 podStartE2EDuration="20.588230808s" podCreationTimestamp="2025-10-10 16:49:04 +0000 UTC" firstStartedPulling="2025-10-10 16:49:17.538704916 +0000 UTC m=+1051.047029031" lastFinishedPulling="2025-10-10 16:49:22.397937598 +0000 UTC m=+1055.906261713" observedRunningTime="2025-10-10 16:49:24.585452729 +0000 UTC m=+1058.093776864" watchObservedRunningTime="2025-10-10 16:49:24.588230808 +0000 UTC m=+1058.096554923" Oct 10 16:49:26 crc kubenswrapper[4799]: I1010 16:49:26.578464 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"37642fb0-1d93-4e14-a176-fea38410097f","Type":"ContainerStarted","Data":"f05670d9c23e36e24162d124b779c8a5b0a8aa589baac01d94d0aecd8b1875ef"} Oct 10 16:49:26 crc kubenswrapper[4799]: I1010 16:49:26.589598 4799 generic.go:334] "Generic (PLEG): container finished" podID="029ee4ad-013d-47f2-9590-ca0a0d85d51d" containerID="70fbe4e9e45ade1b80f1b373d107742d184e1ad80eef9999f36f3695a64f6dbb" exitCode=0 Oct 10 16:49:26 crc kubenswrapper[4799]: I1010 16:49:26.590044 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77597f887-p8x7f" event={"ID":"029ee4ad-013d-47f2-9590-ca0a0d85d51d","Type":"ContainerDied","Data":"70fbe4e9e45ade1b80f1b373d107742d184e1ad80eef9999f36f3695a64f6dbb"} Oct 10 16:49:26 crc kubenswrapper[4799]: I1010 16:49:26.592504 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"68f5ed12-8abe-46e0-a60a-086d13b7f038","Type":"ContainerStarted","Data":"1626175edad35c9d874dcd747ee3e25c9a1d3785dcb4a248d8af85626d8ee7f2"} Oct 10 16:49:26 crc kubenswrapper[4799]: I1010 16:49:26.610500 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=15.591901501 podStartE2EDuration="22.610470598s" podCreationTimestamp="2025-10-10 16:49:04 +0000 UTC" firstStartedPulling="2025-10-10 16:49:18.805569818 +0000 UTC m=+1052.313893933" lastFinishedPulling="2025-10-10 16:49:25.824138875 +0000 UTC m=+1059.332463030" observedRunningTime="2025-10-10 16:49:26.607793142 +0000 UTC m=+1060.116117267" watchObservedRunningTime="2025-10-10 16:49:26.610470598 +0000 UTC m=+1060.118794723" Oct 10 16:49:26 crc kubenswrapper[4799]: I1010 16:49:26.632273 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Oct 10 16:49:26 crc kubenswrapper[4799]: I1010 16:49:26.648460 4799 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=12.578570701 podStartE2EDuration="20.648436146s" podCreationTimestamp="2025-10-10 16:49:06 +0000 UTC" firstStartedPulling="2025-10-10 16:49:17.790978077 +0000 UTC m=+1051.299302192" lastFinishedPulling="2025-10-10 16:49:25.860843522 +0000 UTC m=+1059.369167637" observedRunningTime="2025-10-10 16:49:26.640749116 +0000 UTC m=+1060.149073261" watchObservedRunningTime="2025-10-10 16:49:26.648436146 +0000 UTC m=+1060.156760271" Oct 10 16:49:26 crc kubenswrapper[4799]: I1010 16:49:26.688006 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Oct 10 16:49:27 crc kubenswrapper[4799]: I1010 16:49:27.607196 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77597f887-p8x7f" event={"ID":"029ee4ad-013d-47f2-9590-ca0a0d85d51d","Type":"ContainerStarted","Data":"22653454922cdecce643b707c9d9d8ae4eb73824c19893a802a888d7026ab942"} Oct 10 16:49:27 crc kubenswrapper[4799]: I1010 16:49:27.607663 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Oct 10 16:49:27 crc kubenswrapper[4799]: I1010 16:49:27.608183 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-77597f887-p8x7f" Oct 10 16:49:27 crc kubenswrapper[4799]: I1010 16:49:27.637219 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-77597f887-p8x7f" podStartSLOduration=3.001832133 podStartE2EDuration="32.637178057s" podCreationTimestamp="2025-10-10 16:48:55 +0000 UTC" firstStartedPulling="2025-10-10 16:48:56.226906133 +0000 UTC m=+1029.735230248" lastFinishedPulling="2025-10-10 16:49:25.862252057 +0000 UTC m=+1059.370576172" observedRunningTime="2025-10-10 16:49:27.629261221 +0000 UTC m=+1061.137585346" watchObservedRunningTime="2025-10-10 16:49:27.637178057 +0000 UTC m=+1061.145502172" Oct 10 16:49:27 crc kubenswrapper[4799]: I1010 16:49:27.653246 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Oct 10 16:49:27 crc kubenswrapper[4799]: I1010 16:49:27.979703 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-644597f84c-zx44x"] Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.052488 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-f6b595d95-jhcqh"] Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.053903 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f6b595d95-jhcqh" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.058010 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.086567 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-rlrvx"] Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.087927 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-rlrvx" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.091286 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.094884 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f6b595d95-jhcqh"] Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.104669 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-rlrvx"] Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.148980 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/361acf90-3fb7-4266-b2ff-20e514aa5719-config\") pod \"dnsmasq-dns-f6b595d95-jhcqh\" (UID: \"361acf90-3fb7-4266-b2ff-20e514aa5719\") " pod="openstack/dnsmasq-dns-f6b595d95-jhcqh" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.149288 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-ovs-rundir\") pod \"ovn-controller-metrics-rlrvx\" (UID: \"8f591cb1-902a-406e-b93a-56c2b7ec9cb8\") " pod="openstack/ovn-controller-metrics-rlrvx" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.149418 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-rlrvx\" (UID: \"8f591cb1-902a-406e-b93a-56c2b7ec9cb8\") " pod="openstack/ovn-controller-metrics-rlrvx" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.149550 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jstrj\" (UniqueName: \"kubernetes.io/projected/361acf90-3fb7-4266-b2ff-20e514aa5719-kube-api-access-jstrj\") pod \"dnsmasq-dns-f6b595d95-jhcqh\" (UID: \"361acf90-3fb7-4266-b2ff-20e514aa5719\") " pod="openstack/dnsmasq-dns-f6b595d95-jhcqh" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.149683 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/361acf90-3fb7-4266-b2ff-20e514aa5719-dns-svc\") pod \"dnsmasq-dns-f6b595d95-jhcqh\" (UID: \"361acf90-3fb7-4266-b2ff-20e514aa5719\") " pod="openstack/dnsmasq-dns-f6b595d95-jhcqh" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.149829 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-combined-ca-bundle\") pod \"ovn-controller-metrics-rlrvx\" (UID: \"8f591cb1-902a-406e-b93a-56c2b7ec9cb8\") " pod="openstack/ovn-controller-metrics-rlrvx" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.149972 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-config\") pod \"ovn-controller-metrics-rlrvx\" (UID: \"8f591cb1-902a-406e-b93a-56c2b7ec9cb8\") " pod="openstack/ovn-controller-metrics-rlrvx" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.150083 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"kube-api-access-5zcgm\" (UniqueName: \"kubernetes.io/projected/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-kube-api-access-5zcgm\") pod \"ovn-controller-metrics-rlrvx\" (UID: \"8f591cb1-902a-406e-b93a-56c2b7ec9cb8\") " pod="openstack/ovn-controller-metrics-rlrvx" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.150233 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-ovn-rundir\") pod \"ovn-controller-metrics-rlrvx\" (UID: \"8f591cb1-902a-406e-b93a-56c2b7ec9cb8\") " pod="openstack/ovn-controller-metrics-rlrvx" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.150402 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/361acf90-3fb7-4266-b2ff-20e514aa5719-ovsdbserver-nb\") pod \"dnsmasq-dns-f6b595d95-jhcqh\" (UID: \"361acf90-3fb7-4266-b2ff-20e514aa5719\") " pod="openstack/dnsmasq-dns-f6b595d95-jhcqh" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.165596 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.219932 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77597f887-p8x7f"] Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.250326 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-dc9d58d7-n7n2p"] Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.251514 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-dc9d58d7-n7n2p" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.252221 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/361acf90-3fb7-4266-b2ff-20e514aa5719-ovsdbserver-nb\") pod \"dnsmasq-dns-f6b595d95-jhcqh\" (UID: \"361acf90-3fb7-4266-b2ff-20e514aa5719\") " pod="openstack/dnsmasq-dns-f6b595d95-jhcqh" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.252285 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/361acf90-3fb7-4266-b2ff-20e514aa5719-config\") pod \"dnsmasq-dns-f6b595d95-jhcqh\" (UID: \"361acf90-3fb7-4266-b2ff-20e514aa5719\") " pod="openstack/dnsmasq-dns-f6b595d95-jhcqh" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.252324 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-ovs-rundir\") pod \"ovn-controller-metrics-rlrvx\" (UID: \"8f591cb1-902a-406e-b93a-56c2b7ec9cb8\") " pod="openstack/ovn-controller-metrics-rlrvx" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.252348 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-rlrvx\" (UID: \"8f591cb1-902a-406e-b93a-56c2b7ec9cb8\") " pod="openstack/ovn-controller-metrics-rlrvx" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.252372 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jstrj\" (UniqueName: 
\"kubernetes.io/projected/361acf90-3fb7-4266-b2ff-20e514aa5719-kube-api-access-jstrj\") pod \"dnsmasq-dns-f6b595d95-jhcqh\" (UID: \"361acf90-3fb7-4266-b2ff-20e514aa5719\") " pod="openstack/dnsmasq-dns-f6b595d95-jhcqh" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.252395 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/361acf90-3fb7-4266-b2ff-20e514aa5719-dns-svc\") pod \"dnsmasq-dns-f6b595d95-jhcqh\" (UID: \"361acf90-3fb7-4266-b2ff-20e514aa5719\") " pod="openstack/dnsmasq-dns-f6b595d95-jhcqh" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.252416 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-combined-ca-bundle\") pod \"ovn-controller-metrics-rlrvx\" (UID: \"8f591cb1-902a-406e-b93a-56c2b7ec9cb8\") " pod="openstack/ovn-controller-metrics-rlrvx" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.252446 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-config\") pod \"ovn-controller-metrics-rlrvx\" (UID: \"8f591cb1-902a-406e-b93a-56c2b7ec9cb8\") " pod="openstack/ovn-controller-metrics-rlrvx" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.252463 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zcgm\" (UniqueName: \"kubernetes.io/projected/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-kube-api-access-5zcgm\") pod \"ovn-controller-metrics-rlrvx\" (UID: \"8f591cb1-902a-406e-b93a-56c2b7ec9cb8\") " pod="openstack/ovn-controller-metrics-rlrvx" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.252491 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-ovn-rundir\") pod \"ovn-controller-metrics-rlrvx\" (UID: \"8f591cb1-902a-406e-b93a-56c2b7ec9cb8\") " pod="openstack/ovn-controller-metrics-rlrvx" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.253114 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-ovn-rundir\") pod \"ovn-controller-metrics-rlrvx\" (UID: \"8f591cb1-902a-406e-b93a-56c2b7ec9cb8\") " pod="openstack/ovn-controller-metrics-rlrvx" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.253176 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-ovs-rundir\") pod \"ovn-controller-metrics-rlrvx\" (UID: \"8f591cb1-902a-406e-b93a-56c2b7ec9cb8\") " pod="openstack/ovn-controller-metrics-rlrvx" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.253368 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/361acf90-3fb7-4266-b2ff-20e514aa5719-config\") pod \"dnsmasq-dns-f6b595d95-jhcqh\" (UID: \"361acf90-3fb7-4266-b2ff-20e514aa5719\") " pod="openstack/dnsmasq-dns-f6b595d95-jhcqh" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.253824 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/361acf90-3fb7-4266-b2ff-20e514aa5719-dns-svc\") pod \"dnsmasq-dns-f6b595d95-jhcqh\" (UID: 
\"361acf90-3fb7-4266-b2ff-20e514aa5719\") " pod="openstack/dnsmasq-dns-f6b595d95-jhcqh" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.253824 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/361acf90-3fb7-4266-b2ff-20e514aa5719-ovsdbserver-nb\") pod \"dnsmasq-dns-f6b595d95-jhcqh\" (UID: \"361acf90-3fb7-4266-b2ff-20e514aa5719\") " pod="openstack/dnsmasq-dns-f6b595d95-jhcqh" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.254012 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-config\") pod \"ovn-controller-metrics-rlrvx\" (UID: \"8f591cb1-902a-406e-b93a-56c2b7ec9cb8\") " pod="openstack/ovn-controller-metrics-rlrvx" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.254611 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.266998 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-combined-ca-bundle\") pod \"ovn-controller-metrics-rlrvx\" (UID: \"8f591cb1-902a-406e-b93a-56c2b7ec9cb8\") " pod="openstack/ovn-controller-metrics-rlrvx" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.269432 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-rlrvx\" (UID: \"8f591cb1-902a-406e-b93a-56c2b7ec9cb8\") " pod="openstack/ovn-controller-metrics-rlrvx" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.269588 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zcgm\" (UniqueName: \"kubernetes.io/projected/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-kube-api-access-5zcgm\") pod \"ovn-controller-metrics-rlrvx\" (UID: \"8f591cb1-902a-406e-b93a-56c2b7ec9cb8\") " pod="openstack/ovn-controller-metrics-rlrvx" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.271450 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jstrj\" (UniqueName: \"kubernetes.io/projected/361acf90-3fb7-4266-b2ff-20e514aa5719-kube-api-access-jstrj\") pod \"dnsmasq-dns-f6b595d95-jhcqh\" (UID: \"361acf90-3fb7-4266-b2ff-20e514aa5719\") " pod="openstack/dnsmasq-dns-f6b595d95-jhcqh" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.282807 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-dc9d58d7-n7n2p"] Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.353863 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f8634816-4d14-4ca6-87b4-a99d49fba1f2-ovsdbserver-nb\") pod \"dnsmasq-dns-dc9d58d7-n7n2p\" (UID: \"f8634816-4d14-4ca6-87b4-a99d49fba1f2\") " pod="openstack/dnsmasq-dns-dc9d58d7-n7n2p" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.354250 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f8634816-4d14-4ca6-87b4-a99d49fba1f2-dns-svc\") pod \"dnsmasq-dns-dc9d58d7-n7n2p\" (UID: \"f8634816-4d14-4ca6-87b4-a99d49fba1f2\") " pod="openstack/dnsmasq-dns-dc9d58d7-n7n2p" Oct 10 16:49:28 crc 
kubenswrapper[4799]: I1010 16:49:28.354289 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6rm6\" (UniqueName: \"kubernetes.io/projected/f8634816-4d14-4ca6-87b4-a99d49fba1f2-kube-api-access-n6rm6\") pod \"dnsmasq-dns-dc9d58d7-n7n2p\" (UID: \"f8634816-4d14-4ca6-87b4-a99d49fba1f2\") " pod="openstack/dnsmasq-dns-dc9d58d7-n7n2p" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.354330 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8634816-4d14-4ca6-87b4-a99d49fba1f2-config\") pod \"dnsmasq-dns-dc9d58d7-n7n2p\" (UID: \"f8634816-4d14-4ca6-87b4-a99d49fba1f2\") " pod="openstack/dnsmasq-dns-dc9d58d7-n7n2p" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.354361 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f8634816-4d14-4ca6-87b4-a99d49fba1f2-ovsdbserver-sb\") pod \"dnsmasq-dns-dc9d58d7-n7n2p\" (UID: \"f8634816-4d14-4ca6-87b4-a99d49fba1f2\") " pod="openstack/dnsmasq-dns-dc9d58d7-n7n2p" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.376825 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f6b595d95-jhcqh" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.407116 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-rlrvx" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.455593 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f8634816-4d14-4ca6-87b4-a99d49fba1f2-dns-svc\") pod \"dnsmasq-dns-dc9d58d7-n7n2p\" (UID: \"f8634816-4d14-4ca6-87b4-a99d49fba1f2\") " pod="openstack/dnsmasq-dns-dc9d58d7-n7n2p" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.455646 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n6rm6\" (UniqueName: \"kubernetes.io/projected/f8634816-4d14-4ca6-87b4-a99d49fba1f2-kube-api-access-n6rm6\") pod \"dnsmasq-dns-dc9d58d7-n7n2p\" (UID: \"f8634816-4d14-4ca6-87b4-a99d49fba1f2\") " pod="openstack/dnsmasq-dns-dc9d58d7-n7n2p" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.455707 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8634816-4d14-4ca6-87b4-a99d49fba1f2-config\") pod \"dnsmasq-dns-dc9d58d7-n7n2p\" (UID: \"f8634816-4d14-4ca6-87b4-a99d49fba1f2\") " pod="openstack/dnsmasq-dns-dc9d58d7-n7n2p" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.455733 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f8634816-4d14-4ca6-87b4-a99d49fba1f2-ovsdbserver-sb\") pod \"dnsmasq-dns-dc9d58d7-n7n2p\" (UID: \"f8634816-4d14-4ca6-87b4-a99d49fba1f2\") " pod="openstack/dnsmasq-dns-dc9d58d7-n7n2p" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.456510 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f8634816-4d14-4ca6-87b4-a99d49fba1f2-dns-svc\") pod \"dnsmasq-dns-dc9d58d7-n7n2p\" (UID: \"f8634816-4d14-4ca6-87b4-a99d49fba1f2\") " pod="openstack/dnsmasq-dns-dc9d58d7-n7n2p" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.456570 4799 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8634816-4d14-4ca6-87b4-a99d49fba1f2-config\") pod \"dnsmasq-dns-dc9d58d7-n7n2p\" (UID: \"f8634816-4d14-4ca6-87b4-a99d49fba1f2\") " pod="openstack/dnsmasq-dns-dc9d58d7-n7n2p" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.456691 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f8634816-4d14-4ca6-87b4-a99d49fba1f2-ovsdbserver-nb\") pod \"dnsmasq-dns-dc9d58d7-n7n2p\" (UID: \"f8634816-4d14-4ca6-87b4-a99d49fba1f2\") " pod="openstack/dnsmasq-dns-dc9d58d7-n7n2p" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.456946 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f8634816-4d14-4ca6-87b4-a99d49fba1f2-ovsdbserver-sb\") pod \"dnsmasq-dns-dc9d58d7-n7n2p\" (UID: \"f8634816-4d14-4ca6-87b4-a99d49fba1f2\") " pod="openstack/dnsmasq-dns-dc9d58d7-n7n2p" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.457797 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f8634816-4d14-4ca6-87b4-a99d49fba1f2-ovsdbserver-nb\") pod \"dnsmasq-dns-dc9d58d7-n7n2p\" (UID: \"f8634816-4d14-4ca6-87b4-a99d49fba1f2\") " pod="openstack/dnsmasq-dns-dc9d58d7-n7n2p" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.487840 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6rm6\" (UniqueName: \"kubernetes.io/projected/f8634816-4d14-4ca6-87b4-a99d49fba1f2-kube-api-access-n6rm6\") pod \"dnsmasq-dns-dc9d58d7-n7n2p\" (UID: \"f8634816-4d14-4ca6-87b4-a99d49fba1f2\") " pod="openstack/dnsmasq-dns-dc9d58d7-n7n2p" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.616258 4799 generic.go:334] "Generic (PLEG): container finished" podID="49436e14-c65e-4c80-aaf9-189e76f5a90c" containerID="117643d0fec08823b9a6438240b047c325fc3c94f150ce84d070ef9d1364f820" exitCode=0 Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.616650 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-644597f84c-zx44x" event={"ID":"49436e14-c65e-4c80-aaf9-189e76f5a90c","Type":"ContainerDied","Data":"117643d0fec08823b9a6438240b047c325fc3c94f150ce84d070ef9d1364f820"} Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.639966 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-dc9d58d7-n7n2p" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.717997 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.718455 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.785979 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.826729 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f6b595d95-jhcqh"] Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.917268 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-644597f84c-zx44x" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.923121 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-rlrvx"] Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.969043 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/49436e14-c65e-4c80-aaf9-189e76f5a90c-dns-svc\") pod \"49436e14-c65e-4c80-aaf9-189e76f5a90c\" (UID: \"49436e14-c65e-4c80-aaf9-189e76f5a90c\") " Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.969093 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pffhn\" (UniqueName: \"kubernetes.io/projected/49436e14-c65e-4c80-aaf9-189e76f5a90c-kube-api-access-pffhn\") pod \"49436e14-c65e-4c80-aaf9-189e76f5a90c\" (UID: \"49436e14-c65e-4c80-aaf9-189e76f5a90c\") " Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.969253 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49436e14-c65e-4c80-aaf9-189e76f5a90c-config\") pod \"49436e14-c65e-4c80-aaf9-189e76f5a90c\" (UID: \"49436e14-c65e-4c80-aaf9-189e76f5a90c\") " Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.972937 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49436e14-c65e-4c80-aaf9-189e76f5a90c-kube-api-access-pffhn" (OuterVolumeSpecName: "kube-api-access-pffhn") pod "49436e14-c65e-4c80-aaf9-189e76f5a90c" (UID: "49436e14-c65e-4c80-aaf9-189e76f5a90c"). InnerVolumeSpecName "kube-api-access-pffhn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.993802 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49436e14-c65e-4c80-aaf9-189e76f5a90c-config" (OuterVolumeSpecName: "config") pod "49436e14-c65e-4c80-aaf9-189e76f5a90c" (UID: "49436e14-c65e-4c80-aaf9-189e76f5a90c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:49:28 crc kubenswrapper[4799]: I1010 16:49:28.994576 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49436e14-c65e-4c80-aaf9-189e76f5a90c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "49436e14-c65e-4c80-aaf9-189e76f5a90c" (UID: "49436e14-c65e-4c80-aaf9-189e76f5a90c"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:49:29 crc kubenswrapper[4799]: I1010 16:49:29.071818 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49436e14-c65e-4c80-aaf9-189e76f5a90c-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:49:29 crc kubenswrapper[4799]: I1010 16:49:29.071857 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/49436e14-c65e-4c80-aaf9-189e76f5a90c-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 10 16:49:29 crc kubenswrapper[4799]: I1010 16:49:29.071869 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pffhn\" (UniqueName: \"kubernetes.io/projected/49436e14-c65e-4c80-aaf9-189e76f5a90c-kube-api-access-pffhn\") on node \"crc\" DevicePath \"\"" Oct 10 16:49:29 crc kubenswrapper[4799]: I1010 16:49:29.120006 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-dc9d58d7-n7n2p"] Oct 10 16:49:29 crc kubenswrapper[4799]: W1010 16:49:29.122773 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf8634816_4d14_4ca6_87b4_a99d49fba1f2.slice/crio-1686fc1fe45a50a6297e7bde004545948bff5293446cc0d1f68d84bca1c73ea9 WatchSource:0}: Error finding container 1686fc1fe45a50a6297e7bde004545948bff5293446cc0d1f68d84bca1c73ea9: Status 404 returned error can't find the container with id 1686fc1fe45a50a6297e7bde004545948bff5293446cc0d1f68d84bca1c73ea9 Oct 10 16:49:29 crc kubenswrapper[4799]: I1010 16:49:29.165917 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Oct 10 16:49:29 crc kubenswrapper[4799]: I1010 16:49:29.230649 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Oct 10 16:49:29 crc kubenswrapper[4799]: I1010 16:49:29.624805 4799 generic.go:334] "Generic (PLEG): container finished" podID="361acf90-3fb7-4266-b2ff-20e514aa5719" containerID="a834ec30d8d48687f5475575dcea4fabb24eacb955fcfec5280fb404fb6fa8d2" exitCode=0 Oct 10 16:49:29 crc kubenswrapper[4799]: I1010 16:49:29.624865 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f6b595d95-jhcqh" event={"ID":"361acf90-3fb7-4266-b2ff-20e514aa5719","Type":"ContainerDied","Data":"a834ec30d8d48687f5475575dcea4fabb24eacb955fcfec5280fb404fb6fa8d2"} Oct 10 16:49:29 crc kubenswrapper[4799]: I1010 16:49:29.624900 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f6b595d95-jhcqh" event={"ID":"361acf90-3fb7-4266-b2ff-20e514aa5719","Type":"ContainerStarted","Data":"7c27e32f6073c0f3556a6580dbe07669da93d67d2567c37e040d77c12f298eb2"} Oct 10 16:49:29 crc kubenswrapper[4799]: I1010 16:49:29.627495 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-rlrvx" event={"ID":"8f591cb1-902a-406e-b93a-56c2b7ec9cb8","Type":"ContainerStarted","Data":"a9f8bcc0a5609ec522020652528dba810f803f6d9045a461106095c9c271fefd"} Oct 10 16:49:29 crc kubenswrapper[4799]: I1010 16:49:29.627900 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-rlrvx" event={"ID":"8f591cb1-902a-406e-b93a-56c2b7ec9cb8","Type":"ContainerStarted","Data":"b2ea6e48cc96c927f39e9c23086aaa69e556051e0d653f9e5f4a9a8694982633"} Oct 10 16:49:29 crc kubenswrapper[4799]: I1010 16:49:29.631293 4799 generic.go:334] "Generic (PLEG): container finished" 
podID="f8634816-4d14-4ca6-87b4-a99d49fba1f2" containerID="7c82fa1520da2915128d0d5a9ff3afb1f85fe7782493c3fc868159880ebaa749" exitCode=0 Oct 10 16:49:29 crc kubenswrapper[4799]: I1010 16:49:29.631363 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dc9d58d7-n7n2p" event={"ID":"f8634816-4d14-4ca6-87b4-a99d49fba1f2","Type":"ContainerDied","Data":"7c82fa1520da2915128d0d5a9ff3afb1f85fe7782493c3fc868159880ebaa749"} Oct 10 16:49:29 crc kubenswrapper[4799]: I1010 16:49:29.631384 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dc9d58d7-n7n2p" event={"ID":"f8634816-4d14-4ca6-87b4-a99d49fba1f2","Type":"ContainerStarted","Data":"1686fc1fe45a50a6297e7bde004545948bff5293446cc0d1f68d84bca1c73ea9"} Oct 10 16:49:29 crc kubenswrapper[4799]: I1010 16:49:29.634082 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-77597f887-p8x7f" podUID="029ee4ad-013d-47f2-9590-ca0a0d85d51d" containerName="dnsmasq-dns" containerID="cri-o://22653454922cdecce643b707c9d9d8ae4eb73824c19893a802a888d7026ab942" gracePeriod=10 Oct 10 16:49:29 crc kubenswrapper[4799]: I1010 16:49:29.634212 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-644597f84c-zx44x" Oct 10 16:49:29 crc kubenswrapper[4799]: I1010 16:49:29.634461 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-644597f84c-zx44x" event={"ID":"49436e14-c65e-4c80-aaf9-189e76f5a90c","Type":"ContainerDied","Data":"31e7e4f1dcff9d956b4cde3f0aaf43761f08f3b4d5f4bd731780bafea61badce"} Oct 10 16:49:29 crc kubenswrapper[4799]: I1010 16:49:29.634614 4799 scope.go:117] "RemoveContainer" containerID="117643d0fec08823b9a6438240b047c325fc3c94f150ce84d070ef9d1364f820" Oct 10 16:49:29 crc kubenswrapper[4799]: I1010 16:49:29.661688 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Oct 10 16:49:29 crc kubenswrapper[4799]: I1010 16:49:29.661746 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Oct 10 16:49:29 crc kubenswrapper[4799]: I1010 16:49:29.681519 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-rlrvx" podStartSLOduration=1.681496751 podStartE2EDuration="1.681496751s" podCreationTimestamp="2025-10-10 16:49:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:49:29.675147544 +0000 UTC m=+1063.183471679" watchObservedRunningTime="2025-10-10 16:49:29.681496751 +0000 UTC m=+1063.189820876" Oct 10 16:49:29 crc kubenswrapper[4799]: I1010 16:49:29.723940 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Oct 10 16:49:29 crc kubenswrapper[4799]: I1010 16:49:29.769090 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Oct 10 16:49:29 crc kubenswrapper[4799]: I1010 16:49:29.774940 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Oct 10 16:49:29 crc kubenswrapper[4799]: I1010 16:49:29.951731 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-644597f84c-zx44x"] Oct 10 16:49:29 crc kubenswrapper[4799]: I1010 16:49:29.963634 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/dnsmasq-dns-644597f84c-zx44x"] Oct 10 16:49:29 crc kubenswrapper[4799]: I1010 16:49:29.994884 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.042464 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Oct 10 16:49:30 crc kubenswrapper[4799]: E1010 16:49:30.042995 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49436e14-c65e-4c80-aaf9-189e76f5a90c" containerName="init" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.043006 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="49436e14-c65e-4c80-aaf9-189e76f5a90c" containerName="init" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.043163 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="49436e14-c65e-4c80-aaf9-189e76f5a90c" containerName="init" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.043976 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.047447 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.047655 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.047792 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.053806 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-pqrzn" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.062362 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.113967 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-dd8jn"] Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.115613 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-dd8jn" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.123350 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-dd8jn"] Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.214564 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11\") " pod="openstack/ovn-northd-0" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.214645 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mnr4x\" (UniqueName: \"kubernetes.io/projected/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-kube-api-access-mnr4x\") pod \"ovn-northd-0\" (UID: \"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11\") " pod="openstack/ovn-northd-0" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.214724 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11\") " pod="openstack/ovn-northd-0" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.214795 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-scripts\") pod \"ovn-northd-0\" (UID: \"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11\") " pod="openstack/ovn-northd-0" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.214820 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11\") " pod="openstack/ovn-northd-0" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.214847 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9kkht\" (UniqueName: \"kubernetes.io/projected/b8ed5bbd-eed6-4474-80c1-ea9aed201450-kube-api-access-9kkht\") pod \"placement-db-create-dd8jn\" (UID: \"b8ed5bbd-eed6-4474-80c1-ea9aed201450\") " pod="openstack/placement-db-create-dd8jn" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.214907 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11\") " pod="openstack/ovn-northd-0" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.214928 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-config\") pod \"ovn-northd-0\" (UID: \"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11\") " pod="openstack/ovn-northd-0" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.219268 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77597f887-p8x7f" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.271703 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-vc285"] Oct 10 16:49:30 crc kubenswrapper[4799]: E1010 16:49:30.272053 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="029ee4ad-013d-47f2-9590-ca0a0d85d51d" containerName="dnsmasq-dns" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.272070 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="029ee4ad-013d-47f2-9590-ca0a0d85d51d" containerName="dnsmasq-dns" Oct 10 16:49:30 crc kubenswrapper[4799]: E1010 16:49:30.272099 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="029ee4ad-013d-47f2-9590-ca0a0d85d51d" containerName="init" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.272104 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="029ee4ad-013d-47f2-9590-ca0a0d85d51d" containerName="init" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.272241 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="029ee4ad-013d-47f2-9590-ca0a0d85d51d" containerName="dnsmasq-dns" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.272867 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-vc285" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.309215 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-vc285"] Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.315498 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j6f7b\" (UniqueName: \"kubernetes.io/projected/029ee4ad-013d-47f2-9590-ca0a0d85d51d-kube-api-access-j6f7b\") pod \"029ee4ad-013d-47f2-9590-ca0a0d85d51d\" (UID: \"029ee4ad-013d-47f2-9590-ca0a0d85d51d\") " Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.315541 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/029ee4ad-013d-47f2-9590-ca0a0d85d51d-config\") pod \"029ee4ad-013d-47f2-9590-ca0a0d85d51d\" (UID: \"029ee4ad-013d-47f2-9590-ca0a0d85d51d\") " Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.315619 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/029ee4ad-013d-47f2-9590-ca0a0d85d51d-dns-svc\") pod \"029ee4ad-013d-47f2-9590-ca0a0d85d51d\" (UID: \"029ee4ad-013d-47f2-9590-ca0a0d85d51d\") " Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.315851 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11\") " pod="openstack/ovn-northd-0" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.315887 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-scripts\") pod \"ovn-northd-0\" (UID: \"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11\") " pod="openstack/ovn-northd-0" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.315906 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: 
\"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11\") " pod="openstack/ovn-northd-0" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.315927 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9kkht\" (UniqueName: \"kubernetes.io/projected/b8ed5bbd-eed6-4474-80c1-ea9aed201450-kube-api-access-9kkht\") pod \"placement-db-create-dd8jn\" (UID: \"b8ed5bbd-eed6-4474-80c1-ea9aed201450\") " pod="openstack/placement-db-create-dd8jn" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.315973 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11\") " pod="openstack/ovn-northd-0" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.315989 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-config\") pod \"ovn-northd-0\" (UID: \"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11\") " pod="openstack/ovn-northd-0" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.316014 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11\") " pod="openstack/ovn-northd-0" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.316049 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mnr4x\" (UniqueName: \"kubernetes.io/projected/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-kube-api-access-mnr4x\") pod \"ovn-northd-0\" (UID: \"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11\") " pod="openstack/ovn-northd-0" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.318080 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11\") " pod="openstack/ovn-northd-0" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.318106 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-config\") pod \"ovn-northd-0\" (UID: \"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11\") " pod="openstack/ovn-northd-0" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.318550 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-scripts\") pod \"ovn-northd-0\" (UID: \"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11\") " pod="openstack/ovn-northd-0" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.322913 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11\") " pod="openstack/ovn-northd-0" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.323695 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-ovn-northd-tls-certs\") pod 
\"ovn-northd-0\" (UID: \"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11\") " pod="openstack/ovn-northd-0" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.329419 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11\") " pod="openstack/ovn-northd-0" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.332701 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9kkht\" (UniqueName: \"kubernetes.io/projected/b8ed5bbd-eed6-4474-80c1-ea9aed201450-kube-api-access-9kkht\") pod \"placement-db-create-dd8jn\" (UID: \"b8ed5bbd-eed6-4474-80c1-ea9aed201450\") " pod="openstack/placement-db-create-dd8jn" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.334543 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/029ee4ad-013d-47f2-9590-ca0a0d85d51d-kube-api-access-j6f7b" (OuterVolumeSpecName: "kube-api-access-j6f7b") pod "029ee4ad-013d-47f2-9590-ca0a0d85d51d" (UID: "029ee4ad-013d-47f2-9590-ca0a0d85d51d"). InnerVolumeSpecName "kube-api-access-j6f7b". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.335793 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mnr4x\" (UniqueName: \"kubernetes.io/projected/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-kube-api-access-mnr4x\") pod \"ovn-northd-0\" (UID: \"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11\") " pod="openstack/ovn-northd-0" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.374117 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.376612 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/029ee4ad-013d-47f2-9590-ca0a0d85d51d-config" (OuterVolumeSpecName: "config") pod "029ee4ad-013d-47f2-9590-ca0a0d85d51d" (UID: "029ee4ad-013d-47f2-9590-ca0a0d85d51d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.378120 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/029ee4ad-013d-47f2-9590-ca0a0d85d51d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "029ee4ad-013d-47f2-9590-ca0a0d85d51d" (UID: "029ee4ad-013d-47f2-9590-ca0a0d85d51d"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.417313 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpxcw\" (UniqueName: \"kubernetes.io/projected/2fcb92c1-d267-4996-9e6f-d89982c6379d-kube-api-access-zpxcw\") pod \"glance-db-create-vc285\" (UID: \"2fcb92c1-d267-4996-9e6f-d89982c6379d\") " pod="openstack/glance-db-create-vc285" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.417397 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j6f7b\" (UniqueName: \"kubernetes.io/projected/029ee4ad-013d-47f2-9590-ca0a0d85d51d-kube-api-access-j6f7b\") on node \"crc\" DevicePath \"\"" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.417413 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/029ee4ad-013d-47f2-9590-ca0a0d85d51d-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.417426 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/029ee4ad-013d-47f2-9590-ca0a0d85d51d-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.465531 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-dd8jn" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.521421 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpxcw\" (UniqueName: \"kubernetes.io/projected/2fcb92c1-d267-4996-9e6f-d89982c6379d-kube-api-access-zpxcw\") pod \"glance-db-create-vc285\" (UID: \"2fcb92c1-d267-4996-9e6f-d89982c6379d\") " pod="openstack/glance-db-create-vc285" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.542480 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpxcw\" (UniqueName: \"kubernetes.io/projected/2fcb92c1-d267-4996-9e6f-d89982c6379d-kube-api-access-zpxcw\") pod \"glance-db-create-vc285\" (UID: \"2fcb92c1-d267-4996-9e6f-d89982c6379d\") " pod="openstack/glance-db-create-vc285" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.623300 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-vc285" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.652540 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f6b595d95-jhcqh" event={"ID":"361acf90-3fb7-4266-b2ff-20e514aa5719","Type":"ContainerStarted","Data":"c7837ffcf72637b8ddab1ef93e86267c646273d3a258aba43e5f77f95856ef58"} Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.657326 4799 generic.go:334] "Generic (PLEG): container finished" podID="029ee4ad-013d-47f2-9590-ca0a0d85d51d" containerID="22653454922cdecce643b707c9d9d8ae4eb73824c19893a802a888d7026ab942" exitCode=0 Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.657443 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77597f887-p8x7f" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.659614 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-f6b595d95-jhcqh" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.659649 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77597f887-p8x7f" event={"ID":"029ee4ad-013d-47f2-9590-ca0a0d85d51d","Type":"ContainerDied","Data":"22653454922cdecce643b707c9d9d8ae4eb73824c19893a802a888d7026ab942"} Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.659688 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77597f887-p8x7f" event={"ID":"029ee4ad-013d-47f2-9590-ca0a0d85d51d","Type":"ContainerDied","Data":"9797a4e02da1b48c5d27c4a5f1d8ae05e69979a4982dae41413c4f5bb630eca1"} Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.659704 4799 scope.go:117] "RemoveContainer" containerID="22653454922cdecce643b707c9d9d8ae4eb73824c19893a802a888d7026ab942" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.664615 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dc9d58d7-n7n2p" event={"ID":"f8634816-4d14-4ca6-87b4-a99d49fba1f2","Type":"ContainerStarted","Data":"3666feec049d8ede205041982bdb850b076fda18e0a3174503953202ee12b045"} Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.664747 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-dc9d58d7-n7n2p" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.673667 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-f6b595d95-jhcqh" podStartSLOduration=2.673646067 podStartE2EDuration="2.673646067s" podCreationTimestamp="2025-10-10 16:49:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:49:30.668866949 +0000 UTC m=+1064.177191074" watchObservedRunningTime="2025-10-10 16:49:30.673646067 +0000 UTC m=+1064.181970182" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.687206 4799 scope.go:117] "RemoveContainer" containerID="70fbe4e9e45ade1b80f1b373d107742d184e1ad80eef9999f36f3695a64f6dbb" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.711158 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-dc9d58d7-n7n2p" podStartSLOduration=2.711140893 podStartE2EDuration="2.711140893s" podCreationTimestamp="2025-10-10 16:49:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:49:30.695026035 +0000 UTC m=+1064.203350150" watchObservedRunningTime="2025-10-10 16:49:30.711140893 +0000 UTC m=+1064.219465008" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.713033 4799 scope.go:117] "RemoveContainer" containerID="22653454922cdecce643b707c9d9d8ae4eb73824c19893a802a888d7026ab942" Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.713865 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77597f887-p8x7f"] Oct 10 16:49:30 crc kubenswrapper[4799]: E1010 16:49:30.714096 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22653454922cdecce643b707c9d9d8ae4eb73824c19893a802a888d7026ab942\": container with ID starting with 22653454922cdecce643b707c9d9d8ae4eb73824c19893a802a888d7026ab942 not 
found: ID does not exist" containerID="22653454922cdecce643b707c9d9d8ae4eb73824c19893a802a888d7026ab942"
Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.714128 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22653454922cdecce643b707c9d9d8ae4eb73824c19893a802a888d7026ab942"} err="failed to get container status \"22653454922cdecce643b707c9d9d8ae4eb73824c19893a802a888d7026ab942\": rpc error: code = NotFound desc = could not find container \"22653454922cdecce643b707c9d9d8ae4eb73824c19893a802a888d7026ab942\": container with ID starting with 22653454922cdecce643b707c9d9d8ae4eb73824c19893a802a888d7026ab942 not found: ID does not exist"
Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.714154 4799 scope.go:117] "RemoveContainer" containerID="70fbe4e9e45ade1b80f1b373d107742d184e1ad80eef9999f36f3695a64f6dbb"
Oct 10 16:49:30 crc kubenswrapper[4799]: E1010 16:49:30.714409 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70fbe4e9e45ade1b80f1b373d107742d184e1ad80eef9999f36f3695a64f6dbb\": container with ID starting with 70fbe4e9e45ade1b80f1b373d107742d184e1ad80eef9999f36f3695a64f6dbb not found: ID does not exist" containerID="70fbe4e9e45ade1b80f1b373d107742d184e1ad80eef9999f36f3695a64f6dbb"
Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.714448 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70fbe4e9e45ade1b80f1b373d107742d184e1ad80eef9999f36f3695a64f6dbb"} err="failed to get container status \"70fbe4e9e45ade1b80f1b373d107742d184e1ad80eef9999f36f3695a64f6dbb\": rpc error: code = NotFound desc = could not find container \"70fbe4e9e45ade1b80f1b373d107742d184e1ad80eef9999f36f3695a64f6dbb\": container with ID starting with 70fbe4e9e45ade1b80f1b373d107742d184e1ad80eef9999f36f3695a64f6dbb not found: ID does not exist"
Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.719177 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-77597f887-p8x7f"]
Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.729504 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0"
Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.782660 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Oct 10 16:49:30 crc kubenswrapper[4799]: W1010 16:49:30.828950 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfdfeebc0_d50f_42f8_a461_b0aea7ba6a11.slice/crio-367957c4450034f9a361064a669daefa18feb7cc6133316ac9035859a9695dc3 WatchSource:0}: Error finding container 367957c4450034f9a361064a669daefa18feb7cc6133316ac9035859a9695dc3: Status 404 returned error can't find the container with id 367957c4450034f9a361064a669daefa18feb7cc6133316ac9035859a9695dc3
Oct 10 16:49:30 crc kubenswrapper[4799]: I1010 16:49:30.887628 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-dd8jn"]
Oct 10 16:49:30 crc kubenswrapper[4799]: W1010 16:49:30.890080 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb8ed5bbd_eed6_4474_80c1_ea9aed201450.slice/crio-ca0d82485472f54cbce241fe15b6231356a390e4111ad1bf30ddbbec4a0ebb5a WatchSource:0}: Error finding container ca0d82485472f54cbce241fe15b6231356a390e4111ad1bf30ddbbec4a0ebb5a: Status 404 returned error can't find the container with id ca0d82485472f54cbce241fe15b6231356a390e4111ad1bf30ddbbec4a0ebb5a
Oct 10 16:49:31 crc kubenswrapper[4799]: I1010 16:49:31.131320 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-vc285"]
Oct 10 16:49:31 crc kubenswrapper[4799]: W1010 16:49:31.135063 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2fcb92c1_d267_4996_9e6f_d89982c6379d.slice/crio-552076376b9f730afd508b2d96e8d6576706c99185cbad4266179099ba03f664 WatchSource:0}: Error finding container 552076376b9f730afd508b2d96e8d6576706c99185cbad4266179099ba03f664: Status 404 returned error can't find the container with id 552076376b9f730afd508b2d96e8d6576706c99185cbad4266179099ba03f664
Oct 10 16:49:31 crc kubenswrapper[4799]: I1010 16:49:31.415005 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="029ee4ad-013d-47f2-9590-ca0a0d85d51d" path="/var/lib/kubelet/pods/029ee4ad-013d-47f2-9590-ca0a0d85d51d/volumes"
Oct 10 16:49:31 crc kubenswrapper[4799]: I1010 16:49:31.416362 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49436e14-c65e-4c80-aaf9-189e76f5a90c" path="/var/lib/kubelet/pods/49436e14-c65e-4c80-aaf9-189e76f5a90c/volumes"
Oct 10 16:49:31 crc kubenswrapper[4799]: I1010 16:49:31.503683 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0"
Oct 10 16:49:31 crc kubenswrapper[4799]: I1010 16:49:31.567435 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f6b595d95-jhcqh"]
Oct 10 16:49:31 crc kubenswrapper[4799]: I1010 16:49:31.591913 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7b587f8db7-jxsc9"]
Oct 10 16:49:31 crc kubenswrapper[4799]: I1010 16:49:31.593179 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7b587f8db7-jxsc9"
Oct 10 16:49:31 crc kubenswrapper[4799]: I1010 16:49:31.603235 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7b587f8db7-jxsc9"]
Oct 10 16:49:31 crc kubenswrapper[4799]: I1010 16:49:31.691341 4799 generic.go:334] "Generic (PLEG): container finished" podID="b8ed5bbd-eed6-4474-80c1-ea9aed201450" containerID="20ca13845dacc22922c1aceaf375f23c384acc1113f31fe2986a13feeabab05e" exitCode=0
Oct 10 16:49:31 crc kubenswrapper[4799]: I1010 16:49:31.691431 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-dd8jn" event={"ID":"b8ed5bbd-eed6-4474-80c1-ea9aed201450","Type":"ContainerDied","Data":"20ca13845dacc22922c1aceaf375f23c384acc1113f31fe2986a13feeabab05e"}
Oct 10 16:49:31 crc kubenswrapper[4799]: I1010 16:49:31.691459 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-dd8jn" event={"ID":"b8ed5bbd-eed6-4474-80c1-ea9aed201450","Type":"ContainerStarted","Data":"ca0d82485472f54cbce241fe15b6231356a390e4111ad1bf30ddbbec4a0ebb5a"}
Oct 10 16:49:31 crc kubenswrapper[4799]: I1010 16:49:31.702824 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11","Type":"ContainerStarted","Data":"367957c4450034f9a361064a669daefa18feb7cc6133316ac9035859a9695dc3"}
Oct 10 16:49:31 crc kubenswrapper[4799]: I1010 16:49:31.704720 4799 generic.go:334] "Generic (PLEG): container finished" podID="2fcb92c1-d267-4996-9e6f-d89982c6379d" containerID="95dc04655e0b7d260ecc540674c64342a8f66e0ec033ce83971f3cfb94ab556c" exitCode=0
Oct 10 16:49:31 crc kubenswrapper[4799]: I1010 16:49:31.704795 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-vc285" event={"ID":"2fcb92c1-d267-4996-9e6f-d89982c6379d","Type":"ContainerDied","Data":"95dc04655e0b7d260ecc540674c64342a8f66e0ec033ce83971f3cfb94ab556c"}
Oct 10 16:49:31 crc kubenswrapper[4799]: I1010 16:49:31.705704 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-vc285" event={"ID":"2fcb92c1-d267-4996-9e6f-d89982c6379d","Type":"ContainerStarted","Data":"552076376b9f730afd508b2d96e8d6576706c99185cbad4266179099ba03f664"}
Oct 10 16:49:31 crc kubenswrapper[4799]: I1010 16:49:31.746009 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6-dns-svc\") pod \"dnsmasq-dns-7b587f8db7-jxsc9\" (UID: \"07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6\") " pod="openstack/dnsmasq-dns-7b587f8db7-jxsc9"
Oct 10 16:49:31 crc kubenswrapper[4799]: I1010 16:49:31.746111 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6-ovsdbserver-nb\") pod \"dnsmasq-dns-7b587f8db7-jxsc9\" (UID: \"07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6\") " pod="openstack/dnsmasq-dns-7b587f8db7-jxsc9"
Oct 10 16:49:31 crc kubenswrapper[4799]: I1010 16:49:31.746147 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6-ovsdbserver-sb\") pod \"dnsmasq-dns-7b587f8db7-jxsc9\" (UID: \"07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6\") " pod="openstack/dnsmasq-dns-7b587f8db7-jxsc9"
Oct 10 16:49:31 crc kubenswrapper[4799]: I1010 16:49:31.746203 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ph7bl\" (UniqueName: \"kubernetes.io/projected/07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6-kube-api-access-ph7bl\") pod \"dnsmasq-dns-7b587f8db7-jxsc9\" (UID: \"07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6\") " pod="openstack/dnsmasq-dns-7b587f8db7-jxsc9"
Oct 10 16:49:31 crc kubenswrapper[4799]: I1010 16:49:31.746264 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6-config\") pod \"dnsmasq-dns-7b587f8db7-jxsc9\" (UID: \"07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6\") " pod="openstack/dnsmasq-dns-7b587f8db7-jxsc9"
Oct 10 16:49:31 crc kubenswrapper[4799]: I1010 16:49:31.848239 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6-config\") pod \"dnsmasq-dns-7b587f8db7-jxsc9\" (UID: \"07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6\") " pod="openstack/dnsmasq-dns-7b587f8db7-jxsc9"
Oct 10 16:49:31 crc kubenswrapper[4799]: I1010 16:49:31.848381 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6-dns-svc\") pod \"dnsmasq-dns-7b587f8db7-jxsc9\" (UID: \"07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6\") " pod="openstack/dnsmasq-dns-7b587f8db7-jxsc9"
Oct 10 16:49:31 crc kubenswrapper[4799]: I1010 16:49:31.848525 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6-ovsdbserver-nb\") pod \"dnsmasq-dns-7b587f8db7-jxsc9\" (UID: \"07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6\") " pod="openstack/dnsmasq-dns-7b587f8db7-jxsc9"
Oct 10 16:49:31 crc kubenswrapper[4799]: I1010 16:49:31.848554 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6-ovsdbserver-sb\") pod \"dnsmasq-dns-7b587f8db7-jxsc9\" (UID: \"07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6\") " pod="openstack/dnsmasq-dns-7b587f8db7-jxsc9"
Oct 10 16:49:31 crc kubenswrapper[4799]: I1010 16:49:31.848629 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ph7bl\" (UniqueName: \"kubernetes.io/projected/07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6-kube-api-access-ph7bl\") pod \"dnsmasq-dns-7b587f8db7-jxsc9\" (UID: \"07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6\") " pod="openstack/dnsmasq-dns-7b587f8db7-jxsc9"
Oct 10 16:49:31 crc kubenswrapper[4799]: I1010 16:49:31.850666 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6-ovsdbserver-nb\") pod \"dnsmasq-dns-7b587f8db7-jxsc9\" (UID: \"07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6\") " pod="openstack/dnsmasq-dns-7b587f8db7-jxsc9"
Oct 10 16:49:31 crc kubenswrapper[4799]: I1010 16:49:31.851501 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6-dns-svc\") pod \"dnsmasq-dns-7b587f8db7-jxsc9\" (UID: \"07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6\") " pod="openstack/dnsmasq-dns-7b587f8db7-jxsc9"
Oct 10 16:49:31 crc kubenswrapper[4799]: I1010 16:49:31.851543 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6-ovsdbserver-sb\") pod \"dnsmasq-dns-7b587f8db7-jxsc9\" (UID: \"07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6\") " pod="openstack/dnsmasq-dns-7b587f8db7-jxsc9"
Oct 10 16:49:31 crc kubenswrapper[4799]: I1010 16:49:31.852226 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6-config\") pod \"dnsmasq-dns-7b587f8db7-jxsc9\" (UID: \"07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6\") " pod="openstack/dnsmasq-dns-7b587f8db7-jxsc9"
Oct 10 16:49:31 crc kubenswrapper[4799]: I1010 16:49:31.885636 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ph7bl\" (UniqueName: \"kubernetes.io/projected/07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6-kube-api-access-ph7bl\") pod \"dnsmasq-dns-7b587f8db7-jxsc9\" (UID: \"07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6\") " pod="openstack/dnsmasq-dns-7b587f8db7-jxsc9"
Oct 10 16:49:31 crc kubenswrapper[4799]: I1010 16:49:31.920683 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7b587f8db7-jxsc9"
Oct 10 16:49:32 crc kubenswrapper[4799]: I1010 16:49:32.493363 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7b587f8db7-jxsc9"]
Oct 10 16:49:32 crc kubenswrapper[4799]: I1010 16:49:32.725872 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11","Type":"ContainerStarted","Data":"0468ca6c6abf7cc599e692ce9780b9732d1baafe60c9e8e67b98e76b296d2b35"}
Oct 10 16:49:32 crc kubenswrapper[4799]: I1010 16:49:32.726307 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11","Type":"ContainerStarted","Data":"ff8624f34fcb1a15fffee56784a5608f01adefaa3172b0477e52de09e0786400"}
Oct 10 16:49:32 crc kubenswrapper[4799]: I1010 16:49:32.726351 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0"
Oct 10 16:49:32 crc kubenswrapper[4799]: I1010 16:49:32.731862 4799 generic.go:334] "Generic (PLEG): container finished" podID="07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6" containerID="b7b8fc584c61d1416fc6fe84452ee391b63af3b2562fbacb013a63f417d8343d" exitCode=0
Oct 10 16:49:32 crc kubenswrapper[4799]: I1010 16:49:32.732255 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b587f8db7-jxsc9" event={"ID":"07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6","Type":"ContainerDied","Data":"b7b8fc584c61d1416fc6fe84452ee391b63af3b2562fbacb013a63f417d8343d"}
Oct 10 16:49:32 crc kubenswrapper[4799]: I1010 16:49:32.732305 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b587f8db7-jxsc9" event={"ID":"07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6","Type":"ContainerStarted","Data":"974112c8150d77187fa02da9b5d0481fb5c198221c36009668bc6fd00e58ecc3"}
Oct 10 16:49:32 crc kubenswrapper[4799]: I1010 16:49:32.732398 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-f6b595d95-jhcqh" podUID="361acf90-3fb7-4266-b2ff-20e514aa5719" containerName="dnsmasq-dns" containerID="cri-o://c7837ffcf72637b8ddab1ef93e86267c646273d3a258aba43e5f77f95856ef58" gracePeriod=10
Oct 10 16:49:32 crc kubenswrapper[4799]: I1010 16:49:32.746434 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"]
Oct 10 16:49:32 crc kubenswrapper[4799]: I1010 16:49:32.766332 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0"
Oct 10 16:49:32 crc kubenswrapper[4799]: I1010 16:49:32.768381 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf"
Oct 10 16:49:32 crc kubenswrapper[4799]: I1010 16:49:32.769566 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files"
Oct 10 16:49:32 crc kubenswrapper[4799]: I1010 16:49:32.769735 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data"
Oct 10 16:49:32 crc kubenswrapper[4799]: I1010 16:49:32.773656 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-klvb9"
Oct 10 16:49:32 crc kubenswrapper[4799]: I1010 16:49:32.783056 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"]
Oct 10 16:49:32 crc kubenswrapper[4799]: I1010 16:49:32.788856 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=1.5145164370000002 podStartE2EDuration="2.788833441s" podCreationTimestamp="2025-10-10 16:49:30 +0000 UTC" firstStartedPulling="2025-10-10 16:49:30.83082984 +0000 UTC m=+1064.339153945" lastFinishedPulling="2025-10-10 16:49:32.105146834 +0000 UTC m=+1065.613470949" observedRunningTime="2025-10-10 16:49:32.758462601 +0000 UTC m=+1066.266786726" watchObservedRunningTime="2025-10-10 16:49:32.788833441 +0000 UTC m=+1066.297157566"
Oct 10 16:49:32 crc kubenswrapper[4799]: I1010 16:49:32.866622 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"swift-storage-0\" (UID: \"68ea0968-070a-41d4-b023-31557446c4dc\") " pod="openstack/swift-storage-0"
Oct 10 16:49:32 crc kubenswrapper[4799]: I1010 16:49:32.866684 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/68ea0968-070a-41d4-b023-31557446c4dc-cache\") pod \"swift-storage-0\" (UID: \"68ea0968-070a-41d4-b023-31557446c4dc\") " pod="openstack/swift-storage-0"
Oct 10 16:49:32 crc kubenswrapper[4799]: I1010 16:49:32.866739 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/68ea0968-070a-41d4-b023-31557446c4dc-etc-swift\") pod \"swift-storage-0\" (UID: \"68ea0968-070a-41d4-b023-31557446c4dc\") " pod="openstack/swift-storage-0"
Oct 10 16:49:32 crc kubenswrapper[4799]: I1010 16:49:32.866774 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cc9s9\" (UniqueName: \"kubernetes.io/projected/68ea0968-070a-41d4-b023-31557446c4dc-kube-api-access-cc9s9\") pod \"swift-storage-0\" (UID: \"68ea0968-070a-41d4-b023-31557446c4dc\") " pod="openstack/swift-storage-0"
Oct 10 16:49:32 crc kubenswrapper[4799]: I1010 16:49:32.866972 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/68ea0968-070a-41d4-b023-31557446c4dc-lock\") pod \"swift-storage-0\" (UID: \"68ea0968-070a-41d4-b023-31557446c4dc\") " pod="openstack/swift-storage-0"
Oct 10 16:49:32 crc kubenswrapper[4799]: I1010 16:49:32.968669 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/68ea0968-070a-41d4-b023-31557446c4dc-lock\") pod \"swift-storage-0\" (UID: \"68ea0968-070a-41d4-b023-31557446c4dc\") " pod="openstack/swift-storage-0"
Oct 10 16:49:32 crc kubenswrapper[4799]: I1010 16:49:32.968810 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"swift-storage-0\" (UID: \"68ea0968-070a-41d4-b023-31557446c4dc\") " pod="openstack/swift-storage-0"
Oct 10 16:49:32 crc kubenswrapper[4799]: I1010 16:49:32.968839 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/68ea0968-070a-41d4-b023-31557446c4dc-cache\") pod \"swift-storage-0\" (UID: \"68ea0968-070a-41d4-b023-31557446c4dc\") " pod="openstack/swift-storage-0"
Oct 10 16:49:32 crc kubenswrapper[4799]: I1010 16:49:32.968867 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/68ea0968-070a-41d4-b023-31557446c4dc-etc-swift\") pod \"swift-storage-0\" (UID: \"68ea0968-070a-41d4-b023-31557446c4dc\") " pod="openstack/swift-storage-0"
Oct 10 16:49:32 crc kubenswrapper[4799]: I1010 16:49:32.968890 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cc9s9\" (UniqueName: \"kubernetes.io/projected/68ea0968-070a-41d4-b023-31557446c4dc-kube-api-access-cc9s9\") pod \"swift-storage-0\" (UID: \"68ea0968-070a-41d4-b023-31557446c4dc\") " pod="openstack/swift-storage-0"
Oct 10 16:49:32 crc kubenswrapper[4799]: I1010 16:49:32.969141 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"swift-storage-0\" (UID: \"68ea0968-070a-41d4-b023-31557446c4dc\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/swift-storage-0"
Oct 10 16:49:32 crc kubenswrapper[4799]: E1010 16:49:32.969172 4799 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Oct 10 16:49:32 crc kubenswrapper[4799]: E1010 16:49:32.969197 4799 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Oct 10 16:49:32 crc kubenswrapper[4799]: E1010 16:49:32.969256 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/68ea0968-070a-41d4-b023-31557446c4dc-etc-swift podName:68ea0968-070a-41d4-b023-31557446c4dc nodeName:}" failed. No retries permitted until 2025-10-10 16:49:33.469235237 +0000 UTC m=+1066.977559432 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/68ea0968-070a-41d4-b023-31557446c4dc-etc-swift") pod "swift-storage-0" (UID: "68ea0968-070a-41d4-b023-31557446c4dc") : configmap "swift-ring-files" not found
Oct 10 16:49:32 crc kubenswrapper[4799]: I1010 16:49:32.969330 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/68ea0968-070a-41d4-b023-31557446c4dc-lock\") pod \"swift-storage-0\" (UID: \"68ea0968-070a-41d4-b023-31557446c4dc\") " pod="openstack/swift-storage-0"
Oct 10 16:49:32 crc kubenswrapper[4799]: I1010 16:49:32.969380 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/68ea0968-070a-41d4-b023-31557446c4dc-cache\") pod \"swift-storage-0\" (UID: \"68ea0968-070a-41d4-b023-31557446c4dc\") " pod="openstack/swift-storage-0"
Oct 10 16:49:32 crc kubenswrapper[4799]: I1010 16:49:32.991335 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cc9s9\" (UniqueName: \"kubernetes.io/projected/68ea0968-070a-41d4-b023-31557446c4dc-kube-api-access-cc9s9\") pod \"swift-storage-0\" (UID: \"68ea0968-070a-41d4-b023-31557446c4dc\") " pod="openstack/swift-storage-0"
Oct 10 16:49:32 crc kubenswrapper[4799]: I1010 16:49:32.995901 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"swift-storage-0\" (UID: \"68ea0968-070a-41d4-b023-31557446c4dc\") " pod="openstack/swift-storage-0"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.072434 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-dd8jn"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.174344 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9kkht\" (UniqueName: \"kubernetes.io/projected/b8ed5bbd-eed6-4474-80c1-ea9aed201450-kube-api-access-9kkht\") pod \"b8ed5bbd-eed6-4474-80c1-ea9aed201450\" (UID: \"b8ed5bbd-eed6-4474-80c1-ea9aed201450\") "
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.223067 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8ed5bbd-eed6-4474-80c1-ea9aed201450-kube-api-access-9kkht" (OuterVolumeSpecName: "kube-api-access-9kkht") pod "b8ed5bbd-eed6-4474-80c1-ea9aed201450" (UID: "b8ed5bbd-eed6-4474-80c1-ea9aed201450"). InnerVolumeSpecName "kube-api-access-9kkht". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.229689 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-vc285"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.283422 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zpxcw\" (UniqueName: \"kubernetes.io/projected/2fcb92c1-d267-4996-9e6f-d89982c6379d-kube-api-access-zpxcw\") pod \"2fcb92c1-d267-4996-9e6f-d89982c6379d\" (UID: \"2fcb92c1-d267-4996-9e6f-d89982c6379d\") "
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.290401 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f6b595d95-jhcqh"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.298395 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fcb92c1-d267-4996-9e6f-d89982c6379d-kube-api-access-zpxcw" (OuterVolumeSpecName: "kube-api-access-zpxcw") pod "2fcb92c1-d267-4996-9e6f-d89982c6379d" (UID: "2fcb92c1-d267-4996-9e6f-d89982c6379d"). InnerVolumeSpecName "kube-api-access-zpxcw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.344811 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-46sn9"]
Oct 10 16:49:33 crc kubenswrapper[4799]: E1010 16:49:33.345614 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="361acf90-3fb7-4266-b2ff-20e514aa5719" containerName="dnsmasq-dns"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.345634 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="361acf90-3fb7-4266-b2ff-20e514aa5719" containerName="dnsmasq-dns"
Oct 10 16:49:33 crc kubenswrapper[4799]: E1010 16:49:33.345650 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8ed5bbd-eed6-4474-80c1-ea9aed201450" containerName="mariadb-database-create"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.345657 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8ed5bbd-eed6-4474-80c1-ea9aed201450" containerName="mariadb-database-create"
Oct 10 16:49:33 crc kubenswrapper[4799]: E1010 16:49:33.345703 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="361acf90-3fb7-4266-b2ff-20e514aa5719" containerName="init"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.345710 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="361acf90-3fb7-4266-b2ff-20e514aa5719" containerName="init"
Oct 10 16:49:33 crc kubenswrapper[4799]: E1010 16:49:33.345717 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fcb92c1-d267-4996-9e6f-d89982c6379d" containerName="mariadb-database-create"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.345723 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fcb92c1-d267-4996-9e6f-d89982c6379d" containerName="mariadb-database-create"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.345976 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="361acf90-3fb7-4266-b2ff-20e514aa5719" containerName="dnsmasq-dns"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.346000 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fcb92c1-d267-4996-9e6f-d89982c6379d" containerName="mariadb-database-create"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.346019 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8ed5bbd-eed6-4474-80c1-ea9aed201450" containerName="mariadb-database-create"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.346663 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-46sn9"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.350193 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9kkht\" (UniqueName: \"kubernetes.io/projected/b8ed5bbd-eed6-4474-80c1-ea9aed201450-kube-api-access-9kkht\") on node \"crc\" DevicePath \"\""
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.350226 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zpxcw\" (UniqueName: \"kubernetes.io/projected/2fcb92c1-d267-4996-9e6f-d89982c6379d-kube-api-access-zpxcw\") on node \"crc\" DevicePath \"\""
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.350259 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.350402 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.358631 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.388396 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-46sn9"]
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.450778 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/361acf90-3fb7-4266-b2ff-20e514aa5719-config\") pod \"361acf90-3fb7-4266-b2ff-20e514aa5719\" (UID: \"361acf90-3fb7-4266-b2ff-20e514aa5719\") "
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.450854 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/361acf90-3fb7-4266-b2ff-20e514aa5719-ovsdbserver-nb\") pod \"361acf90-3fb7-4266-b2ff-20e514aa5719\" (UID: \"361acf90-3fb7-4266-b2ff-20e514aa5719\") "
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.450926 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jstrj\" (UniqueName: \"kubernetes.io/projected/361acf90-3fb7-4266-b2ff-20e514aa5719-kube-api-access-jstrj\") pod \"361acf90-3fb7-4266-b2ff-20e514aa5719\" (UID: \"361acf90-3fb7-4266-b2ff-20e514aa5719\") "
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.450943 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/361acf90-3fb7-4266-b2ff-20e514aa5719-dns-svc\") pod \"361acf90-3fb7-4266-b2ff-20e514aa5719\" (UID: \"361acf90-3fb7-4266-b2ff-20e514aa5719\") "
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.451156 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d9956afc-c1f0-42b9-929b-596957f7c72f-ring-data-devices\") pod \"swift-ring-rebalance-46sn9\" (UID: \"d9956afc-c1f0-42b9-929b-596957f7c72f\") " pod="openstack/swift-ring-rebalance-46sn9"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.451185 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d9956afc-c1f0-42b9-929b-596957f7c72f-swiftconf\") pod \"swift-ring-rebalance-46sn9\" (UID: \"d9956afc-c1f0-42b9-929b-596957f7c72f\") " pod="openstack/swift-ring-rebalance-46sn9"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.451201 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d9956afc-c1f0-42b9-929b-596957f7c72f-etc-swift\") pod \"swift-ring-rebalance-46sn9\" (UID: \"d9956afc-c1f0-42b9-929b-596957f7c72f\") " pod="openstack/swift-ring-rebalance-46sn9"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.451238 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d9956afc-c1f0-42b9-929b-596957f7c72f-scripts\") pod \"swift-ring-rebalance-46sn9\" (UID: \"d9956afc-c1f0-42b9-929b-596957f7c72f\") " pod="openstack/swift-ring-rebalance-46sn9"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.451273 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9956afc-c1f0-42b9-929b-596957f7c72f-combined-ca-bundle\") pod \"swift-ring-rebalance-46sn9\" (UID: \"d9956afc-c1f0-42b9-929b-596957f7c72f\") " pod="openstack/swift-ring-rebalance-46sn9"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.451294 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d9956afc-c1f0-42b9-929b-596957f7c72f-dispersionconf\") pod \"swift-ring-rebalance-46sn9\" (UID: \"d9956afc-c1f0-42b9-929b-596957f7c72f\") " pod="openstack/swift-ring-rebalance-46sn9"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.451332 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lp74n\" (UniqueName: \"kubernetes.io/projected/d9956afc-c1f0-42b9-929b-596957f7c72f-kube-api-access-lp74n\") pod \"swift-ring-rebalance-46sn9\" (UID: \"d9956afc-c1f0-42b9-929b-596957f7c72f\") " pod="openstack/swift-ring-rebalance-46sn9"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.459885 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/361acf90-3fb7-4266-b2ff-20e514aa5719-kube-api-access-jstrj" (OuterVolumeSpecName: "kube-api-access-jstrj") pod "361acf90-3fb7-4266-b2ff-20e514aa5719" (UID: "361acf90-3fb7-4266-b2ff-20e514aa5719"). InnerVolumeSpecName "kube-api-access-jstrj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.501616 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/361acf90-3fb7-4266-b2ff-20e514aa5719-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "361acf90-3fb7-4266-b2ff-20e514aa5719" (UID: "361acf90-3fb7-4266-b2ff-20e514aa5719"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.533222 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/361acf90-3fb7-4266-b2ff-20e514aa5719-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "361acf90-3fb7-4266-b2ff-20e514aa5719" (UID: "361acf90-3fb7-4266-b2ff-20e514aa5719"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.535145 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/361acf90-3fb7-4266-b2ff-20e514aa5719-config" (OuterVolumeSpecName: "config") pod "361acf90-3fb7-4266-b2ff-20e514aa5719" (UID: "361acf90-3fb7-4266-b2ff-20e514aa5719"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.552734 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9956afc-c1f0-42b9-929b-596957f7c72f-combined-ca-bundle\") pod \"swift-ring-rebalance-46sn9\" (UID: \"d9956afc-c1f0-42b9-929b-596957f7c72f\") " pod="openstack/swift-ring-rebalance-46sn9"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.553051 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d9956afc-c1f0-42b9-929b-596957f7c72f-dispersionconf\") pod \"swift-ring-rebalance-46sn9\" (UID: \"d9956afc-c1f0-42b9-929b-596957f7c72f\") " pod="openstack/swift-ring-rebalance-46sn9"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.553105 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lp74n\" (UniqueName: \"kubernetes.io/projected/d9956afc-c1f0-42b9-929b-596957f7c72f-kube-api-access-lp74n\") pod \"swift-ring-rebalance-46sn9\" (UID: \"d9956afc-c1f0-42b9-929b-596957f7c72f\") " pod="openstack/swift-ring-rebalance-46sn9"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.553160 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/68ea0968-070a-41d4-b023-31557446c4dc-etc-swift\") pod \"swift-storage-0\" (UID: \"68ea0968-070a-41d4-b023-31557446c4dc\") " pod="openstack/swift-storage-0"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.553187 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d9956afc-c1f0-42b9-929b-596957f7c72f-ring-data-devices\") pod \"swift-ring-rebalance-46sn9\" (UID: \"d9956afc-c1f0-42b9-929b-596957f7c72f\") " pod="openstack/swift-ring-rebalance-46sn9"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.553203 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d9956afc-c1f0-42b9-929b-596957f7c72f-swiftconf\") pod \"swift-ring-rebalance-46sn9\" (UID: \"d9956afc-c1f0-42b9-929b-596957f7c72f\") " pod="openstack/swift-ring-rebalance-46sn9"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.553217 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d9956afc-c1f0-42b9-929b-596957f7c72f-etc-swift\") pod \"swift-ring-rebalance-46sn9\" (UID: \"d9956afc-c1f0-42b9-929b-596957f7c72f\") " pod="openstack/swift-ring-rebalance-46sn9"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.553253 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d9956afc-c1f0-42b9-929b-596957f7c72f-scripts\") pod \"swift-ring-rebalance-46sn9\" (UID: \"d9956afc-c1f0-42b9-929b-596957f7c72f\") " pod="openstack/swift-ring-rebalance-46sn9"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.553298 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/361acf90-3fb7-4266-b2ff-20e514aa5719-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.553310 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jstrj\" (UniqueName: \"kubernetes.io/projected/361acf90-3fb7-4266-b2ff-20e514aa5719-kube-api-access-jstrj\") on node \"crc\" DevicePath \"\""
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.553322 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/361acf90-3fb7-4266-b2ff-20e514aa5719-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.553333 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/361acf90-3fb7-4266-b2ff-20e514aa5719-config\") on node \"crc\" DevicePath \"\""
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.554653 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d9956afc-c1f0-42b9-929b-596957f7c72f-scripts\") pod \"swift-ring-rebalance-46sn9\" (UID: \"d9956afc-c1f0-42b9-929b-596957f7c72f\") " pod="openstack/swift-ring-rebalance-46sn9"
Oct 10 16:49:33 crc kubenswrapper[4799]: E1010 16:49:33.554788 4799 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Oct 10 16:49:33 crc kubenswrapper[4799]: E1010 16:49:33.554804 4799 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Oct 10 16:49:33 crc kubenswrapper[4799]: E1010 16:49:33.554857 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/68ea0968-070a-41d4-b023-31557446c4dc-etc-swift podName:68ea0968-070a-41d4-b023-31557446c4dc nodeName:}" failed. No retries permitted until 2025-10-10 16:49:34.554827961 +0000 UTC m=+1068.063152066 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/68ea0968-070a-41d4-b023-31557446c4dc-etc-swift") pod "swift-storage-0" (UID: "68ea0968-070a-41d4-b023-31557446c4dc") : configmap "swift-ring-files" not found
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.555589 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d9956afc-c1f0-42b9-929b-596957f7c72f-etc-swift\") pod \"swift-ring-rebalance-46sn9\" (UID: \"d9956afc-c1f0-42b9-929b-596957f7c72f\") " pod="openstack/swift-ring-rebalance-46sn9"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.557264 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d9956afc-c1f0-42b9-929b-596957f7c72f-ring-data-devices\") pod \"swift-ring-rebalance-46sn9\" (UID: \"d9956afc-c1f0-42b9-929b-596957f7c72f\") " pod="openstack/swift-ring-rebalance-46sn9"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.559212 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d9956afc-c1f0-42b9-929b-596957f7c72f-dispersionconf\") pod \"swift-ring-rebalance-46sn9\" (UID: \"d9956afc-c1f0-42b9-929b-596957f7c72f\") " pod="openstack/swift-ring-rebalance-46sn9"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.559433 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9956afc-c1f0-42b9-929b-596957f7c72f-combined-ca-bundle\") pod \"swift-ring-rebalance-46sn9\" (UID: \"d9956afc-c1f0-42b9-929b-596957f7c72f\") " pod="openstack/swift-ring-rebalance-46sn9"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.562132 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d9956afc-c1f0-42b9-929b-596957f7c72f-swiftconf\") pod \"swift-ring-rebalance-46sn9\" (UID: \"d9956afc-c1f0-42b9-929b-596957f7c72f\") " pod="openstack/swift-ring-rebalance-46sn9"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.571266 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lp74n\" (UniqueName: \"kubernetes.io/projected/d9956afc-c1f0-42b9-929b-596957f7c72f-kube-api-access-lp74n\") pod \"swift-ring-rebalance-46sn9\" (UID: \"d9956afc-c1f0-42b9-929b-596957f7c72f\") " pod="openstack/swift-ring-rebalance-46sn9"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.721960 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-46sn9"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.741670 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-dd8jn" event={"ID":"b8ed5bbd-eed6-4474-80c1-ea9aed201450","Type":"ContainerDied","Data":"ca0d82485472f54cbce241fe15b6231356a390e4111ad1bf30ddbbec4a0ebb5a"}
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.741710 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ca0d82485472f54cbce241fe15b6231356a390e4111ad1bf30ddbbec4a0ebb5a"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.741716 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-dd8jn"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.745148 4799 generic.go:334] "Generic (PLEG): container finished" podID="361acf90-3fb7-4266-b2ff-20e514aa5719" containerID="c7837ffcf72637b8ddab1ef93e86267c646273d3a258aba43e5f77f95856ef58" exitCode=0
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.745204 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f6b595d95-jhcqh" event={"ID":"361acf90-3fb7-4266-b2ff-20e514aa5719","Type":"ContainerDied","Data":"c7837ffcf72637b8ddab1ef93e86267c646273d3a258aba43e5f77f95856ef58"}
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.745225 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f6b595d95-jhcqh" event={"ID":"361acf90-3fb7-4266-b2ff-20e514aa5719","Type":"ContainerDied","Data":"7c27e32f6073c0f3556a6580dbe07669da93d67d2567c37e040d77c12f298eb2"}
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.745244 4799 scope.go:117] "RemoveContainer" containerID="c7837ffcf72637b8ddab1ef93e86267c646273d3a258aba43e5f77f95856ef58"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.745363 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f6b595d95-jhcqh"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.752532 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-vc285" event={"ID":"2fcb92c1-d267-4996-9e6f-d89982c6379d","Type":"ContainerDied","Data":"552076376b9f730afd508b2d96e8d6576706c99185cbad4266179099ba03f664"}
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.752586 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="552076376b9f730afd508b2d96e8d6576706c99185cbad4266179099ba03f664"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.752667 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-vc285"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.760678 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b587f8db7-jxsc9" event={"ID":"07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6","Type":"ContainerStarted","Data":"17d83bee82a2e1f670f14fc38a4b800a5bc5d1d3bcc4daf243627e79b435f4d6"}
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.760747 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7b587f8db7-jxsc9"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.770050 4799 scope.go:117] "RemoveContainer" containerID="a834ec30d8d48687f5475575dcea4fabb24eacb955fcfec5280fb404fb6fa8d2"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.789326 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7b587f8db7-jxsc9" podStartSLOduration=2.789280512 podStartE2EDuration="2.789280512s" podCreationTimestamp="2025-10-10 16:49:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:49:33.785309064 +0000 UTC m=+1067.293633199" watchObservedRunningTime="2025-10-10 16:49:33.789280512 +0000 UTC m=+1067.297604637"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.797578 4799 scope.go:117] "RemoveContainer" containerID="c7837ffcf72637b8ddab1ef93e86267c646273d3a258aba43e5f77f95856ef58"
Oct 10 16:49:33 crc kubenswrapper[4799]: E1010 16:49:33.801211 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c7837ffcf72637b8ddab1ef93e86267c646273d3a258aba43e5f77f95856ef58\": container with ID starting with c7837ffcf72637b8ddab1ef93e86267c646273d3a258aba43e5f77f95856ef58 not found: ID does not exist" containerID="c7837ffcf72637b8ddab1ef93e86267c646273d3a258aba43e5f77f95856ef58"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.801284 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7837ffcf72637b8ddab1ef93e86267c646273d3a258aba43e5f77f95856ef58"} err="failed to get container status \"c7837ffcf72637b8ddab1ef93e86267c646273d3a258aba43e5f77f95856ef58\": rpc error: code = NotFound desc = could not find container \"c7837ffcf72637b8ddab1ef93e86267c646273d3a258aba43e5f77f95856ef58\": container with ID starting with c7837ffcf72637b8ddab1ef93e86267c646273d3a258aba43e5f77f95856ef58 not found: ID does not exist"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.801326 4799 scope.go:117] "RemoveContainer" containerID="a834ec30d8d48687f5475575dcea4fabb24eacb955fcfec5280fb404fb6fa8d2"
Oct 10 16:49:33 crc kubenswrapper[4799]: E1010 16:49:33.801624 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a834ec30d8d48687f5475575dcea4fabb24eacb955fcfec5280fb404fb6fa8d2\": container with ID starting with a834ec30d8d48687f5475575dcea4fabb24eacb955fcfec5280fb404fb6fa8d2 not found: ID does not exist" containerID="a834ec30d8d48687f5475575dcea4fabb24eacb955fcfec5280fb404fb6fa8d2"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.801652 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a834ec30d8d48687f5475575dcea4fabb24eacb955fcfec5280fb404fb6fa8d2"} err="failed to get container status \"a834ec30d8d48687f5475575dcea4fabb24eacb955fcfec5280fb404fb6fa8d2\": rpc error: code = NotFound desc = could not find container \"a834ec30d8d48687f5475575dcea4fabb24eacb955fcfec5280fb404fb6fa8d2\": container with ID starting with a834ec30d8d48687f5475575dcea4fabb24eacb955fcfec5280fb404fb6fa8d2 not found: ID does not exist"
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.808073 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f6b595d95-jhcqh"]
Oct 10 16:49:33 crc kubenswrapper[4799]: I1010 16:49:33.814689 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-f6b595d95-jhcqh"]
Oct 10 16:49:34 crc kubenswrapper[4799]: I1010 16:49:34.040821 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-46sn9"]
Oct 10 16:49:34 crc kubenswrapper[4799]: W1010 16:49:34.050082 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd9956afc_c1f0_42b9_929b_596957f7c72f.slice/crio-c09d8a36f2821896a8c018e009b53bae101e1bce75260fff6848701bd8db5533 WatchSource:0}: Error finding container c09d8a36f2821896a8c018e009b53bae101e1bce75260fff6848701bd8db5533: Status 404 returned error can't find the container with id c09d8a36f2821896a8c018e009b53bae101e1bce75260fff6848701bd8db5533
Oct 10 16:49:34 crc kubenswrapper[4799]: I1010 16:49:34.571588 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/68ea0968-070a-41d4-b023-31557446c4dc-etc-swift\") pod \"swift-storage-0\" (UID: \"68ea0968-070a-41d4-b023-31557446c4dc\") " pod="openstack/swift-storage-0"
Oct 10 16:49:34 crc kubenswrapper[4799]: E1010 16:49:34.571926 4799 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Oct 10 16:49:34 crc kubenswrapper[4799]: E1010 16:49:34.572106 4799 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Oct 10 16:49:34 crc kubenswrapper[4799]: E1010 16:49:34.572185 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/68ea0968-070a-41d4-b023-31557446c4dc-etc-swift podName:68ea0968-070a-41d4-b023-31557446c4dc nodeName:}" failed. No retries permitted until 2025-10-10 16:49:36.572162348 +0000 UTC m=+1070.080486503 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/68ea0968-070a-41d4-b023-31557446c4dc-etc-swift") pod "swift-storage-0" (UID: "68ea0968-070a-41d4-b023-31557446c4dc") : configmap "swift-ring-files" not found
Oct 10 16:49:34 crc kubenswrapper[4799]: I1010 16:49:34.786047 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-46sn9" event={"ID":"d9956afc-c1f0-42b9-929b-596957f7c72f","Type":"ContainerStarted","Data":"c09d8a36f2821896a8c018e009b53bae101e1bce75260fff6848701bd8db5533"}
Oct 10 16:49:35 crc kubenswrapper[4799]: I1010 16:49:35.419741 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="361acf90-3fb7-4266-b2ff-20e514aa5719" path="/var/lib/kubelet/pods/361acf90-3fb7-4266-b2ff-20e514aa5719/volumes"
Oct 10 16:49:36 crc kubenswrapper[4799]: I1010 16:49:36.608280 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/68ea0968-070a-41d4-b023-31557446c4dc-etc-swift\") pod \"swift-storage-0\" (UID: \"68ea0968-070a-41d4-b023-31557446c4dc\") " pod="openstack/swift-storage-0"
Oct 10 16:49:36 crc kubenswrapper[4799]: E1010 16:49:36.608492 4799 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Oct 10 16:49:36 crc kubenswrapper[4799]: E1010 16:49:36.608511 4799 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Oct 10 16:49:36 crc kubenswrapper[4799]: E1010 16:49:36.608559 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/68ea0968-070a-41d4-b023-31557446c4dc-etc-swift podName:68ea0968-070a-41d4-b023-31557446c4dc nodeName:}" failed. No retries permitted until 2025-10-10 16:49:40.608543167 +0000 UTC m=+1074.116867292 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/68ea0968-070a-41d4-b023-31557446c4dc-etc-swift") pod "swift-storage-0" (UID: "68ea0968-070a-41d4-b023-31557446c4dc") : configmap "swift-ring-files" not found
Oct 10 16:49:38 crc kubenswrapper[4799]: I1010 16:49:38.642168 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-dc9d58d7-n7n2p"
Oct 10 16:49:38 crc kubenswrapper[4799]: I1010 16:49:38.825918 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-46sn9" event={"ID":"d9956afc-c1f0-42b9-929b-596957f7c72f","Type":"ContainerStarted","Data":"714553f7ebe432f9fd4eeeae273403d9c3ff673b9a55481665624e3067441b8c"}
Oct 10 16:49:38 crc kubenswrapper[4799]: I1010 16:49:38.854619 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-46sn9" podStartSLOduration=2.003569084 podStartE2EDuration="5.854601263s" podCreationTimestamp="2025-10-10 16:49:33 +0000 UTC" firstStartedPulling="2025-10-10 16:49:34.052633597 +0000 UTC m=+1067.560957732" lastFinishedPulling="2025-10-10 16:49:37.903665786 +0000 UTC m=+1071.411989911" observedRunningTime="2025-10-10 16:49:38.852141192 +0000 UTC m=+1072.360465327" watchObservedRunningTime="2025-10-10 16:49:38.854601263 +0000 UTC m=+1072.362925378"
Oct 10 16:49:39 crc kubenswrapper[4799]: I1010 16:49:39.624426 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-qv6gf"]
Oct 10 16:49:39 crc kubenswrapper[4799]: I1010 16:49:39.626065 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-qv6gf"
Oct 10 16:49:39 crc kubenswrapper[4799]: I1010 16:49:39.678272 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-qv6gf"]
Oct 10 16:49:39 crc kubenswrapper[4799]: I1010 16:49:39.773525 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qddbt\" (UniqueName: \"kubernetes.io/projected/0e585e37-862b-4a19-bb06-36f89755f4e0-kube-api-access-qddbt\") pod \"keystone-db-create-qv6gf\" (UID: \"0e585e37-862b-4a19-bb06-36f89755f4e0\") " pod="openstack/keystone-db-create-qv6gf"
Oct 10 16:49:39 crc kubenswrapper[4799]: I1010 16:49:39.874972 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qddbt\" (UniqueName: \"kubernetes.io/projected/0e585e37-862b-4a19-bb06-36f89755f4e0-kube-api-access-qddbt\") pod \"keystone-db-create-qv6gf\" (UID: \"0e585e37-862b-4a19-bb06-36f89755f4e0\") " pod="openstack/keystone-db-create-qv6gf"
Oct 10 16:49:39 crc kubenswrapper[4799]: I1010 16:49:39.899685 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qddbt\" (UniqueName: \"kubernetes.io/projected/0e585e37-862b-4a19-bb06-36f89755f4e0-kube-api-access-qddbt\") pod \"keystone-db-create-qv6gf\" (UID: \"0e585e37-862b-4a19-bb06-36f89755f4e0\") " pod="openstack/keystone-db-create-qv6gf"
Oct 10 16:49:39 crc kubenswrapper[4799]: I1010 16:49:39.987237 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-qv6gf"
Oct 10 16:49:40 crc kubenswrapper[4799]: I1010 16:49:40.180297 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-cbdc-account-create-jrcz2"]
Oct 10 16:49:40 crc kubenswrapper[4799]: I1010 16:49:40.181831 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-cbdc-account-create-jrcz2"
Oct 10 16:49:40 crc kubenswrapper[4799]: I1010 16:49:40.184737 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret"
Oct 10 16:49:40 crc kubenswrapper[4799]: I1010 16:49:40.189405 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-cbdc-account-create-jrcz2"]
Oct 10 16:49:40 crc kubenswrapper[4799]: I1010 16:49:40.283747 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mbp2h\" (UniqueName: \"kubernetes.io/projected/c222456d-86d0-45ea-aace-e359a8f0b78a-kube-api-access-mbp2h\") pod \"placement-cbdc-account-create-jrcz2\" (UID: \"c222456d-86d0-45ea-aace-e359a8f0b78a\") " pod="openstack/placement-cbdc-account-create-jrcz2"
Oct 10 16:49:40 crc kubenswrapper[4799]: I1010 16:49:40.385295 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mbp2h\" (UniqueName: \"kubernetes.io/projected/c222456d-86d0-45ea-aace-e359a8f0b78a-kube-api-access-mbp2h\") pod \"placement-cbdc-account-create-jrcz2\" (UID: \"c222456d-86d0-45ea-aace-e359a8f0b78a\") " pod="openstack/placement-cbdc-account-create-jrcz2"
Oct 10 16:49:40 crc kubenswrapper[4799]: I1010 16:49:40.390026 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-2b60-account-create-88ssw"]
Oct 10 16:49:40 crc kubenswrapper[4799]: I1010 16:49:40.391540 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-2b60-account-create-88ssw"
Oct 10 16:49:40 crc kubenswrapper[4799]: I1010 16:49:40.393483 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret"
Oct 10 16:49:40 crc kubenswrapper[4799]: I1010 16:49:40.400358 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-2b60-account-create-88ssw"]
Oct 10 16:49:40 crc kubenswrapper[4799]: I1010 16:49:40.416217 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mbp2h\" (UniqueName: \"kubernetes.io/projected/c222456d-86d0-45ea-aace-e359a8f0b78a-kube-api-access-mbp2h\") pod \"placement-cbdc-account-create-jrcz2\" (UID: \"c222456d-86d0-45ea-aace-e359a8f0b78a\") " pod="openstack/placement-cbdc-account-create-jrcz2"
Oct 10 16:49:40 crc kubenswrapper[4799]: I1010 16:49:40.475875 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-qv6gf"]
Oct 10 16:49:40 crc kubenswrapper[4799]: I1010 16:49:40.486956 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cktsz\" (UniqueName: \"kubernetes.io/projected/108ef7b4-9cd2-41c6-b4d4-6f84d0cc2f64-kube-api-access-cktsz\") pod \"glance-2b60-account-create-88ssw\" (UID: \"108ef7b4-9cd2-41c6-b4d4-6f84d0cc2f64\") " pod="openstack/glance-2b60-account-create-88ssw"
Oct 10 16:49:40 crc kubenswrapper[4799]: I1010 16:49:40.507863 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-cbdc-account-create-jrcz2"
Oct 10 16:49:40 crc kubenswrapper[4799]: I1010 16:49:40.588799 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cktsz\" (UniqueName: \"kubernetes.io/projected/108ef7b4-9cd2-41c6-b4d4-6f84d0cc2f64-kube-api-access-cktsz\") pod \"glance-2b60-account-create-88ssw\" (UID: \"108ef7b4-9cd2-41c6-b4d4-6f84d0cc2f64\") " pod="openstack/glance-2b60-account-create-88ssw"
Oct 10 16:49:40 crc kubenswrapper[4799]: I1010 16:49:40.618899 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cktsz\" (UniqueName: \"kubernetes.io/projected/108ef7b4-9cd2-41c6-b4d4-6f84d0cc2f64-kube-api-access-cktsz\") pod \"glance-2b60-account-create-88ssw\" (UID: \"108ef7b4-9cd2-41c6-b4d4-6f84d0cc2f64\") " pod="openstack/glance-2b60-account-create-88ssw"
Oct 10 16:49:40 crc kubenswrapper[4799]: I1010 16:49:40.690816 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/68ea0968-070a-41d4-b023-31557446c4dc-etc-swift\") pod \"swift-storage-0\" (UID: \"68ea0968-070a-41d4-b023-31557446c4dc\") " pod="openstack/swift-storage-0"
Oct 10 16:49:40 crc kubenswrapper[4799]: E1010 16:49:40.690983 4799 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Oct 10 16:49:40 crc kubenswrapper[4799]: E1010 16:49:40.690998 4799 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Oct 10 16:49:40 crc kubenswrapper[4799]: E1010 16:49:40.691042 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/68ea0968-070a-41d4-b023-31557446c4dc-etc-swift podName:68ea0968-070a-41d4-b023-31557446c4dc nodeName:}" failed. No retries permitted until 2025-10-10 16:49:48.691028683 +0000 UTC m=+1082.199352798 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/68ea0968-070a-41d4-b023-31557446c4dc-etc-swift") pod "swift-storage-0" (UID: "68ea0968-070a-41d4-b023-31557446c4dc") : configmap "swift-ring-files" not found
Oct 10 16:49:40 crc kubenswrapper[4799]: I1010 16:49:40.711766 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-2b60-account-create-88ssw"
Oct 10 16:49:40 crc kubenswrapper[4799]: I1010 16:49:40.770686 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-cbdc-account-create-jrcz2"]
Oct 10 16:49:40 crc kubenswrapper[4799]: W1010 16:49:40.784425 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc222456d_86d0_45ea_aace_e359a8f0b78a.slice/crio-c513375b2ef7559dfd24d3717a6798e006453d37a41dd8dec116b64dc8655e8a WatchSource:0}: Error finding container c513375b2ef7559dfd24d3717a6798e006453d37a41dd8dec116b64dc8655e8a: Status 404 returned error can't find the container with id c513375b2ef7559dfd24d3717a6798e006453d37a41dd8dec116b64dc8655e8a
Oct 10 16:49:40 crc kubenswrapper[4799]: I1010 16:49:40.841572 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-cbdc-account-create-jrcz2" event={"ID":"c222456d-86d0-45ea-aace-e359a8f0b78a","Type":"ContainerStarted","Data":"c513375b2ef7559dfd24d3717a6798e006453d37a41dd8dec116b64dc8655e8a"}
Oct 10 16:49:40 crc kubenswrapper[4799]: I1010 16:49:40.842792 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-qv6gf" event={"ID":"0e585e37-862b-4a19-bb06-36f89755f4e0","Type":"ContainerStarted","Data":"74d7360b72f474ddc74b93ae5b6e0548f79c9d780deff76dfa7aa8ae531e3d0c"}
Oct 10 16:49:40 crc kubenswrapper[4799]: I1010 16:49:40.958421 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-2b60-account-create-88ssw"]
Oct 10 16:49:41 crc kubenswrapper[4799]: I1010 16:49:41.852124 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-qv6gf" event={"ID":"0e585e37-862b-4a19-bb06-36f89755f4e0","Type":"ContainerStarted","Data":"46d306b5bea3bc195ac0b91193e1f5645352cac207d4c286eb08fc864f88ab77"}
Oct 10 16:49:41 crc kubenswrapper[4799]: I1010 16:49:41.854488 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-cbdc-account-create-jrcz2" event={"ID":"c222456d-86d0-45ea-aace-e359a8f0b78a","Type":"ContainerStarted","Data":"6a79ce82fdaad196267612d6635747e431f63cb7b8ed234aa89621488cd6187d"}
Oct 10 16:49:41 crc kubenswrapper[4799]: I1010 16:49:41.856062 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-2b60-account-create-88ssw" event={"ID":"108ef7b4-9cd2-41c6-b4d4-6f84d0cc2f64","Type":"ContainerStarted","Data":"31d6aa31490ff7aaded0daf0398838f4567228d35b367d267909a86536b475a5"}
Oct 10 16:49:41 crc kubenswrapper[4799]: I1010 16:49:41.856161 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-2b60-account-create-88ssw" event={"ID":"108ef7b4-9cd2-41c6-b4d4-6f84d0cc2f64","Type":"ContainerStarted","Data":"c964dfbe0d8121658d8a6ba20f8b7c0b41d770bc1abef25b08da38aa3a7284d3"}
Oct 10 16:49:41 crc kubenswrapper[4799]: I1010 16:49:41.922014 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7b587f8db7-jxsc9"
Oct 10 16:49:42 crc kubenswrapper[4799]: I1010 16:49:42.011770 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-dc9d58d7-n7n2p"]
Oct 10 16:49:42 crc kubenswrapper[4799]: I1010 16:49:42.012301 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-dc9d58d7-n7n2p" podUID="f8634816-4d14-4ca6-87b4-a99d49fba1f2" containerName="dnsmasq-dns" containerID="cri-o://3666feec049d8ede205041982bdb850b076fda18e0a3174503953202ee12b045" gracePeriod=10
Oct 10 16:49:42 crc kubenswrapper[4799]: I1010 16:49:42.866960 4799 generic.go:334] "Generic (PLEG): container finished" podID="f8634816-4d14-4ca6-87b4-a99d49fba1f2" containerID="3666feec049d8ede205041982bdb850b076fda18e0a3174503953202ee12b045" exitCode=0
Oct 10 16:49:42 crc kubenswrapper[4799]: I1010 16:49:42.867085 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dc9d58d7-n7n2p" event={"ID":"f8634816-4d14-4ca6-87b4-a99d49fba1f2","Type":"ContainerDied","Data":"3666feec049d8ede205041982bdb850b076fda18e0a3174503953202ee12b045"}
Oct 10 16:49:42 crc kubenswrapper[4799]: I1010 16:49:42.885107 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-cbdc-account-create-jrcz2" podStartSLOduration=2.885081675 podStartE2EDuration="2.885081675s" podCreationTimestamp="2025-10-10 16:49:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:49:42.883879025 +0000 UTC m=+1076.392203170" watchObservedRunningTime="2025-10-10 16:49:42.885081675 +0000 UTC m=+1076.393405800"
Oct 10 16:49:42 crc kubenswrapper[4799]: I1010 16:49:42.904209 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-create-qv6gf" podStartSLOduration=3.904189877 podStartE2EDuration="3.904189877s" podCreationTimestamp="2025-10-10 16:49:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:49:42.900340611 +0000 UTC m=+1076.408664746" watchObservedRunningTime="2025-10-10 16:49:42.904189877 +0000 UTC m=+1076.412514012"
Oct 10 16:49:42 crc kubenswrapper[4799]: I1010 16:49:42.925166 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-2b60-account-create-88ssw" podStartSLOduration=2.925145784 podStartE2EDuration="2.925145784s" podCreationTimestamp="2025-10-10 16:49:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:49:42.923427822 +0000 UTC m=+1076.431751937" watchObservedRunningTime="2025-10-10 16:49:42.925145784 +0000 UTC m=+1076.433469899"
Oct 10 16:49:43 crc kubenswrapper[4799]: I1010 16:49:43.641650 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-dc9d58d7-n7n2p" podUID="f8634816-4d14-4ca6-87b4-a99d49fba1f2" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.118:5353: connect: connection refused"
Oct 10 16:49:44 crc kubenswrapper[4799]: I1010 16:49:44.312462 4799 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/dnsmasq-dns-dc9d58d7-n7n2p" Oct 10 16:49:44 crc kubenswrapper[4799]: I1010 16:49:44.459885 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f8634816-4d14-4ca6-87b4-a99d49fba1f2-ovsdbserver-nb\") pod \"f8634816-4d14-4ca6-87b4-a99d49fba1f2\" (UID: \"f8634816-4d14-4ca6-87b4-a99d49fba1f2\") " Oct 10 16:49:44 crc kubenswrapper[4799]: I1010 16:49:44.460342 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8634816-4d14-4ca6-87b4-a99d49fba1f2-config\") pod \"f8634816-4d14-4ca6-87b4-a99d49fba1f2\" (UID: \"f8634816-4d14-4ca6-87b4-a99d49fba1f2\") " Oct 10 16:49:44 crc kubenswrapper[4799]: I1010 16:49:44.460569 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n6rm6\" (UniqueName: \"kubernetes.io/projected/f8634816-4d14-4ca6-87b4-a99d49fba1f2-kube-api-access-n6rm6\") pod \"f8634816-4d14-4ca6-87b4-a99d49fba1f2\" (UID: \"f8634816-4d14-4ca6-87b4-a99d49fba1f2\") " Oct 10 16:49:44 crc kubenswrapper[4799]: I1010 16:49:44.460653 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f8634816-4d14-4ca6-87b4-a99d49fba1f2-dns-svc\") pod \"f8634816-4d14-4ca6-87b4-a99d49fba1f2\" (UID: \"f8634816-4d14-4ca6-87b4-a99d49fba1f2\") " Oct 10 16:49:44 crc kubenswrapper[4799]: I1010 16:49:44.460706 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f8634816-4d14-4ca6-87b4-a99d49fba1f2-ovsdbserver-sb\") pod \"f8634816-4d14-4ca6-87b4-a99d49fba1f2\" (UID: \"f8634816-4d14-4ca6-87b4-a99d49fba1f2\") " Oct 10 16:49:44 crc kubenswrapper[4799]: I1010 16:49:44.467249 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8634816-4d14-4ca6-87b4-a99d49fba1f2-kube-api-access-n6rm6" (OuterVolumeSpecName: "kube-api-access-n6rm6") pod "f8634816-4d14-4ca6-87b4-a99d49fba1f2" (UID: "f8634816-4d14-4ca6-87b4-a99d49fba1f2"). InnerVolumeSpecName "kube-api-access-n6rm6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:49:44 crc kubenswrapper[4799]: I1010 16:49:44.514133 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8634816-4d14-4ca6-87b4-a99d49fba1f2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f8634816-4d14-4ca6-87b4-a99d49fba1f2" (UID: "f8634816-4d14-4ca6-87b4-a99d49fba1f2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:49:44 crc kubenswrapper[4799]: I1010 16:49:44.514600 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8634816-4d14-4ca6-87b4-a99d49fba1f2-config" (OuterVolumeSpecName: "config") pod "f8634816-4d14-4ca6-87b4-a99d49fba1f2" (UID: "f8634816-4d14-4ca6-87b4-a99d49fba1f2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:49:44 crc kubenswrapper[4799]: I1010 16:49:44.528126 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8634816-4d14-4ca6-87b4-a99d49fba1f2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f8634816-4d14-4ca6-87b4-a99d49fba1f2" (UID: "f8634816-4d14-4ca6-87b4-a99d49fba1f2"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:49:44 crc kubenswrapper[4799]: I1010 16:49:44.536557 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8634816-4d14-4ca6-87b4-a99d49fba1f2-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f8634816-4d14-4ca6-87b4-a99d49fba1f2" (UID: "f8634816-4d14-4ca6-87b4-a99d49fba1f2"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:49:44 crc kubenswrapper[4799]: I1010 16:49:44.565276 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n6rm6\" (UniqueName: \"kubernetes.io/projected/f8634816-4d14-4ca6-87b4-a99d49fba1f2-kube-api-access-n6rm6\") on node \"crc\" DevicePath \"\"" Oct 10 16:49:44 crc kubenswrapper[4799]: I1010 16:49:44.565334 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f8634816-4d14-4ca6-87b4-a99d49fba1f2-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 10 16:49:44 crc kubenswrapper[4799]: I1010 16:49:44.565352 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f8634816-4d14-4ca6-87b4-a99d49fba1f2-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 10 16:49:44 crc kubenswrapper[4799]: I1010 16:49:44.565369 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f8634816-4d14-4ca6-87b4-a99d49fba1f2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 10 16:49:44 crc kubenswrapper[4799]: I1010 16:49:44.565385 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8634816-4d14-4ca6-87b4-a99d49fba1f2-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:49:44 crc kubenswrapper[4799]: I1010 16:49:44.905146 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dc9d58d7-n7n2p" event={"ID":"f8634816-4d14-4ca6-87b4-a99d49fba1f2","Type":"ContainerDied","Data":"1686fc1fe45a50a6297e7bde004545948bff5293446cc0d1f68d84bca1c73ea9"} Oct 10 16:49:44 crc kubenswrapper[4799]: I1010 16:49:44.905214 4799 scope.go:117] "RemoveContainer" containerID="3666feec049d8ede205041982bdb850b076fda18e0a3174503953202ee12b045" Oct 10 16:49:44 crc kubenswrapper[4799]: I1010 16:49:44.905369 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-dc9d58d7-n7n2p" Oct 10 16:49:44 crc kubenswrapper[4799]: I1010 16:49:44.945875 4799 scope.go:117] "RemoveContainer" containerID="7c82fa1520da2915128d0d5a9ff3afb1f85fe7782493c3fc868159880ebaa749" Oct 10 16:49:44 crc kubenswrapper[4799]: I1010 16:49:44.950875 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-dc9d58d7-n7n2p"] Oct 10 16:49:44 crc kubenswrapper[4799]: I1010 16:49:44.962860 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-dc9d58d7-n7n2p"] Oct 10 16:49:45 crc kubenswrapper[4799]: I1010 16:49:45.248811 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 16:49:45 crc kubenswrapper[4799]: I1010 16:49:45.248903 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 16:49:45 crc kubenswrapper[4799]: I1010 16:49:45.414185 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8634816-4d14-4ca6-87b4-a99d49fba1f2" path="/var/lib/kubelet/pods/f8634816-4d14-4ca6-87b4-a99d49fba1f2/volumes" Oct 10 16:49:45 crc kubenswrapper[4799]: I1010 16:49:45.479665 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Oct 10 16:49:47 crc kubenswrapper[4799]: I1010 16:49:47.954333 4799 generic.go:334] "Generic (PLEG): container finished" podID="0e585e37-862b-4a19-bb06-36f89755f4e0" containerID="46d306b5bea3bc195ac0b91193e1f5645352cac207d4c286eb08fc864f88ab77" exitCode=0 Oct 10 16:49:47 crc kubenswrapper[4799]: I1010 16:49:47.954431 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-qv6gf" event={"ID":"0e585e37-862b-4a19-bb06-36f89755f4e0","Type":"ContainerDied","Data":"46d306b5bea3bc195ac0b91193e1f5645352cac207d4c286eb08fc864f88ab77"} Oct 10 16:49:48 crc kubenswrapper[4799]: I1010 16:49:48.738502 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/68ea0968-070a-41d4-b023-31557446c4dc-etc-swift\") pod \"swift-storage-0\" (UID: \"68ea0968-070a-41d4-b023-31557446c4dc\") " pod="openstack/swift-storage-0" Oct 10 16:49:48 crc kubenswrapper[4799]: E1010 16:49:48.738737 4799 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 10 16:49:48 crc kubenswrapper[4799]: E1010 16:49:48.738784 4799 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 10 16:49:48 crc kubenswrapper[4799]: E1010 16:49:48.738856 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/68ea0968-070a-41d4-b023-31557446c4dc-etc-swift podName:68ea0968-070a-41d4-b023-31557446c4dc nodeName:}" failed. No retries permitted until 2025-10-10 16:50:04.738833631 +0000 UTC m=+1098.247157756 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/68ea0968-070a-41d4-b023-31557446c4dc-etc-swift") pod "swift-storage-0" (UID: "68ea0968-070a-41d4-b023-31557446c4dc") : configmap "swift-ring-files" not found Oct 10 16:49:48 crc kubenswrapper[4799]: I1010 16:49:48.968245 4799 generic.go:334] "Generic (PLEG): container finished" podID="c222456d-86d0-45ea-aace-e359a8f0b78a" containerID="6a79ce82fdaad196267612d6635747e431f63cb7b8ed234aa89621488cd6187d" exitCode=0 Oct 10 16:49:48 crc kubenswrapper[4799]: I1010 16:49:48.968387 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-cbdc-account-create-jrcz2" event={"ID":"c222456d-86d0-45ea-aace-e359a8f0b78a","Type":"ContainerDied","Data":"6a79ce82fdaad196267612d6635747e431f63cb7b8ed234aa89621488cd6187d"} Oct 10 16:49:49 crc kubenswrapper[4799]: I1010 16:49:49.367139 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-qv6gf" Oct 10 16:49:49 crc kubenswrapper[4799]: I1010 16:49:49.452002 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qddbt\" (UniqueName: \"kubernetes.io/projected/0e585e37-862b-4a19-bb06-36f89755f4e0-kube-api-access-qddbt\") pod \"0e585e37-862b-4a19-bb06-36f89755f4e0\" (UID: \"0e585e37-862b-4a19-bb06-36f89755f4e0\") " Oct 10 16:49:49 crc kubenswrapper[4799]: I1010 16:49:49.460083 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e585e37-862b-4a19-bb06-36f89755f4e0-kube-api-access-qddbt" (OuterVolumeSpecName: "kube-api-access-qddbt") pod "0e585e37-862b-4a19-bb06-36f89755f4e0" (UID: "0e585e37-862b-4a19-bb06-36f89755f4e0"). InnerVolumeSpecName "kube-api-access-qddbt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:49:49 crc kubenswrapper[4799]: I1010 16:49:49.555095 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qddbt\" (UniqueName: \"kubernetes.io/projected/0e585e37-862b-4a19-bb06-36f89755f4e0-kube-api-access-qddbt\") on node \"crc\" DevicePath \"\"" Oct 10 16:49:49 crc kubenswrapper[4799]: I1010 16:49:49.981373 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-qv6gf" Oct 10 16:49:49 crc kubenswrapper[4799]: I1010 16:49:49.981384 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-qv6gf" event={"ID":"0e585e37-862b-4a19-bb06-36f89755f4e0","Type":"ContainerDied","Data":"74d7360b72f474ddc74b93ae5b6e0548f79c9d780deff76dfa7aa8ae531e3d0c"} Oct 10 16:49:49 crc kubenswrapper[4799]: I1010 16:49:49.981466 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="74d7360b72f474ddc74b93ae5b6e0548f79c9d780deff76dfa7aa8ae531e3d0c" Oct 10 16:49:50 crc kubenswrapper[4799]: I1010 16:49:50.467874 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-cbdc-account-create-jrcz2" Oct 10 16:49:50 crc kubenswrapper[4799]: I1010 16:49:50.588483 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mbp2h\" (UniqueName: \"kubernetes.io/projected/c222456d-86d0-45ea-aace-e359a8f0b78a-kube-api-access-mbp2h\") pod \"c222456d-86d0-45ea-aace-e359a8f0b78a\" (UID: \"c222456d-86d0-45ea-aace-e359a8f0b78a\") " Oct 10 16:49:50 crc kubenswrapper[4799]: I1010 16:49:50.608691 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c222456d-86d0-45ea-aace-e359a8f0b78a-kube-api-access-mbp2h" (OuterVolumeSpecName: "kube-api-access-mbp2h") pod "c222456d-86d0-45ea-aace-e359a8f0b78a" (UID: "c222456d-86d0-45ea-aace-e359a8f0b78a"). InnerVolumeSpecName "kube-api-access-mbp2h". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:49:50 crc kubenswrapper[4799]: I1010 16:49:50.690327 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mbp2h\" (UniqueName: \"kubernetes.io/projected/c222456d-86d0-45ea-aace-e359a8f0b78a-kube-api-access-mbp2h\") on node \"crc\" DevicePath \"\"" Oct 10 16:49:51 crc kubenswrapper[4799]: I1010 16:49:50.999933 4799 generic.go:334] "Generic (PLEG): container finished" podID="108ef7b4-9cd2-41c6-b4d4-6f84d0cc2f64" containerID="31d6aa31490ff7aaded0daf0398838f4567228d35b367d267909a86536b475a5" exitCode=0 Oct 10 16:49:51 crc kubenswrapper[4799]: I1010 16:49:51.000016 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-2b60-account-create-88ssw" event={"ID":"108ef7b4-9cd2-41c6-b4d4-6f84d0cc2f64","Type":"ContainerDied","Data":"31d6aa31490ff7aaded0daf0398838f4567228d35b367d267909a86536b475a5"} Oct 10 16:49:51 crc kubenswrapper[4799]: I1010 16:49:51.002204 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9fd6f03f-abea-4c29-8060-0705bb0af2c7","Type":"ContainerDied","Data":"adfdbb90972668f2d71dad450618269e6685fe2f84e1846228c2c17d1cd7c04c"} Oct 10 16:49:51 crc kubenswrapper[4799]: I1010 16:49:51.002137 4799 generic.go:334] "Generic (PLEG): container finished" podID="9fd6f03f-abea-4c29-8060-0705bb0af2c7" containerID="adfdbb90972668f2d71dad450618269e6685fe2f84e1846228c2c17d1cd7c04c" exitCode=0 Oct 10 16:49:51 crc kubenswrapper[4799]: I1010 16:49:51.008404 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-cbdc-account-create-jrcz2" Oct 10 16:49:51 crc kubenswrapper[4799]: I1010 16:49:51.008660 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-cbdc-account-create-jrcz2" event={"ID":"c222456d-86d0-45ea-aace-e359a8f0b78a","Type":"ContainerDied","Data":"c513375b2ef7559dfd24d3717a6798e006453d37a41dd8dec116b64dc8655e8a"} Oct 10 16:49:51 crc kubenswrapper[4799]: I1010 16:49:51.008700 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c513375b2ef7559dfd24d3717a6798e006453d37a41dd8dec116b64dc8655e8a" Oct 10 16:49:51 crc kubenswrapper[4799]: I1010 16:49:51.009958 4799 generic.go:334] "Generic (PLEG): container finished" podID="7858ee88-c7b9-4fb7-b825-569154134201" containerID="530054c73abfb931af9932880c554d60aaa19e406d6b80e4c78cfa9e40a7c9a7" exitCode=0 Oct 10 16:49:51 crc kubenswrapper[4799]: I1010 16:49:51.009990 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7858ee88-c7b9-4fb7-b825-569154134201","Type":"ContainerDied","Data":"530054c73abfb931af9932880c554d60aaa19e406d6b80e4c78cfa9e40a7c9a7"} Oct 10 16:49:52 crc kubenswrapper[4799]: I1010 16:49:52.018348 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7858ee88-c7b9-4fb7-b825-569154134201","Type":"ContainerStarted","Data":"5618f2fc1181b1ea35b16860a7cfaa8ed80f4249c7cd93a63b30a487631e90ec"} Oct 10 16:49:52 crc kubenswrapper[4799]: I1010 16:49:52.018977 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:49:52 crc kubenswrapper[4799]: I1010 16:49:52.020133 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9fd6f03f-abea-4c29-8060-0705bb0af2c7","Type":"ContainerStarted","Data":"22fa1f105dd6a9317c7bfadf3f75a8ae31d05888ae18ed6871e94cd9824e1b96"} Oct 10 16:49:52 crc kubenswrapper[4799]: I1010 16:49:52.020269 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 10 16:49:52 crc kubenswrapper[4799]: I1010 16:49:52.022090 4799 generic.go:334] "Generic (PLEG): container finished" podID="d9956afc-c1f0-42b9-929b-596957f7c72f" containerID="714553f7ebe432f9fd4eeeae273403d9c3ff673b9a55481665624e3067441b8c" exitCode=0 Oct 10 16:49:52 crc kubenswrapper[4799]: I1010 16:49:52.022170 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-46sn9" event={"ID":"d9956afc-c1f0-42b9-929b-596957f7c72f","Type":"ContainerDied","Data":"714553f7ebe432f9fd4eeeae273403d9c3ff673b9a55481665624e3067441b8c"} Oct 10 16:49:52 crc kubenswrapper[4799]: I1010 16:49:52.050777 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=41.306334973 podStartE2EDuration="57.050735223s" podCreationTimestamp="2025-10-10 16:48:55 +0000 UTC" firstStartedPulling="2025-10-10 16:49:01.272860647 +0000 UTC m=+1034.781184762" lastFinishedPulling="2025-10-10 16:49:17.017260887 +0000 UTC m=+1050.525585012" observedRunningTime="2025-10-10 16:49:52.044495259 +0000 UTC m=+1085.552819384" watchObservedRunningTime="2025-10-10 16:49:52.050735223 +0000 UTC m=+1085.559059338" Oct 10 16:49:52 crc kubenswrapper[4799]: I1010 16:49:52.092358 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.853575242 
podStartE2EDuration="58.092334491s" podCreationTimestamp="2025-10-10 16:48:54 +0000 UTC" firstStartedPulling="2025-10-10 16:48:56.736561282 +0000 UTC m=+1030.244885397" lastFinishedPulling="2025-10-10 16:49:16.975320501 +0000 UTC m=+1050.483644646" observedRunningTime="2025-10-10 16:49:52.084773054 +0000 UTC m=+1085.593097179" watchObservedRunningTime="2025-10-10 16:49:52.092334491 +0000 UTC m=+1085.600658626" Oct 10 16:49:52 crc kubenswrapper[4799]: I1010 16:49:52.461087 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-2b60-account-create-88ssw" Oct 10 16:49:52 crc kubenswrapper[4799]: I1010 16:49:52.618544 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cktsz\" (UniqueName: \"kubernetes.io/projected/108ef7b4-9cd2-41c6-b4d4-6f84d0cc2f64-kube-api-access-cktsz\") pod \"108ef7b4-9cd2-41c6-b4d4-6f84d0cc2f64\" (UID: \"108ef7b4-9cd2-41c6-b4d4-6f84d0cc2f64\") " Oct 10 16:49:52 crc kubenswrapper[4799]: I1010 16:49:52.623613 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/108ef7b4-9cd2-41c6-b4d4-6f84d0cc2f64-kube-api-access-cktsz" (OuterVolumeSpecName: "kube-api-access-cktsz") pod "108ef7b4-9cd2-41c6-b4d4-6f84d0cc2f64" (UID: "108ef7b4-9cd2-41c6-b4d4-6f84d0cc2f64"). InnerVolumeSpecName "kube-api-access-cktsz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:49:52 crc kubenswrapper[4799]: I1010 16:49:52.721372 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cktsz\" (UniqueName: \"kubernetes.io/projected/108ef7b4-9cd2-41c6-b4d4-6f84d0cc2f64-kube-api-access-cktsz\") on node \"crc\" DevicePath \"\"" Oct 10 16:49:53 crc kubenswrapper[4799]: I1010 16:49:53.031086 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-2b60-account-create-88ssw" event={"ID":"108ef7b4-9cd2-41c6-b4d4-6f84d0cc2f64","Type":"ContainerDied","Data":"c964dfbe0d8121658d8a6ba20f8b7c0b41d770bc1abef25b08da38aa3a7284d3"} Oct 10 16:49:53 crc kubenswrapper[4799]: I1010 16:49:53.031157 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c964dfbe0d8121658d8a6ba20f8b7c0b41d770bc1abef25b08da38aa3a7284d3" Oct 10 16:49:53 crc kubenswrapper[4799]: I1010 16:49:53.031157 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-2b60-account-create-88ssw" Oct 10 16:49:53 crc kubenswrapper[4799]: I1010 16:49:53.416636 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-46sn9" Oct 10 16:49:53 crc kubenswrapper[4799]: I1010 16:49:53.537992 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d9956afc-c1f0-42b9-929b-596957f7c72f-swiftconf\") pod \"d9956afc-c1f0-42b9-929b-596957f7c72f\" (UID: \"d9956afc-c1f0-42b9-929b-596957f7c72f\") " Oct 10 16:49:53 crc kubenswrapper[4799]: I1010 16:49:53.538668 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d9956afc-c1f0-42b9-929b-596957f7c72f-scripts\") pod \"d9956afc-c1f0-42b9-929b-596957f7c72f\" (UID: \"d9956afc-c1f0-42b9-929b-596957f7c72f\") " Oct 10 16:49:53 crc kubenswrapper[4799]: I1010 16:49:53.538795 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d9956afc-c1f0-42b9-929b-596957f7c72f-dispersionconf\") pod \"d9956afc-c1f0-42b9-929b-596957f7c72f\" (UID: \"d9956afc-c1f0-42b9-929b-596957f7c72f\") " Oct 10 16:49:53 crc kubenswrapper[4799]: I1010 16:49:53.538929 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lp74n\" (UniqueName: \"kubernetes.io/projected/d9956afc-c1f0-42b9-929b-596957f7c72f-kube-api-access-lp74n\") pod \"d9956afc-c1f0-42b9-929b-596957f7c72f\" (UID: \"d9956afc-c1f0-42b9-929b-596957f7c72f\") " Oct 10 16:49:53 crc kubenswrapper[4799]: I1010 16:49:53.538953 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9956afc-c1f0-42b9-929b-596957f7c72f-combined-ca-bundle\") pod \"d9956afc-c1f0-42b9-929b-596957f7c72f\" (UID: \"d9956afc-c1f0-42b9-929b-596957f7c72f\") " Oct 10 16:49:53 crc kubenswrapper[4799]: I1010 16:49:53.538994 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d9956afc-c1f0-42b9-929b-596957f7c72f-etc-swift\") pod \"d9956afc-c1f0-42b9-929b-596957f7c72f\" (UID: \"d9956afc-c1f0-42b9-929b-596957f7c72f\") " Oct 10 16:49:53 crc kubenswrapper[4799]: I1010 16:49:53.539040 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d9956afc-c1f0-42b9-929b-596957f7c72f-ring-data-devices\") pod \"d9956afc-c1f0-42b9-929b-596957f7c72f\" (UID: \"d9956afc-c1f0-42b9-929b-596957f7c72f\") " Oct 10 16:49:53 crc kubenswrapper[4799]: I1010 16:49:53.540326 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9956afc-c1f0-42b9-929b-596957f7c72f-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "d9956afc-c1f0-42b9-929b-596957f7c72f" (UID: "d9956afc-c1f0-42b9-929b-596957f7c72f"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:49:53 crc kubenswrapper[4799]: I1010 16:49:53.541816 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9956afc-c1f0-42b9-929b-596957f7c72f-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "d9956afc-c1f0-42b9-929b-596957f7c72f" (UID: "d9956afc-c1f0-42b9-929b-596957f7c72f"). InnerVolumeSpecName "ring-data-devices". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:49:53 crc kubenswrapper[4799]: I1010 16:49:53.548004 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9956afc-c1f0-42b9-929b-596957f7c72f-kube-api-access-lp74n" (OuterVolumeSpecName: "kube-api-access-lp74n") pod "d9956afc-c1f0-42b9-929b-596957f7c72f" (UID: "d9956afc-c1f0-42b9-929b-596957f7c72f"). InnerVolumeSpecName "kube-api-access-lp74n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:49:53 crc kubenswrapper[4799]: I1010 16:49:53.559977 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9956afc-c1f0-42b9-929b-596957f7c72f-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "d9956afc-c1f0-42b9-929b-596957f7c72f" (UID: "d9956afc-c1f0-42b9-929b-596957f7c72f"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:49:53 crc kubenswrapper[4799]: I1010 16:49:53.571022 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9956afc-c1f0-42b9-929b-596957f7c72f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d9956afc-c1f0-42b9-929b-596957f7c72f" (UID: "d9956afc-c1f0-42b9-929b-596957f7c72f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:49:53 crc kubenswrapper[4799]: I1010 16:49:53.575820 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9956afc-c1f0-42b9-929b-596957f7c72f-scripts" (OuterVolumeSpecName: "scripts") pod "d9956afc-c1f0-42b9-929b-596957f7c72f" (UID: "d9956afc-c1f0-42b9-929b-596957f7c72f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:49:53 crc kubenswrapper[4799]: I1010 16:49:53.577859 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9956afc-c1f0-42b9-929b-596957f7c72f-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "d9956afc-c1f0-42b9-929b-596957f7c72f" (UID: "d9956afc-c1f0-42b9-929b-596957f7c72f"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:49:53 crc kubenswrapper[4799]: I1010 16:49:53.641408 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lp74n\" (UniqueName: \"kubernetes.io/projected/d9956afc-c1f0-42b9-929b-596957f7c72f-kube-api-access-lp74n\") on node \"crc\" DevicePath \"\"" Oct 10 16:49:53 crc kubenswrapper[4799]: I1010 16:49:53.641463 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9956afc-c1f0-42b9-929b-596957f7c72f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:49:53 crc kubenswrapper[4799]: I1010 16:49:53.641478 4799 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d9956afc-c1f0-42b9-929b-596957f7c72f-etc-swift\") on node \"crc\" DevicePath \"\"" Oct 10 16:49:53 crc kubenswrapper[4799]: I1010 16:49:53.641491 4799 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d9956afc-c1f0-42b9-929b-596957f7c72f-ring-data-devices\") on node \"crc\" DevicePath \"\"" Oct 10 16:49:53 crc kubenswrapper[4799]: I1010 16:49:53.641505 4799 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d9956afc-c1f0-42b9-929b-596957f7c72f-swiftconf\") on node \"crc\" DevicePath \"\"" Oct 10 16:49:53 crc kubenswrapper[4799]: I1010 16:49:53.641518 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d9956afc-c1f0-42b9-929b-596957f7c72f-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:49:53 crc kubenswrapper[4799]: I1010 16:49:53.641530 4799 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d9956afc-c1f0-42b9-929b-596957f7c72f-dispersionconf\") on node \"crc\" DevicePath \"\"" Oct 10 16:49:54 crc kubenswrapper[4799]: I1010 16:49:54.040888 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-46sn9" event={"ID":"d9956afc-c1f0-42b9-929b-596957f7c72f","Type":"ContainerDied","Data":"c09d8a36f2821896a8c018e009b53bae101e1bce75260fff6848701bd8db5533"} Oct 10 16:49:54 crc kubenswrapper[4799]: I1010 16:49:54.040917 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-46sn9" Oct 10 16:49:54 crc kubenswrapper[4799]: I1010 16:49:54.040930 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c09d8a36f2821896a8c018e009b53bae101e1bce75260fff6848701bd8db5533" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.356515 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-dtplc" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.408890 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-s8gsd" podUID="038759ba-4122-4104-8699-81c76590eb2b" containerName="ovn-controller" probeResult="failure" output=< Oct 10 16:49:55 crc kubenswrapper[4799]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Oct 10 16:49:55 crc kubenswrapper[4799]: > Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.423320 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-dtplc" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.628239 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-s8gsd-config-b7sm2"] Oct 10 16:49:55 crc kubenswrapper[4799]: E1010 16:49:55.628799 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="108ef7b4-9cd2-41c6-b4d4-6f84d0cc2f64" containerName="mariadb-account-create" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.628822 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="108ef7b4-9cd2-41c6-b4d4-6f84d0cc2f64" containerName="mariadb-account-create" Oct 10 16:49:55 crc kubenswrapper[4799]: E1010 16:49:55.628843 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8634816-4d14-4ca6-87b4-a99d49fba1f2" containerName="init" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.628855 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8634816-4d14-4ca6-87b4-a99d49fba1f2" containerName="init" Oct 10 16:49:55 crc kubenswrapper[4799]: E1010 16:49:55.628872 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c222456d-86d0-45ea-aace-e359a8f0b78a" containerName="mariadb-account-create" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.628879 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="c222456d-86d0-45ea-aace-e359a8f0b78a" containerName="mariadb-account-create" Oct 10 16:49:55 crc kubenswrapper[4799]: E1010 16:49:55.628891 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e585e37-862b-4a19-bb06-36f89755f4e0" containerName="mariadb-database-create" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.628897 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e585e37-862b-4a19-bb06-36f89755f4e0" containerName="mariadb-database-create" Oct 10 16:49:55 crc kubenswrapper[4799]: E1010 16:49:55.628913 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8634816-4d14-4ca6-87b4-a99d49fba1f2" containerName="dnsmasq-dns" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.628919 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8634816-4d14-4ca6-87b4-a99d49fba1f2" containerName="dnsmasq-dns" Oct 10 16:49:55 crc kubenswrapper[4799]: E1010 16:49:55.628930 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9956afc-c1f0-42b9-929b-596957f7c72f" containerName="swift-ring-rebalance" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.628937 4799 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="d9956afc-c1f0-42b9-929b-596957f7c72f" containerName="swift-ring-rebalance" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.629137 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="108ef7b4-9cd2-41c6-b4d4-6f84d0cc2f64" containerName="mariadb-account-create" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.629184 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8634816-4d14-4ca6-87b4-a99d49fba1f2" containerName="dnsmasq-dns" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.629205 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e585e37-862b-4a19-bb06-36f89755f4e0" containerName="mariadb-database-create" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.629223 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9956afc-c1f0-42b9-929b-596957f7c72f" containerName="swift-ring-rebalance" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.629237 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="c222456d-86d0-45ea-aace-e359a8f0b78a" containerName="mariadb-account-create" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.630174 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-s8gsd-config-b7sm2" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.648104 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.649436 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-v8rpx"] Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.651219 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-v8rpx" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.653033 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-l4znb" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.655875 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.676228 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-s8gsd-config-b7sm2"] Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.680961 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-v8rpx"] Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.781726 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-var-log-ovn\") pod \"ovn-controller-s8gsd-config-b7sm2\" (UID: \"cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1\") " pod="openstack/ovn-controller-s8gsd-config-b7sm2" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.782090 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4e0d692a-cc78-4807-a2a3-5b39c5729ee6-db-sync-config-data\") pod \"glance-db-sync-v8rpx\" (UID: \"4e0d692a-cc78-4807-a2a3-5b39c5729ee6\") " pod="openstack/glance-db-sync-v8rpx" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.782121 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnpwq\" (UniqueName: 
\"kubernetes.io/projected/4e0d692a-cc78-4807-a2a3-5b39c5729ee6-kube-api-access-hnpwq\") pod \"glance-db-sync-v8rpx\" (UID: \"4e0d692a-cc78-4807-a2a3-5b39c5729ee6\") " pod="openstack/glance-db-sync-v8rpx" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.782164 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-additional-scripts\") pod \"ovn-controller-s8gsd-config-b7sm2\" (UID: \"cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1\") " pod="openstack/ovn-controller-s8gsd-config-b7sm2" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.782193 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e0d692a-cc78-4807-a2a3-5b39c5729ee6-combined-ca-bundle\") pod \"glance-db-sync-v8rpx\" (UID: \"4e0d692a-cc78-4807-a2a3-5b39c5729ee6\") " pod="openstack/glance-db-sync-v8rpx" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.782234 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-scripts\") pod \"ovn-controller-s8gsd-config-b7sm2\" (UID: \"cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1\") " pod="openstack/ovn-controller-s8gsd-config-b7sm2" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.782269 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-var-run\") pod \"ovn-controller-s8gsd-config-b7sm2\" (UID: \"cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1\") " pod="openstack/ovn-controller-s8gsd-config-b7sm2" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.782300 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-var-run-ovn\") pod \"ovn-controller-s8gsd-config-b7sm2\" (UID: \"cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1\") " pod="openstack/ovn-controller-s8gsd-config-b7sm2" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.782361 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkb9w\" (UniqueName: \"kubernetes.io/projected/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-kube-api-access-kkb9w\") pod \"ovn-controller-s8gsd-config-b7sm2\" (UID: \"cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1\") " pod="openstack/ovn-controller-s8gsd-config-b7sm2" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.782383 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e0d692a-cc78-4807-a2a3-5b39c5729ee6-config-data\") pod \"glance-db-sync-v8rpx\" (UID: \"4e0d692a-cc78-4807-a2a3-5b39c5729ee6\") " pod="openstack/glance-db-sync-v8rpx" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.885595 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-var-log-ovn\") pod \"ovn-controller-s8gsd-config-b7sm2\" (UID: \"cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1\") " pod="openstack/ovn-controller-s8gsd-config-b7sm2" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.885102 4799 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-var-log-ovn\") pod \"ovn-controller-s8gsd-config-b7sm2\" (UID: \"cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1\") " pod="openstack/ovn-controller-s8gsd-config-b7sm2" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.886901 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4e0d692a-cc78-4807-a2a3-5b39c5729ee6-db-sync-config-data\") pod \"glance-db-sync-v8rpx\" (UID: \"4e0d692a-cc78-4807-a2a3-5b39c5729ee6\") " pod="openstack/glance-db-sync-v8rpx" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.886954 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnpwq\" (UniqueName: \"kubernetes.io/projected/4e0d692a-cc78-4807-a2a3-5b39c5729ee6-kube-api-access-hnpwq\") pod \"glance-db-sync-v8rpx\" (UID: \"4e0d692a-cc78-4807-a2a3-5b39c5729ee6\") " pod="openstack/glance-db-sync-v8rpx" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.887014 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-additional-scripts\") pod \"ovn-controller-s8gsd-config-b7sm2\" (UID: \"cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1\") " pod="openstack/ovn-controller-s8gsd-config-b7sm2" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.887043 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e0d692a-cc78-4807-a2a3-5b39c5729ee6-combined-ca-bundle\") pod \"glance-db-sync-v8rpx\" (UID: \"4e0d692a-cc78-4807-a2a3-5b39c5729ee6\") " pod="openstack/glance-db-sync-v8rpx" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.887097 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-scripts\") pod \"ovn-controller-s8gsd-config-b7sm2\" (UID: \"cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1\") " pod="openstack/ovn-controller-s8gsd-config-b7sm2" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.887145 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-var-run\") pod \"ovn-controller-s8gsd-config-b7sm2\" (UID: \"cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1\") " pod="openstack/ovn-controller-s8gsd-config-b7sm2" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.887189 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-var-run-ovn\") pod \"ovn-controller-s8gsd-config-b7sm2\" (UID: \"cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1\") " pod="openstack/ovn-controller-s8gsd-config-b7sm2" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.887269 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkb9w\" (UniqueName: \"kubernetes.io/projected/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-kube-api-access-kkb9w\") pod \"ovn-controller-s8gsd-config-b7sm2\" (UID: \"cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1\") " pod="openstack/ovn-controller-s8gsd-config-b7sm2" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.887291 4799 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e0d692a-cc78-4807-a2a3-5b39c5729ee6-config-data\") pod \"glance-db-sync-v8rpx\" (UID: \"4e0d692a-cc78-4807-a2a3-5b39c5729ee6\") " pod="openstack/glance-db-sync-v8rpx" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.888663 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-additional-scripts\") pod \"ovn-controller-s8gsd-config-b7sm2\" (UID: \"cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1\") " pod="openstack/ovn-controller-s8gsd-config-b7sm2" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.889177 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-var-run-ovn\") pod \"ovn-controller-s8gsd-config-b7sm2\" (UID: \"cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1\") " pod="openstack/ovn-controller-s8gsd-config-b7sm2" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.889256 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-var-run\") pod \"ovn-controller-s8gsd-config-b7sm2\" (UID: \"cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1\") " pod="openstack/ovn-controller-s8gsd-config-b7sm2" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.890210 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-scripts\") pod \"ovn-controller-s8gsd-config-b7sm2\" (UID: \"cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1\") " pod="openstack/ovn-controller-s8gsd-config-b7sm2" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.893413 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4e0d692a-cc78-4807-a2a3-5b39c5729ee6-db-sync-config-data\") pod \"glance-db-sync-v8rpx\" (UID: \"4e0d692a-cc78-4807-a2a3-5b39c5729ee6\") " pod="openstack/glance-db-sync-v8rpx" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.893945 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e0d692a-cc78-4807-a2a3-5b39c5729ee6-config-data\") pod \"glance-db-sync-v8rpx\" (UID: \"4e0d692a-cc78-4807-a2a3-5b39c5729ee6\") " pod="openstack/glance-db-sync-v8rpx" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.906886 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e0d692a-cc78-4807-a2a3-5b39c5729ee6-combined-ca-bundle\") pod \"glance-db-sync-v8rpx\" (UID: \"4e0d692a-cc78-4807-a2a3-5b39c5729ee6\") " pod="openstack/glance-db-sync-v8rpx" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.914319 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkb9w\" (UniqueName: \"kubernetes.io/projected/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-kube-api-access-kkb9w\") pod \"ovn-controller-s8gsd-config-b7sm2\" (UID: \"cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1\") " pod="openstack/ovn-controller-s8gsd-config-b7sm2" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.925985 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnpwq\" (UniqueName: \"kubernetes.io/projected/4e0d692a-cc78-4807-a2a3-5b39c5729ee6-kube-api-access-hnpwq\") pod 
\"glance-db-sync-v8rpx\" (UID: \"4e0d692a-cc78-4807-a2a3-5b39c5729ee6\") " pod="openstack/glance-db-sync-v8rpx" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.946382 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-s8gsd-config-b7sm2" Oct 10 16:49:55 crc kubenswrapper[4799]: I1010 16:49:55.977206 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-v8rpx" Oct 10 16:49:56 crc kubenswrapper[4799]: I1010 16:49:56.258170 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-s8gsd-config-b7sm2"] Oct 10 16:49:56 crc kubenswrapper[4799]: W1010 16:49:56.259301 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcdd3fea6_2a66_4fc2_8292_7bf68c32b2c1.slice/crio-82b43b118e79724228dbd6249a5bb38b2a523f2f7869aa09ae865202f162ffcd WatchSource:0}: Error finding container 82b43b118e79724228dbd6249a5bb38b2a523f2f7869aa09ae865202f162ffcd: Status 404 returned error can't find the container with id 82b43b118e79724228dbd6249a5bb38b2a523f2f7869aa09ae865202f162ffcd Oct 10 16:49:56 crc kubenswrapper[4799]: I1010 16:49:56.609152 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-v8rpx"] Oct 10 16:49:56 crc kubenswrapper[4799]: W1010 16:49:56.612950 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4e0d692a_cc78_4807_a2a3_5b39c5729ee6.slice/crio-5671bafb0c1422e6dc132f4ba3fdb76dd5e6588466815efdd5839f48f12d34cd WatchSource:0}: Error finding container 5671bafb0c1422e6dc132f4ba3fdb76dd5e6588466815efdd5839f48f12d34cd: Status 404 returned error can't find the container with id 5671bafb0c1422e6dc132f4ba3fdb76dd5e6588466815efdd5839f48f12d34cd Oct 10 16:49:57 crc kubenswrapper[4799]: I1010 16:49:57.072982 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-v8rpx" event={"ID":"4e0d692a-cc78-4807-a2a3-5b39c5729ee6","Type":"ContainerStarted","Data":"5671bafb0c1422e6dc132f4ba3fdb76dd5e6588466815efdd5839f48f12d34cd"} Oct 10 16:49:57 crc kubenswrapper[4799]: I1010 16:49:57.075540 4799 generic.go:334] "Generic (PLEG): container finished" podID="cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1" containerID="8ed2fd28620331dafe1aae7fcdf98ed40484c59fd04f142204fdfc59012c7cc2" exitCode=0 Oct 10 16:49:57 crc kubenswrapper[4799]: I1010 16:49:57.075570 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-s8gsd-config-b7sm2" event={"ID":"cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1","Type":"ContainerDied","Data":"8ed2fd28620331dafe1aae7fcdf98ed40484c59fd04f142204fdfc59012c7cc2"} Oct 10 16:49:57 crc kubenswrapper[4799]: I1010 16:49:57.075587 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-s8gsd-config-b7sm2" event={"ID":"cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1","Type":"ContainerStarted","Data":"82b43b118e79724228dbd6249a5bb38b2a523f2f7869aa09ae865202f162ffcd"} Oct 10 16:49:58 crc kubenswrapper[4799]: I1010 16:49:58.450581 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-s8gsd-config-b7sm2" Oct 10 16:49:58 crc kubenswrapper[4799]: I1010 16:49:58.550349 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-scripts\") pod \"cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1\" (UID: \"cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1\") " Oct 10 16:49:58 crc kubenswrapper[4799]: I1010 16:49:58.550416 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-additional-scripts\") pod \"cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1\" (UID: \"cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1\") " Oct 10 16:49:58 crc kubenswrapper[4799]: I1010 16:49:58.550505 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-var-log-ovn\") pod \"cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1\" (UID: \"cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1\") " Oct 10 16:49:58 crc kubenswrapper[4799]: I1010 16:49:58.550655 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kkb9w\" (UniqueName: \"kubernetes.io/projected/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-kube-api-access-kkb9w\") pod \"cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1\" (UID: \"cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1\") " Oct 10 16:49:58 crc kubenswrapper[4799]: I1010 16:49:58.550690 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-var-run-ovn\") pod \"cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1\" (UID: \"cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1\") " Oct 10 16:49:58 crc kubenswrapper[4799]: I1010 16:49:58.550727 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-var-run\") pod \"cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1\" (UID: \"cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1\") " Oct 10 16:49:58 crc kubenswrapper[4799]: I1010 16:49:58.550751 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1" (UID: "cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 16:49:58 crc kubenswrapper[4799]: I1010 16:49:58.550858 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1" (UID: "cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 16:49:58 crc kubenswrapper[4799]: I1010 16:49:58.551000 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-var-run" (OuterVolumeSpecName: "var-run") pod "cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1" (UID: "cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 16:49:58 crc kubenswrapper[4799]: I1010 16:49:58.551548 4799 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-var-log-ovn\") on node \"crc\" DevicePath \"\"" Oct 10 16:49:58 crc kubenswrapper[4799]: I1010 16:49:58.551584 4799 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-var-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 10 16:49:58 crc kubenswrapper[4799]: I1010 16:49:58.551603 4799 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-var-run\") on node \"crc\" DevicePath \"\"" Oct 10 16:49:58 crc kubenswrapper[4799]: I1010 16:49:58.551984 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-scripts" (OuterVolumeSpecName: "scripts") pod "cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1" (UID: "cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:49:58 crc kubenswrapper[4799]: I1010 16:49:58.552435 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1" (UID: "cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:49:58 crc kubenswrapper[4799]: I1010 16:49:58.570640 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-kube-api-access-kkb9w" (OuterVolumeSpecName: "kube-api-access-kkb9w") pod "cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1" (UID: "cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1"). InnerVolumeSpecName "kube-api-access-kkb9w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:49:58 crc kubenswrapper[4799]: I1010 16:49:58.653126 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:49:58 crc kubenswrapper[4799]: I1010 16:49:58.653162 4799 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-additional-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:49:58 crc kubenswrapper[4799]: I1010 16:49:58.653190 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kkb9w\" (UniqueName: \"kubernetes.io/projected/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1-kube-api-access-kkb9w\") on node \"crc\" DevicePath \"\"" Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.094910 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-s8gsd-config-b7sm2" event={"ID":"cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1","Type":"ContainerDied","Data":"82b43b118e79724228dbd6249a5bb38b2a523f2f7869aa09ae865202f162ffcd"} Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.094999 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-s8gsd-config-b7sm2" Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.094997 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="82b43b118e79724228dbd6249a5bb38b2a523f2f7869aa09ae865202f162ffcd" Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.575878 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-s8gsd-config-b7sm2"] Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.590640 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-s8gsd-config-b7sm2"] Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.656056 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-s8gsd-config-qcrrx"] Oct 10 16:49:59 crc kubenswrapper[4799]: E1010 16:49:59.656454 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1" containerName="ovn-config" Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.656478 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1" containerName="ovn-config" Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.656675 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1" containerName="ovn-config" Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.660035 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-s8gsd-config-qcrrx" Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.662813 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.666949 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-s8gsd-config-qcrrx"] Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.741409 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cab5-account-create-xs7tg"] Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.742806 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cab5-account-create-xs7tg" Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.744922 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.747684 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cab5-account-create-xs7tg"] Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.776265 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-scripts\") pod \"ovn-controller-s8gsd-config-qcrrx\" (UID: \"4bf47326-cb7c-46c7-812f-2c3bc3cef12d\") " pod="openstack/ovn-controller-s8gsd-config-qcrrx" Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.776375 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zzcq\" (UniqueName: \"kubernetes.io/projected/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-kube-api-access-8zzcq\") pod \"ovn-controller-s8gsd-config-qcrrx\" (UID: \"4bf47326-cb7c-46c7-812f-2c3bc3cef12d\") " pod="openstack/ovn-controller-s8gsd-config-qcrrx" Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.776429 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-var-run\") pod \"ovn-controller-s8gsd-config-qcrrx\" (UID: \"4bf47326-cb7c-46c7-812f-2c3bc3cef12d\") " pod="openstack/ovn-controller-s8gsd-config-qcrrx" Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.776479 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-var-log-ovn\") pod \"ovn-controller-s8gsd-config-qcrrx\" (UID: \"4bf47326-cb7c-46c7-812f-2c3bc3cef12d\") " pod="openstack/ovn-controller-s8gsd-config-qcrrx" Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.776511 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-var-run-ovn\") pod \"ovn-controller-s8gsd-config-qcrrx\" (UID: \"4bf47326-cb7c-46c7-812f-2c3bc3cef12d\") " pod="openstack/ovn-controller-s8gsd-config-qcrrx" Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.776603 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-additional-scripts\") pod \"ovn-controller-s8gsd-config-qcrrx\" (UID: \"4bf47326-cb7c-46c7-812f-2c3bc3cef12d\") " pod="openstack/ovn-controller-s8gsd-config-qcrrx" Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.878973 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zzcq\" (UniqueName: \"kubernetes.io/projected/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-kube-api-access-8zzcq\") pod \"ovn-controller-s8gsd-config-qcrrx\" (UID: \"4bf47326-cb7c-46c7-812f-2c3bc3cef12d\") " pod="openstack/ovn-controller-s8gsd-config-qcrrx" Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.879058 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-var-run\") 
pod \"ovn-controller-s8gsd-config-qcrrx\" (UID: \"4bf47326-cb7c-46c7-812f-2c3bc3cef12d\") " pod="openstack/ovn-controller-s8gsd-config-qcrrx" Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.879377 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-var-run\") pod \"ovn-controller-s8gsd-config-qcrrx\" (UID: \"4bf47326-cb7c-46c7-812f-2c3bc3cef12d\") " pod="openstack/ovn-controller-s8gsd-config-qcrrx" Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.879626 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-var-log-ovn\") pod \"ovn-controller-s8gsd-config-qcrrx\" (UID: \"4bf47326-cb7c-46c7-812f-2c3bc3cef12d\") " pod="openstack/ovn-controller-s8gsd-config-qcrrx" Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.879856 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-var-log-ovn\") pod \"ovn-controller-s8gsd-config-qcrrx\" (UID: \"4bf47326-cb7c-46c7-812f-2c3bc3cef12d\") " pod="openstack/ovn-controller-s8gsd-config-qcrrx" Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.880030 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dlkf9\" (UniqueName: \"kubernetes.io/projected/730fe650-110f-4bae-8f7d-6b3d6755b4f8-kube-api-access-dlkf9\") pod \"keystone-cab5-account-create-xs7tg\" (UID: \"730fe650-110f-4bae-8f7d-6b3d6755b4f8\") " pod="openstack/keystone-cab5-account-create-xs7tg" Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.880204 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-var-run-ovn\") pod \"ovn-controller-s8gsd-config-qcrrx\" (UID: \"4bf47326-cb7c-46c7-812f-2c3bc3cef12d\") " pod="openstack/ovn-controller-s8gsd-config-qcrrx" Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.880337 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-additional-scripts\") pod \"ovn-controller-s8gsd-config-qcrrx\" (UID: \"4bf47326-cb7c-46c7-812f-2c3bc3cef12d\") " pod="openstack/ovn-controller-s8gsd-config-qcrrx" Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.880368 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-var-run-ovn\") pod \"ovn-controller-s8gsd-config-qcrrx\" (UID: \"4bf47326-cb7c-46c7-812f-2c3bc3cef12d\") " pod="openstack/ovn-controller-s8gsd-config-qcrrx" Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.880554 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-scripts\") pod \"ovn-controller-s8gsd-config-qcrrx\" (UID: \"4bf47326-cb7c-46c7-812f-2c3bc3cef12d\") " pod="openstack/ovn-controller-s8gsd-config-qcrrx" Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.882312 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: 
\"kubernetes.io/configmap/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-additional-scripts\") pod \"ovn-controller-s8gsd-config-qcrrx\" (UID: \"4bf47326-cb7c-46c7-812f-2c3bc3cef12d\") " pod="openstack/ovn-controller-s8gsd-config-qcrrx" Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.885882 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-scripts\") pod \"ovn-controller-s8gsd-config-qcrrx\" (UID: \"4bf47326-cb7c-46c7-812f-2c3bc3cef12d\") " pod="openstack/ovn-controller-s8gsd-config-qcrrx" Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.905656 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zzcq\" (UniqueName: \"kubernetes.io/projected/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-kube-api-access-8zzcq\") pod \"ovn-controller-s8gsd-config-qcrrx\" (UID: \"4bf47326-cb7c-46c7-812f-2c3bc3cef12d\") " pod="openstack/ovn-controller-s8gsd-config-qcrrx" Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.982604 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dlkf9\" (UniqueName: \"kubernetes.io/projected/730fe650-110f-4bae-8f7d-6b3d6755b4f8-kube-api-access-dlkf9\") pod \"keystone-cab5-account-create-xs7tg\" (UID: \"730fe650-110f-4bae-8f7d-6b3d6755b4f8\") " pod="openstack/keystone-cab5-account-create-xs7tg" Oct 10 16:49:59 crc kubenswrapper[4799]: I1010 16:49:59.982962 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-s8gsd-config-qcrrx" Oct 10 16:50:00 crc kubenswrapper[4799]: I1010 16:50:00.001210 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dlkf9\" (UniqueName: \"kubernetes.io/projected/730fe650-110f-4bae-8f7d-6b3d6755b4f8-kube-api-access-dlkf9\") pod \"keystone-cab5-account-create-xs7tg\" (UID: \"730fe650-110f-4bae-8f7d-6b3d6755b4f8\") " pod="openstack/keystone-cab5-account-create-xs7tg" Oct 10 16:50:00 crc kubenswrapper[4799]: I1010 16:50:00.058601 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cab5-account-create-xs7tg" Oct 10 16:50:00 crc kubenswrapper[4799]: I1010 16:50:00.376804 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-s8gsd" Oct 10 16:50:01 crc kubenswrapper[4799]: I1010 16:50:01.283973 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-s8gsd-config-qcrrx"] Oct 10 16:50:01 crc kubenswrapper[4799]: W1010 16:50:01.297675 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod730fe650_110f_4bae_8f7d_6b3d6755b4f8.slice/crio-ed253559d1ff896d6d8de99fad781a6566f15ca1d6ed69cd74706d6722ebadbf WatchSource:0}: Error finding container ed253559d1ff896d6d8de99fad781a6566f15ca1d6ed69cd74706d6722ebadbf: Status 404 returned error can't find the container with id ed253559d1ff896d6d8de99fad781a6566f15ca1d6ed69cd74706d6722ebadbf Oct 10 16:50:01 crc kubenswrapper[4799]: I1010 16:50:01.309238 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cab5-account-create-xs7tg"] Oct 10 16:50:01 crc kubenswrapper[4799]: I1010 16:50:01.416325 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1" path="/var/lib/kubelet/pods/cdd3fea6-2a66-4fc2-8292-7bf68c32b2c1/volumes" Oct 10 16:50:02 crc kubenswrapper[4799]: I1010 16:50:02.133126 4799 generic.go:334] "Generic (PLEG): container finished" podID="4bf47326-cb7c-46c7-812f-2c3bc3cef12d" containerID="a771d17cd9b8383520937bd5d1de25a135fd8d690577e07820b96a54f49d1faa" exitCode=0 Oct 10 16:50:02 crc kubenswrapper[4799]: I1010 16:50:02.133195 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-s8gsd-config-qcrrx" event={"ID":"4bf47326-cb7c-46c7-812f-2c3bc3cef12d","Type":"ContainerDied","Data":"a771d17cd9b8383520937bd5d1de25a135fd8d690577e07820b96a54f49d1faa"} Oct 10 16:50:02 crc kubenswrapper[4799]: I1010 16:50:02.133486 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-s8gsd-config-qcrrx" event={"ID":"4bf47326-cb7c-46c7-812f-2c3bc3cef12d","Type":"ContainerStarted","Data":"cce4c811a2a6a29bdd2f138a427cd80030825e13bd6115743f0c42523b34ac34"} Oct 10 16:50:02 crc kubenswrapper[4799]: I1010 16:50:02.136070 4799 generic.go:334] "Generic (PLEG): container finished" podID="730fe650-110f-4bae-8f7d-6b3d6755b4f8" containerID="ccf69d61dbdbccfd51b38731cf2fa1a5da19e5eec374fe7dcb94d8b10259b899" exitCode=0 Oct 10 16:50:02 crc kubenswrapper[4799]: I1010 16:50:02.136123 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cab5-account-create-xs7tg" event={"ID":"730fe650-110f-4bae-8f7d-6b3d6755b4f8","Type":"ContainerDied","Data":"ccf69d61dbdbccfd51b38731cf2fa1a5da19e5eec374fe7dcb94d8b10259b899"} Oct 10 16:50:02 crc kubenswrapper[4799]: I1010 16:50:02.136152 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cab5-account-create-xs7tg" event={"ID":"730fe650-110f-4bae-8f7d-6b3d6755b4f8","Type":"ContainerStarted","Data":"ed253559d1ff896d6d8de99fad781a6566f15ca1d6ed69cd74706d6722ebadbf"} Oct 10 16:50:04 crc kubenswrapper[4799]: I1010 16:50:04.779589 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/68ea0968-070a-41d4-b023-31557446c4dc-etc-swift\") pod \"swift-storage-0\" (UID: \"68ea0968-070a-41d4-b023-31557446c4dc\") " pod="openstack/swift-storage-0" Oct 10 16:50:04 crc 
kubenswrapper[4799]: I1010 16:50:04.788135 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/68ea0968-070a-41d4-b023-31557446c4dc-etc-swift\") pod \"swift-storage-0\" (UID: \"68ea0968-070a-41d4-b023-31557446c4dc\") " pod="openstack/swift-storage-0" Oct 10 16:50:04 crc kubenswrapper[4799]: I1010 16:50:04.933716 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Oct 10 16:50:06 crc kubenswrapper[4799]: I1010 16:50:06.179986 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 10 16:50:06 crc kubenswrapper[4799]: I1010 16:50:06.529490 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-8k2k8"] Oct 10 16:50:06 crc kubenswrapper[4799]: I1010 16:50:06.530947 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-8k2k8" Oct 10 16:50:06 crc kubenswrapper[4799]: I1010 16:50:06.540130 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-8k2k8"] Oct 10 16:50:06 crc kubenswrapper[4799]: I1010 16:50:06.614069 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ghnfm\" (UniqueName: \"kubernetes.io/projected/a78f157f-ef19-4bc2-abe9-9eafd833b89f-kube-api-access-ghnfm\") pod \"cinder-db-create-8k2k8\" (UID: \"a78f157f-ef19-4bc2-abe9-9eafd833b89f\") " pod="openstack/cinder-db-create-8k2k8" Oct 10 16:50:06 crc kubenswrapper[4799]: I1010 16:50:06.627406 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-nvghl"] Oct 10 16:50:06 crc kubenswrapper[4799]: I1010 16:50:06.628380 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-nvghl" Oct 10 16:50:06 crc kubenswrapper[4799]: I1010 16:50:06.640155 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-nvghl"] Oct 10 16:50:06 crc kubenswrapper[4799]: I1010 16:50:06.716569 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-876jm\" (UniqueName: \"kubernetes.io/projected/4786fb15-fe63-491e-aa13-1e3cbd1f7eef-kube-api-access-876jm\") pod \"barbican-db-create-nvghl\" (UID: \"4786fb15-fe63-491e-aa13-1e3cbd1f7eef\") " pod="openstack/barbican-db-create-nvghl" Oct 10 16:50:06 crc kubenswrapper[4799]: I1010 16:50:06.717107 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ghnfm\" (UniqueName: \"kubernetes.io/projected/a78f157f-ef19-4bc2-abe9-9eafd833b89f-kube-api-access-ghnfm\") pod \"cinder-db-create-8k2k8\" (UID: \"a78f157f-ef19-4bc2-abe9-9eafd833b89f\") " pod="openstack/cinder-db-create-8k2k8" Oct 10 16:50:06 crc kubenswrapper[4799]: I1010 16:50:06.734106 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ghnfm\" (UniqueName: \"kubernetes.io/projected/a78f157f-ef19-4bc2-abe9-9eafd833b89f-kube-api-access-ghnfm\") pod \"cinder-db-create-8k2k8\" (UID: \"a78f157f-ef19-4bc2-abe9-9eafd833b89f\") " pod="openstack/cinder-db-create-8k2k8" Oct 10 16:50:06 crc kubenswrapper[4799]: I1010 16:50:06.809171 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:50:06 crc kubenswrapper[4799]: I1010 16:50:06.818888 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-876jm\" (UniqueName: \"kubernetes.io/projected/4786fb15-fe63-491e-aa13-1e3cbd1f7eef-kube-api-access-876jm\") pod \"barbican-db-create-nvghl\" (UID: \"4786fb15-fe63-491e-aa13-1e3cbd1f7eef\") " pod="openstack/barbican-db-create-nvghl" Oct 10 16:50:06 crc kubenswrapper[4799]: I1010 16:50:06.837912 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-876jm\" (UniqueName: \"kubernetes.io/projected/4786fb15-fe63-491e-aa13-1e3cbd1f7eef-kube-api-access-876jm\") pod \"barbican-db-create-nvghl\" (UID: \"4786fb15-fe63-491e-aa13-1e3cbd1f7eef\") " pod="openstack/barbican-db-create-nvghl" Oct 10 16:50:06 crc kubenswrapper[4799]: I1010 16:50:06.847059 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-mqm27"] Oct 10 16:50:06 crc kubenswrapper[4799]: I1010 16:50:06.850838 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-mqm27" Oct 10 16:50:06 crc kubenswrapper[4799]: I1010 16:50:06.853122 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-8k2k8" Oct 10 16:50:06 crc kubenswrapper[4799]: I1010 16:50:06.888069 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-mqm27"] Oct 10 16:50:06 crc kubenswrapper[4799]: I1010 16:50:06.921163 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rrj5\" (UniqueName: \"kubernetes.io/projected/688b5025-24db-4d05-a046-d26deb669312-kube-api-access-5rrj5\") pod \"neutron-db-create-mqm27\" (UID: \"688b5025-24db-4d05-a046-d26deb669312\") " pod="openstack/neutron-db-create-mqm27" Oct 10 16:50:06 crc kubenswrapper[4799]: I1010 16:50:06.961174 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-nvghl" Oct 10 16:50:07 crc kubenswrapper[4799]: I1010 16:50:07.026127 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rrj5\" (UniqueName: \"kubernetes.io/projected/688b5025-24db-4d05-a046-d26deb669312-kube-api-access-5rrj5\") pod \"neutron-db-create-mqm27\" (UID: \"688b5025-24db-4d05-a046-d26deb669312\") " pod="openstack/neutron-db-create-mqm27" Oct 10 16:50:07 crc kubenswrapper[4799]: I1010 16:50:07.060552 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rrj5\" (UniqueName: \"kubernetes.io/projected/688b5025-24db-4d05-a046-d26deb669312-kube-api-access-5rrj5\") pod \"neutron-db-create-mqm27\" (UID: \"688b5025-24db-4d05-a046-d26deb669312\") " pod="openstack/neutron-db-create-mqm27" Oct 10 16:50:07 crc kubenswrapper[4799]: I1010 16:50:07.196417 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-mqm27" Oct 10 16:50:08 crc kubenswrapper[4799]: I1010 16:50:08.449772 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cab5-account-create-xs7tg" Oct 10 16:50:08 crc kubenswrapper[4799]: I1010 16:50:08.565890 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dlkf9\" (UniqueName: \"kubernetes.io/projected/730fe650-110f-4bae-8f7d-6b3d6755b4f8-kube-api-access-dlkf9\") pod \"730fe650-110f-4bae-8f7d-6b3d6755b4f8\" (UID: \"730fe650-110f-4bae-8f7d-6b3d6755b4f8\") " Oct 10 16:50:08 crc kubenswrapper[4799]: I1010 16:50:08.571428 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/730fe650-110f-4bae-8f7d-6b3d6755b4f8-kube-api-access-dlkf9" (OuterVolumeSpecName: "kube-api-access-dlkf9") pod "730fe650-110f-4bae-8f7d-6b3d6755b4f8" (UID: "730fe650-110f-4bae-8f7d-6b3d6755b4f8"). InnerVolumeSpecName "kube-api-access-dlkf9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:50:08 crc kubenswrapper[4799]: I1010 16:50:08.578689 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-s8gsd-config-qcrrx" Oct 10 16:50:08 crc kubenswrapper[4799]: I1010 16:50:08.668381 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-var-run-ovn\") pod \"4bf47326-cb7c-46c7-812f-2c3bc3cef12d\" (UID: \"4bf47326-cb7c-46c7-812f-2c3bc3cef12d\") " Oct 10 16:50:08 crc kubenswrapper[4799]: I1010 16:50:08.668468 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8zzcq\" (UniqueName: \"kubernetes.io/projected/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-kube-api-access-8zzcq\") pod \"4bf47326-cb7c-46c7-812f-2c3bc3cef12d\" (UID: \"4bf47326-cb7c-46c7-812f-2c3bc3cef12d\") " Oct 10 16:50:08 crc kubenswrapper[4799]: I1010 16:50:08.668564 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-additional-scripts\") pod \"4bf47326-cb7c-46c7-812f-2c3bc3cef12d\" (UID: \"4bf47326-cb7c-46c7-812f-2c3bc3cef12d\") " Oct 10 16:50:08 crc kubenswrapper[4799]: I1010 16:50:08.668583 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-var-run\") pod \"4bf47326-cb7c-46c7-812f-2c3bc3cef12d\" (UID: \"4bf47326-cb7c-46c7-812f-2c3bc3cef12d\") " Oct 10 16:50:08 crc kubenswrapper[4799]: I1010 16:50:08.668636 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-var-log-ovn\") pod \"4bf47326-cb7c-46c7-812f-2c3bc3cef12d\" (UID: \"4bf47326-cb7c-46c7-812f-2c3bc3cef12d\") " Oct 10 16:50:08 crc kubenswrapper[4799]: I1010 16:50:08.668673 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-scripts\") pod \"4bf47326-cb7c-46c7-812f-2c3bc3cef12d\" (UID: \"4bf47326-cb7c-46c7-812f-2c3bc3cef12d\") " Oct 10 16:50:08 crc kubenswrapper[4799]: I1010 16:50:08.668987 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-var-run" (OuterVolumeSpecName: "var-run") pod "4bf47326-cb7c-46c7-812f-2c3bc3cef12d" (UID: "4bf47326-cb7c-46c7-812f-2c3bc3cef12d"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 16:50:08 crc kubenswrapper[4799]: I1010 16:50:08.669027 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "4bf47326-cb7c-46c7-812f-2c3bc3cef12d" (UID: "4bf47326-cb7c-46c7-812f-2c3bc3cef12d"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 16:50:08 crc kubenswrapper[4799]: I1010 16:50:08.668994 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "4bf47326-cb7c-46c7-812f-2c3bc3cef12d" (UID: "4bf47326-cb7c-46c7-812f-2c3bc3cef12d"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 16:50:08 crc kubenswrapper[4799]: I1010 16:50:08.669076 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dlkf9\" (UniqueName: \"kubernetes.io/projected/730fe650-110f-4bae-8f7d-6b3d6755b4f8-kube-api-access-dlkf9\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:08 crc kubenswrapper[4799]: I1010 16:50:08.669626 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "4bf47326-cb7c-46c7-812f-2c3bc3cef12d" (UID: "4bf47326-cb7c-46c7-812f-2c3bc3cef12d"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:50:08 crc kubenswrapper[4799]: I1010 16:50:08.670117 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-scripts" (OuterVolumeSpecName: "scripts") pod "4bf47326-cb7c-46c7-812f-2c3bc3cef12d" (UID: "4bf47326-cb7c-46c7-812f-2c3bc3cef12d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:50:08 crc kubenswrapper[4799]: I1010 16:50:08.672826 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-kube-api-access-8zzcq" (OuterVolumeSpecName: "kube-api-access-8zzcq") pod "4bf47326-cb7c-46c7-812f-2c3bc3cef12d" (UID: "4bf47326-cb7c-46c7-812f-2c3bc3cef12d"). InnerVolumeSpecName "kube-api-access-8zzcq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:50:08 crc kubenswrapper[4799]: I1010 16:50:08.771665 4799 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-additional-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:08 crc kubenswrapper[4799]: I1010 16:50:08.771697 4799 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-var-run\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:08 crc kubenswrapper[4799]: I1010 16:50:08.771707 4799 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-var-log-ovn\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:08 crc kubenswrapper[4799]: I1010 16:50:08.771718 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:08 crc kubenswrapper[4799]: I1010 16:50:08.771726 4799 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-var-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:08 crc kubenswrapper[4799]: I1010 16:50:08.771735 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8zzcq\" (UniqueName: \"kubernetes.io/projected/4bf47326-cb7c-46c7-812f-2c3bc3cef12d-kube-api-access-8zzcq\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:08 crc kubenswrapper[4799]: I1010 16:50:08.826830 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-nvghl"] Oct 10 16:50:08 crc kubenswrapper[4799]: W1010 16:50:08.844153 4799 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4786fb15_fe63_491e_aa13_1e3cbd1f7eef.slice/crio-62ac4bc855a857fd9bc7fa161bb86b0da0d117c444b0cd7ac27f6bcfb0adee61 WatchSource:0}: Error finding container 62ac4bc855a857fd9bc7fa161bb86b0da0d117c444b0cd7ac27f6bcfb0adee61: Status 404 returned error can't find the container with id 62ac4bc855a857fd9bc7fa161bb86b0da0d117c444b0cd7ac27f6bcfb0adee61 Oct 10 16:50:08 crc kubenswrapper[4799]: I1010 16:50:08.990849 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-mqm27"] Oct 10 16:50:08 crc kubenswrapper[4799]: W1010 16:50:08.995002 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod688b5025_24db_4d05_a046_d26deb669312.slice/crio-773a5670efdfead139c1f30ab79c92111be57d38cf8c36ab591ee9403a4b51d2 WatchSource:0}: Error finding container 773a5670efdfead139c1f30ab79c92111be57d38cf8c36ab591ee9403a4b51d2: Status 404 returned error can't find the container with id 773a5670efdfead139c1f30ab79c92111be57d38cf8c36ab591ee9403a4b51d2 Oct 10 16:50:09 crc kubenswrapper[4799]: I1010 16:50:09.003572 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-8k2k8"] Oct 10 16:50:09 crc kubenswrapper[4799]: W1010 16:50:09.021458 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda78f157f_ef19_4bc2_abe9_9eafd833b89f.slice/crio-8a84c422261f8ef1e162a5900a42b5462e92583b13c0f2c21e0e3aaafc4f6412 WatchSource:0}: Error finding container 8a84c422261f8ef1e162a5900a42b5462e92583b13c0f2c21e0e3aaafc4f6412: Status 404 returned error can't find the container with id 8a84c422261f8ef1e162a5900a42b5462e92583b13c0f2c21e0e3aaafc4f6412 Oct 10 16:50:09 crc kubenswrapper[4799]: I1010 16:50:09.202627 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Oct 10 16:50:09 crc kubenswrapper[4799]: I1010 16:50:09.213822 4799 generic.go:334] "Generic (PLEG): container finished" podID="4786fb15-fe63-491e-aa13-1e3cbd1f7eef" containerID="98ec02c95b376af3e09c59e35caeec0f9973d9de6538cd288bc37643ef1c1cff" exitCode=0 Oct 10 16:50:09 crc kubenswrapper[4799]: I1010 16:50:09.213872 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-nvghl" event={"ID":"4786fb15-fe63-491e-aa13-1e3cbd1f7eef","Type":"ContainerDied","Data":"98ec02c95b376af3e09c59e35caeec0f9973d9de6538cd288bc37643ef1c1cff"} Oct 10 16:50:09 crc kubenswrapper[4799]: I1010 16:50:09.213897 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-nvghl" event={"ID":"4786fb15-fe63-491e-aa13-1e3cbd1f7eef","Type":"ContainerStarted","Data":"62ac4bc855a857fd9bc7fa161bb86b0da0d117c444b0cd7ac27f6bcfb0adee61"} Oct 10 16:50:09 crc kubenswrapper[4799]: I1010 16:50:09.215415 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-v8rpx" event={"ID":"4e0d692a-cc78-4807-a2a3-5b39c5729ee6","Type":"ContainerStarted","Data":"30ebb4c5a6f4b490de87b23dfbca83edb669e1b06479d40a92b5b82a0cd80d33"} Oct 10 16:50:09 crc kubenswrapper[4799]: I1010 16:50:09.217404 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cab5-account-create-xs7tg" event={"ID":"730fe650-110f-4bae-8f7d-6b3d6755b4f8","Type":"ContainerDied","Data":"ed253559d1ff896d6d8de99fad781a6566f15ca1d6ed69cd74706d6722ebadbf"} Oct 10 16:50:09 crc kubenswrapper[4799]: I1010 16:50:09.217423 4799 
pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ed253559d1ff896d6d8de99fad781a6566f15ca1d6ed69cd74706d6722ebadbf" Oct 10 16:50:09 crc kubenswrapper[4799]: I1010 16:50:09.217457 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cab5-account-create-xs7tg" Oct 10 16:50:09 crc kubenswrapper[4799]: W1010 16:50:09.222824 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod68ea0968_070a_41d4_b023_31557446c4dc.slice/crio-9855e476445a0ac9b17564ca4265acfe54663f1f39acf56d013baebb701fa764 WatchSource:0}: Error finding container 9855e476445a0ac9b17564ca4265acfe54663f1f39acf56d013baebb701fa764: Status 404 returned error can't find the container with id 9855e476445a0ac9b17564ca4265acfe54663f1f39acf56d013baebb701fa764 Oct 10 16:50:09 crc kubenswrapper[4799]: I1010 16:50:09.223657 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-8k2k8" event={"ID":"a78f157f-ef19-4bc2-abe9-9eafd833b89f","Type":"ContainerStarted","Data":"125ba823d513b27ef11a444f22553a11ba55c0f70d03679b66ec233ce953c8e0"} Oct 10 16:50:09 crc kubenswrapper[4799]: I1010 16:50:09.223704 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-8k2k8" event={"ID":"a78f157f-ef19-4bc2-abe9-9eafd833b89f","Type":"ContainerStarted","Data":"8a84c422261f8ef1e162a5900a42b5462e92583b13c0f2c21e0e3aaafc4f6412"} Oct 10 16:50:09 crc kubenswrapper[4799]: I1010 16:50:09.230284 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-mqm27" event={"ID":"688b5025-24db-4d05-a046-d26deb669312","Type":"ContainerStarted","Data":"a72c77a71b8c17e28293a02f3a365989a67c30c588ba4cdf5ef928ea4ed49719"} Oct 10 16:50:09 crc kubenswrapper[4799]: I1010 16:50:09.230355 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-mqm27" event={"ID":"688b5025-24db-4d05-a046-d26deb669312","Type":"ContainerStarted","Data":"773a5670efdfead139c1f30ab79c92111be57d38cf8c36ab591ee9403a4b51d2"} Oct 10 16:50:09 crc kubenswrapper[4799]: I1010 16:50:09.235139 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-s8gsd-config-qcrrx" event={"ID":"4bf47326-cb7c-46c7-812f-2c3bc3cef12d","Type":"ContainerDied","Data":"cce4c811a2a6a29bdd2f138a427cd80030825e13bd6115743f0c42523b34ac34"} Oct 10 16:50:09 crc kubenswrapper[4799]: I1010 16:50:09.235182 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cce4c811a2a6a29bdd2f138a427cd80030825e13bd6115743f0c42523b34ac34" Oct 10 16:50:09 crc kubenswrapper[4799]: I1010 16:50:09.235262 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-s8gsd-config-qcrrx" Oct 10 16:50:09 crc kubenswrapper[4799]: I1010 16:50:09.267115 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-v8rpx" podStartSLOduration=2.37559891 podStartE2EDuration="14.267090593s" podCreationTimestamp="2025-10-10 16:49:55 +0000 UTC" firstStartedPulling="2025-10-10 16:49:56.615300536 +0000 UTC m=+1090.123624651" lastFinishedPulling="2025-10-10 16:50:08.506792219 +0000 UTC m=+1102.015116334" observedRunningTime="2025-10-10 16:50:09.260037699 +0000 UTC m=+1102.768361824" watchObservedRunningTime="2025-10-10 16:50:09.267090593 +0000 UTC m=+1102.775414718" Oct 10 16:50:09 crc kubenswrapper[4799]: I1010 16:50:09.276477 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-create-8k2k8" podStartSLOduration=3.276451414 podStartE2EDuration="3.276451414s" podCreationTimestamp="2025-10-10 16:50:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:50:09.269844881 +0000 UTC m=+1102.778169006" watchObservedRunningTime="2025-10-10 16:50:09.276451414 +0000 UTC m=+1102.784775529" Oct 10 16:50:09 crc kubenswrapper[4799]: I1010 16:50:09.289660 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-create-mqm27" podStartSLOduration=3.28964168 podStartE2EDuration="3.28964168s" podCreationTimestamp="2025-10-10 16:50:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:50:09.285493627 +0000 UTC m=+1102.793817752" watchObservedRunningTime="2025-10-10 16:50:09.28964168 +0000 UTC m=+1102.797965795" Oct 10 16:50:09 crc kubenswrapper[4799]: I1010 16:50:09.663483 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-s8gsd-config-qcrrx"] Oct 10 16:50:09 crc kubenswrapper[4799]: I1010 16:50:09.671282 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-s8gsd-config-qcrrx"] Oct 10 16:50:10 crc kubenswrapper[4799]: I1010 16:50:10.246095 4799 generic.go:334] "Generic (PLEG): container finished" podID="a78f157f-ef19-4bc2-abe9-9eafd833b89f" containerID="125ba823d513b27ef11a444f22553a11ba55c0f70d03679b66ec233ce953c8e0" exitCode=0 Oct 10 16:50:10 crc kubenswrapper[4799]: I1010 16:50:10.246162 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-8k2k8" event={"ID":"a78f157f-ef19-4bc2-abe9-9eafd833b89f","Type":"ContainerDied","Data":"125ba823d513b27ef11a444f22553a11ba55c0f70d03679b66ec233ce953c8e0"} Oct 10 16:50:10 crc kubenswrapper[4799]: I1010 16:50:10.248633 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"68ea0968-070a-41d4-b023-31557446c4dc","Type":"ContainerStarted","Data":"9855e476445a0ac9b17564ca4265acfe54663f1f39acf56d013baebb701fa764"} Oct 10 16:50:10 crc kubenswrapper[4799]: I1010 16:50:10.252672 4799 generic.go:334] "Generic (PLEG): container finished" podID="688b5025-24db-4d05-a046-d26deb669312" containerID="a72c77a71b8c17e28293a02f3a365989a67c30c588ba4cdf5ef928ea4ed49719" exitCode=0 Oct 10 16:50:10 crc kubenswrapper[4799]: I1010 16:50:10.252747 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-mqm27" 
event={"ID":"688b5025-24db-4d05-a046-d26deb669312","Type":"ContainerDied","Data":"a72c77a71b8c17e28293a02f3a365989a67c30c588ba4cdf5ef928ea4ed49719"} Oct 10 16:50:10 crc kubenswrapper[4799]: I1010 16:50:10.334435 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-x5gkm"] Oct 10 16:50:10 crc kubenswrapper[4799]: E1010 16:50:10.334843 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="730fe650-110f-4bae-8f7d-6b3d6755b4f8" containerName="mariadb-account-create" Oct 10 16:50:10 crc kubenswrapper[4799]: I1010 16:50:10.334864 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="730fe650-110f-4bae-8f7d-6b3d6755b4f8" containerName="mariadb-account-create" Oct 10 16:50:10 crc kubenswrapper[4799]: E1010 16:50:10.334881 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bf47326-cb7c-46c7-812f-2c3bc3cef12d" containerName="ovn-config" Oct 10 16:50:10 crc kubenswrapper[4799]: I1010 16:50:10.334889 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bf47326-cb7c-46c7-812f-2c3bc3cef12d" containerName="ovn-config" Oct 10 16:50:10 crc kubenswrapper[4799]: I1010 16:50:10.335091 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="4bf47326-cb7c-46c7-812f-2c3bc3cef12d" containerName="ovn-config" Oct 10 16:50:10 crc kubenswrapper[4799]: I1010 16:50:10.335109 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="730fe650-110f-4bae-8f7d-6b3d6755b4f8" containerName="mariadb-account-create" Oct 10 16:50:10 crc kubenswrapper[4799]: I1010 16:50:10.335819 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-x5gkm" Oct 10 16:50:10 crc kubenswrapper[4799]: I1010 16:50:10.341506 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 10 16:50:10 crc kubenswrapper[4799]: I1010 16:50:10.342777 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-jwbbp" Oct 10 16:50:10 crc kubenswrapper[4799]: I1010 16:50:10.343136 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 10 16:50:10 crc kubenswrapper[4799]: I1010 16:50:10.343802 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 10 16:50:10 crc kubenswrapper[4799]: I1010 16:50:10.349917 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-x5gkm"] Oct 10 16:50:10 crc kubenswrapper[4799]: I1010 16:50:10.506619 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4k2w\" (UniqueName: \"kubernetes.io/projected/6f8a5006-c9ab-4b58-850e-a044229b2460-kube-api-access-z4k2w\") pod \"keystone-db-sync-x5gkm\" (UID: \"6f8a5006-c9ab-4b58-850e-a044229b2460\") " pod="openstack/keystone-db-sync-x5gkm" Oct 10 16:50:10 crc kubenswrapper[4799]: I1010 16:50:10.506683 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f8a5006-c9ab-4b58-850e-a044229b2460-combined-ca-bundle\") pod \"keystone-db-sync-x5gkm\" (UID: \"6f8a5006-c9ab-4b58-850e-a044229b2460\") " pod="openstack/keystone-db-sync-x5gkm" Oct 10 16:50:10 crc kubenswrapper[4799]: I1010 16:50:10.506705 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/6f8a5006-c9ab-4b58-850e-a044229b2460-config-data\") pod \"keystone-db-sync-x5gkm\" (UID: \"6f8a5006-c9ab-4b58-850e-a044229b2460\") " pod="openstack/keystone-db-sync-x5gkm" Oct 10 16:50:10 crc kubenswrapper[4799]: I1010 16:50:10.608115 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4k2w\" (UniqueName: \"kubernetes.io/projected/6f8a5006-c9ab-4b58-850e-a044229b2460-kube-api-access-z4k2w\") pod \"keystone-db-sync-x5gkm\" (UID: \"6f8a5006-c9ab-4b58-850e-a044229b2460\") " pod="openstack/keystone-db-sync-x5gkm" Oct 10 16:50:10 crc kubenswrapper[4799]: I1010 16:50:10.608173 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f8a5006-c9ab-4b58-850e-a044229b2460-combined-ca-bundle\") pod \"keystone-db-sync-x5gkm\" (UID: \"6f8a5006-c9ab-4b58-850e-a044229b2460\") " pod="openstack/keystone-db-sync-x5gkm" Oct 10 16:50:10 crc kubenswrapper[4799]: I1010 16:50:10.608194 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f8a5006-c9ab-4b58-850e-a044229b2460-config-data\") pod \"keystone-db-sync-x5gkm\" (UID: \"6f8a5006-c9ab-4b58-850e-a044229b2460\") " pod="openstack/keystone-db-sync-x5gkm" Oct 10 16:50:10 crc kubenswrapper[4799]: I1010 16:50:10.615599 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f8a5006-c9ab-4b58-850e-a044229b2460-config-data\") pod \"keystone-db-sync-x5gkm\" (UID: \"6f8a5006-c9ab-4b58-850e-a044229b2460\") " pod="openstack/keystone-db-sync-x5gkm" Oct 10 16:50:10 crc kubenswrapper[4799]: I1010 16:50:10.616839 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f8a5006-c9ab-4b58-850e-a044229b2460-combined-ca-bundle\") pod \"keystone-db-sync-x5gkm\" (UID: \"6f8a5006-c9ab-4b58-850e-a044229b2460\") " pod="openstack/keystone-db-sync-x5gkm" Oct 10 16:50:10 crc kubenswrapper[4799]: I1010 16:50:10.627908 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z4k2w\" (UniqueName: \"kubernetes.io/projected/6f8a5006-c9ab-4b58-850e-a044229b2460-kube-api-access-z4k2w\") pod \"keystone-db-sync-x5gkm\" (UID: \"6f8a5006-c9ab-4b58-850e-a044229b2460\") " pod="openstack/keystone-db-sync-x5gkm" Oct 10 16:50:10 crc kubenswrapper[4799]: I1010 16:50:10.667155 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-x5gkm" Oct 10 16:50:10 crc kubenswrapper[4799]: I1010 16:50:10.749344 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-nvghl" Oct 10 16:50:10 crc kubenswrapper[4799]: I1010 16:50:10.912007 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-876jm\" (UniqueName: \"kubernetes.io/projected/4786fb15-fe63-491e-aa13-1e3cbd1f7eef-kube-api-access-876jm\") pod \"4786fb15-fe63-491e-aa13-1e3cbd1f7eef\" (UID: \"4786fb15-fe63-491e-aa13-1e3cbd1f7eef\") " Oct 10 16:50:10 crc kubenswrapper[4799]: I1010 16:50:10.916069 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4786fb15-fe63-491e-aa13-1e3cbd1f7eef-kube-api-access-876jm" (OuterVolumeSpecName: "kube-api-access-876jm") pod "4786fb15-fe63-491e-aa13-1e3cbd1f7eef" (UID: "4786fb15-fe63-491e-aa13-1e3cbd1f7eef"). 
InnerVolumeSpecName "kube-api-access-876jm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:50:11 crc kubenswrapper[4799]: I1010 16:50:11.014318 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-876jm\" (UniqueName: \"kubernetes.io/projected/4786fb15-fe63-491e-aa13-1e3cbd1f7eef-kube-api-access-876jm\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:11 crc kubenswrapper[4799]: I1010 16:50:11.104191 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-x5gkm"] Oct 10 16:50:11 crc kubenswrapper[4799]: I1010 16:50:11.262493 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-nvghl" event={"ID":"4786fb15-fe63-491e-aa13-1e3cbd1f7eef","Type":"ContainerDied","Data":"62ac4bc855a857fd9bc7fa161bb86b0da0d117c444b0cd7ac27f6bcfb0adee61"} Oct 10 16:50:11 crc kubenswrapper[4799]: I1010 16:50:11.262528 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="62ac4bc855a857fd9bc7fa161bb86b0da0d117c444b0cd7ac27f6bcfb0adee61" Oct 10 16:50:11 crc kubenswrapper[4799]: I1010 16:50:11.262552 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-nvghl" Oct 10 16:50:11 crc kubenswrapper[4799]: I1010 16:50:11.264552 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"68ea0968-070a-41d4-b023-31557446c4dc","Type":"ContainerStarted","Data":"5dff4f861c205e245abbd15cc4d3d0d0becdc5a50f4d9bd4f1427cabc2fa0347"} Oct 10 16:50:11 crc kubenswrapper[4799]: I1010 16:50:11.265966 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-x5gkm" event={"ID":"6f8a5006-c9ab-4b58-850e-a044229b2460","Type":"ContainerStarted","Data":"5d6a1b463c4680c839e35436d76542408af63468551546cf6243407ea9a204bb"} Oct 10 16:50:11 crc kubenswrapper[4799]: I1010 16:50:11.411885 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bf47326-cb7c-46c7-812f-2c3bc3cef12d" path="/var/lib/kubelet/pods/4bf47326-cb7c-46c7-812f-2c3bc3cef12d/volumes" Oct 10 16:50:11 crc kubenswrapper[4799]: I1010 16:50:11.539548 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-8k2k8" Oct 10 16:50:11 crc kubenswrapper[4799]: I1010 16:50:11.588071 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-mqm27" Oct 10 16:50:11 crc kubenswrapper[4799]: I1010 16:50:11.624804 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ghnfm\" (UniqueName: \"kubernetes.io/projected/a78f157f-ef19-4bc2-abe9-9eafd833b89f-kube-api-access-ghnfm\") pod \"a78f157f-ef19-4bc2-abe9-9eafd833b89f\" (UID: \"a78f157f-ef19-4bc2-abe9-9eafd833b89f\") " Oct 10 16:50:11 crc kubenswrapper[4799]: I1010 16:50:11.634945 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a78f157f-ef19-4bc2-abe9-9eafd833b89f-kube-api-access-ghnfm" (OuterVolumeSpecName: "kube-api-access-ghnfm") pod "a78f157f-ef19-4bc2-abe9-9eafd833b89f" (UID: "a78f157f-ef19-4bc2-abe9-9eafd833b89f"). InnerVolumeSpecName "kube-api-access-ghnfm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:50:11 crc kubenswrapper[4799]: I1010 16:50:11.726509 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5rrj5\" (UniqueName: \"kubernetes.io/projected/688b5025-24db-4d05-a046-d26deb669312-kube-api-access-5rrj5\") pod \"688b5025-24db-4d05-a046-d26deb669312\" (UID: \"688b5025-24db-4d05-a046-d26deb669312\") " Oct 10 16:50:11 crc kubenswrapper[4799]: I1010 16:50:11.726948 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ghnfm\" (UniqueName: \"kubernetes.io/projected/a78f157f-ef19-4bc2-abe9-9eafd833b89f-kube-api-access-ghnfm\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:11 crc kubenswrapper[4799]: I1010 16:50:11.729247 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/688b5025-24db-4d05-a046-d26deb669312-kube-api-access-5rrj5" (OuterVolumeSpecName: "kube-api-access-5rrj5") pod "688b5025-24db-4d05-a046-d26deb669312" (UID: "688b5025-24db-4d05-a046-d26deb669312"). InnerVolumeSpecName "kube-api-access-5rrj5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:50:11 crc kubenswrapper[4799]: I1010 16:50:11.829009 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5rrj5\" (UniqueName: \"kubernetes.io/projected/688b5025-24db-4d05-a046-d26deb669312-kube-api-access-5rrj5\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:12 crc kubenswrapper[4799]: I1010 16:50:12.275785 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"68ea0968-070a-41d4-b023-31557446c4dc","Type":"ContainerStarted","Data":"3283576ff42b8ebc10b8fec21ffc203d5257c048d20b76e1f90800f9758835db"} Oct 10 16:50:12 crc kubenswrapper[4799]: I1010 16:50:12.276147 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"68ea0968-070a-41d4-b023-31557446c4dc","Type":"ContainerStarted","Data":"e611df1a7b5dee2c47fcfa489e23af4e0028a72aa26eea7950fe0ec36316b663"} Oct 10 16:50:12 crc kubenswrapper[4799]: I1010 16:50:12.276159 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"68ea0968-070a-41d4-b023-31557446c4dc","Type":"ContainerStarted","Data":"2e2a8373854753a4479c039fcd2e9fbdfba1493d4e774ff602b6e261202c606e"} Oct 10 16:50:12 crc kubenswrapper[4799]: I1010 16:50:12.277408 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-mqm27" event={"ID":"688b5025-24db-4d05-a046-d26deb669312","Type":"ContainerDied","Data":"773a5670efdfead139c1f30ab79c92111be57d38cf8c36ab591ee9403a4b51d2"} Oct 10 16:50:12 crc kubenswrapper[4799]: I1010 16:50:12.277437 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="773a5670efdfead139c1f30ab79c92111be57d38cf8c36ab591ee9403a4b51d2" Oct 10 16:50:12 crc kubenswrapper[4799]: I1010 16:50:12.277442 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-mqm27" Oct 10 16:50:12 crc kubenswrapper[4799]: I1010 16:50:12.279296 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-8k2k8" event={"ID":"a78f157f-ef19-4bc2-abe9-9eafd833b89f","Type":"ContainerDied","Data":"8a84c422261f8ef1e162a5900a42b5462e92583b13c0f2c21e0e3aaafc4f6412"} Oct 10 16:50:12 crc kubenswrapper[4799]: I1010 16:50:12.279334 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8a84c422261f8ef1e162a5900a42b5462e92583b13c0f2c21e0e3aaafc4f6412" Oct 10 16:50:12 crc kubenswrapper[4799]: I1010 16:50:12.279349 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-8k2k8" Oct 10 16:50:15 crc kubenswrapper[4799]: I1010 16:50:15.248815 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 16:50:15 crc kubenswrapper[4799]: I1010 16:50:15.249190 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 16:50:16 crc kubenswrapper[4799]: I1010 16:50:16.694037 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-0700-account-create-krzq2"] Oct 10 16:50:16 crc kubenswrapper[4799]: E1010 16:50:16.694942 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a78f157f-ef19-4bc2-abe9-9eafd833b89f" containerName="mariadb-database-create" Oct 10 16:50:16 crc kubenswrapper[4799]: I1010 16:50:16.694956 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a78f157f-ef19-4bc2-abe9-9eafd833b89f" containerName="mariadb-database-create" Oct 10 16:50:16 crc kubenswrapper[4799]: E1010 16:50:16.694970 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4786fb15-fe63-491e-aa13-1e3cbd1f7eef" containerName="mariadb-database-create" Oct 10 16:50:16 crc kubenswrapper[4799]: I1010 16:50:16.694976 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="4786fb15-fe63-491e-aa13-1e3cbd1f7eef" containerName="mariadb-database-create" Oct 10 16:50:16 crc kubenswrapper[4799]: E1010 16:50:16.694986 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="688b5025-24db-4d05-a046-d26deb669312" containerName="mariadb-database-create" Oct 10 16:50:16 crc kubenswrapper[4799]: I1010 16:50:16.694994 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="688b5025-24db-4d05-a046-d26deb669312" containerName="mariadb-database-create" Oct 10 16:50:16 crc kubenswrapper[4799]: I1010 16:50:16.695151 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="a78f157f-ef19-4bc2-abe9-9eafd833b89f" containerName="mariadb-database-create" Oct 10 16:50:16 crc kubenswrapper[4799]: I1010 16:50:16.695163 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="4786fb15-fe63-491e-aa13-1e3cbd1f7eef" containerName="mariadb-database-create" Oct 10 16:50:16 crc kubenswrapper[4799]: I1010 16:50:16.695181 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="688b5025-24db-4d05-a046-d26deb669312" 
containerName="mariadb-database-create" Oct 10 16:50:16 crc kubenswrapper[4799]: I1010 16:50:16.695714 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-0700-account-create-krzq2" Oct 10 16:50:16 crc kubenswrapper[4799]: I1010 16:50:16.698005 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Oct 10 16:50:16 crc kubenswrapper[4799]: I1010 16:50:16.710367 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-0700-account-create-krzq2"] Oct 10 16:50:16 crc kubenswrapper[4799]: I1010 16:50:16.816701 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69zzp\" (UniqueName: \"kubernetes.io/projected/be996514-22c0-4da7-b8fc-cc423de20fc8-kube-api-access-69zzp\") pod \"barbican-0700-account-create-krzq2\" (UID: \"be996514-22c0-4da7-b8fc-cc423de20fc8\") " pod="openstack/barbican-0700-account-create-krzq2" Oct 10 16:50:16 crc kubenswrapper[4799]: I1010 16:50:16.918042 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69zzp\" (UniqueName: \"kubernetes.io/projected/be996514-22c0-4da7-b8fc-cc423de20fc8-kube-api-access-69zzp\") pod \"barbican-0700-account-create-krzq2\" (UID: \"be996514-22c0-4da7-b8fc-cc423de20fc8\") " pod="openstack/barbican-0700-account-create-krzq2" Oct 10 16:50:16 crc kubenswrapper[4799]: I1010 16:50:16.940086 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69zzp\" (UniqueName: \"kubernetes.io/projected/be996514-22c0-4da7-b8fc-cc423de20fc8-kube-api-access-69zzp\") pod \"barbican-0700-account-create-krzq2\" (UID: \"be996514-22c0-4da7-b8fc-cc423de20fc8\") " pod="openstack/barbican-0700-account-create-krzq2" Oct 10 16:50:17 crc kubenswrapper[4799]: I1010 16:50:17.018223 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-0700-account-create-krzq2" Oct 10 16:50:17 crc kubenswrapper[4799]: I1010 16:50:17.332730 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"68ea0968-070a-41d4-b023-31557446c4dc","Type":"ContainerStarted","Data":"892c2480a25d808a995817609b9bbb27b39738b861f9e1834be4106363fa31e8"} Oct 10 16:50:17 crc kubenswrapper[4799]: I1010 16:50:17.333142 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"68ea0968-070a-41d4-b023-31557446c4dc","Type":"ContainerStarted","Data":"d0489a361daf254795ccae9ddca687c512362e7b439ec64189bd20c8ab4310b0"} Oct 10 16:50:17 crc kubenswrapper[4799]: I1010 16:50:17.333183 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"68ea0968-070a-41d4-b023-31557446c4dc","Type":"ContainerStarted","Data":"5b34f901fc61925f0938ed04472b26863bfffe70a6291e77a0980d1be5dc5aa4"} Oct 10 16:50:17 crc kubenswrapper[4799]: I1010 16:50:17.334332 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-x5gkm" event={"ID":"6f8a5006-c9ab-4b58-850e-a044229b2460","Type":"ContainerStarted","Data":"7c99ddca75e3e8ee09f4a867e51dabb2d051c4f46efe616d1137e55c2a018e7e"} Oct 10 16:50:17 crc kubenswrapper[4799]: I1010 16:50:17.465981 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-x5gkm" podStartSLOduration=2.382794327 podStartE2EDuration="7.465956839s" podCreationTimestamp="2025-10-10 16:50:10 +0000 UTC" firstStartedPulling="2025-10-10 16:50:11.123286723 +0000 UTC m=+1104.631610838" lastFinishedPulling="2025-10-10 16:50:16.206449235 +0000 UTC m=+1109.714773350" observedRunningTime="2025-10-10 16:50:17.353007643 +0000 UTC m=+1110.861331758" watchObservedRunningTime="2025-10-10 16:50:17.465956839 +0000 UTC m=+1110.974280954" Oct 10 16:50:17 crc kubenswrapper[4799]: W1010 16:50:17.477988 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbe996514_22c0_4da7_b8fc_cc423de20fc8.slice/crio-e94bb8e16253c6d62d23a23c1d7d48adcf9da0a2a908ed630ba9d24f640c3eec WatchSource:0}: Error finding container e94bb8e16253c6d62d23a23c1d7d48adcf9da0a2a908ed630ba9d24f640c3eec: Status 404 returned error can't find the container with id e94bb8e16253c6d62d23a23c1d7d48adcf9da0a2a908ed630ba9d24f640c3eec Oct 10 16:50:17 crc kubenswrapper[4799]: I1010 16:50:17.479587 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-0700-account-create-krzq2"] Oct 10 16:50:18 crc kubenswrapper[4799]: I1010 16:50:18.356022 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"68ea0968-070a-41d4-b023-31557446c4dc","Type":"ContainerStarted","Data":"196848f6327818ee365268243d9ccad984ffe2d581cc81fbfd2de38d6676c1a0"} Oct 10 16:50:18 crc kubenswrapper[4799]: I1010 16:50:18.358500 4799 generic.go:334] "Generic (PLEG): container finished" podID="be996514-22c0-4da7-b8fc-cc423de20fc8" containerID="7ac4115cc5be558a70d0208dfd901d2138a9ae99495f1c58e108568e2e2fac0e" exitCode=0 Oct 10 16:50:18 crc kubenswrapper[4799]: I1010 16:50:18.359277 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-0700-account-create-krzq2" event={"ID":"be996514-22c0-4da7-b8fc-cc423de20fc8","Type":"ContainerDied","Data":"7ac4115cc5be558a70d0208dfd901d2138a9ae99495f1c58e108568e2e2fac0e"} Oct 10 16:50:18 crc kubenswrapper[4799]: I1010 
16:50:18.359311 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-0700-account-create-krzq2" event={"ID":"be996514-22c0-4da7-b8fc-cc423de20fc8","Type":"ContainerStarted","Data":"e94bb8e16253c6d62d23a23c1d7d48adcf9da0a2a908ed630ba9d24f640c3eec"} Oct 10 16:50:19 crc kubenswrapper[4799]: I1010 16:50:19.373399 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"68ea0968-070a-41d4-b023-31557446c4dc","Type":"ContainerStarted","Data":"a0a44ae2f612b300ed982c9b9af495c2acaaf967a0729c5e19eda110019db7eb"} Oct 10 16:50:19 crc kubenswrapper[4799]: I1010 16:50:19.373686 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"68ea0968-070a-41d4-b023-31557446c4dc","Type":"ContainerStarted","Data":"d4aecb0e485406b0a1fb96b8e50caa65a29728439d08b5b6330706ef802ddeb2"} Oct 10 16:50:19 crc kubenswrapper[4799]: I1010 16:50:19.373696 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"68ea0968-070a-41d4-b023-31557446c4dc","Type":"ContainerStarted","Data":"c3e06a4a05023171ceb2e34c51e209015c056f45bfa0faadf50fd4785e2e4d80"} Oct 10 16:50:19 crc kubenswrapper[4799]: I1010 16:50:19.655096 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-0700-account-create-krzq2" Oct 10 16:50:19 crc kubenswrapper[4799]: I1010 16:50:19.772846 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-69zzp\" (UniqueName: \"kubernetes.io/projected/be996514-22c0-4da7-b8fc-cc423de20fc8-kube-api-access-69zzp\") pod \"be996514-22c0-4da7-b8fc-cc423de20fc8\" (UID: \"be996514-22c0-4da7-b8fc-cc423de20fc8\") " Oct 10 16:50:19 crc kubenswrapper[4799]: I1010 16:50:19.782106 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be996514-22c0-4da7-b8fc-cc423de20fc8-kube-api-access-69zzp" (OuterVolumeSpecName: "kube-api-access-69zzp") pod "be996514-22c0-4da7-b8fc-cc423de20fc8" (UID: "be996514-22c0-4da7-b8fc-cc423de20fc8"). InnerVolumeSpecName "kube-api-access-69zzp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:50:19 crc kubenswrapper[4799]: I1010 16:50:19.874500 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-69zzp\" (UniqueName: \"kubernetes.io/projected/be996514-22c0-4da7-b8fc-cc423de20fc8-kube-api-access-69zzp\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:20 crc kubenswrapper[4799]: I1010 16:50:20.409508 4799 generic.go:334] "Generic (PLEG): container finished" podID="4e0d692a-cc78-4807-a2a3-5b39c5729ee6" containerID="30ebb4c5a6f4b490de87b23dfbca83edb669e1b06479d40a92b5b82a0cd80d33" exitCode=0 Oct 10 16:50:20 crc kubenswrapper[4799]: I1010 16:50:20.409587 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-v8rpx" event={"ID":"4e0d692a-cc78-4807-a2a3-5b39c5729ee6","Type":"ContainerDied","Data":"30ebb4c5a6f4b490de87b23dfbca83edb669e1b06479d40a92b5b82a0cd80d33"} Oct 10 16:50:20 crc kubenswrapper[4799]: I1010 16:50:20.435263 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-0700-account-create-krzq2" event={"ID":"be996514-22c0-4da7-b8fc-cc423de20fc8","Type":"ContainerDied","Data":"e94bb8e16253c6d62d23a23c1d7d48adcf9da0a2a908ed630ba9d24f640c3eec"} Oct 10 16:50:20 crc kubenswrapper[4799]: I1010 16:50:20.435301 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e94bb8e16253c6d62d23a23c1d7d48adcf9da0a2a908ed630ba9d24f640c3eec" Oct 10 16:50:20 crc kubenswrapper[4799]: I1010 16:50:20.435360 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-0700-account-create-krzq2" Oct 10 16:50:20 crc kubenswrapper[4799]: I1010 16:50:20.518565 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"68ea0968-070a-41d4-b023-31557446c4dc","Type":"ContainerStarted","Data":"d7211c49780feb5fa0e4a94a5ced7f5a84311b8cae847b8935e7948aa4a99e2c"} Oct 10 16:50:20 crc kubenswrapper[4799]: I1010 16:50:20.518610 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"68ea0968-070a-41d4-b023-31557446c4dc","Type":"ContainerStarted","Data":"94b0e5fe4497d52c34e39558472e6848a5c209b522dd73f975bdb4dc0e01da73"} Oct 10 16:50:20 crc kubenswrapper[4799]: I1010 16:50:20.518619 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"68ea0968-070a-41d4-b023-31557446c4dc","Type":"ContainerStarted","Data":"432fefd63b99c8d28abb812f0362dcacaa5d81c188e06c3668e637ba465daf44"} Oct 10 16:50:20 crc kubenswrapper[4799]: I1010 16:50:20.518629 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"68ea0968-070a-41d4-b023-31557446c4dc","Type":"ContainerStarted","Data":"c4d301a6fc96fe120eff75102918fb4f4f64a0c1d74276e5a9d732cecede51be"} Oct 10 16:50:20 crc kubenswrapper[4799]: I1010 16:50:20.529368 4799 generic.go:334] "Generic (PLEG): container finished" podID="6f8a5006-c9ab-4b58-850e-a044229b2460" containerID="7c99ddca75e3e8ee09f4a867e51dabb2d051c4f46efe616d1137e55c2a018e7e" exitCode=0 Oct 10 16:50:20 crc kubenswrapper[4799]: I1010 16:50:20.529409 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-x5gkm" event={"ID":"6f8a5006-c9ab-4b58-850e-a044229b2460","Type":"ContainerDied","Data":"7c99ddca75e3e8ee09f4a867e51dabb2d051c4f46efe616d1137e55c2a018e7e"} Oct 10 16:50:20 crc kubenswrapper[4799]: I1010 16:50:20.573328 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/swift-storage-0" podStartSLOduration=40.164292157 podStartE2EDuration="49.573305019s" podCreationTimestamp="2025-10-10 16:49:31 +0000 UTC" firstStartedPulling="2025-10-10 16:50:09.22561988 +0000 UTC m=+1102.733943995" lastFinishedPulling="2025-10-10 16:50:18.634632732 +0000 UTC m=+1112.142956857" observedRunningTime="2025-10-10 16:50:20.56526709 +0000 UTC m=+1114.073591225" watchObservedRunningTime="2025-10-10 16:50:20.573305019 +0000 UTC m=+1114.081629134" Oct 10 16:50:20 crc kubenswrapper[4799]: I1010 16:50:20.830074 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-677c858757-mwwqx"] Oct 10 16:50:20 crc kubenswrapper[4799]: E1010 16:50:20.830398 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be996514-22c0-4da7-b8fc-cc423de20fc8" containerName="mariadb-account-create" Oct 10 16:50:20 crc kubenswrapper[4799]: I1010 16:50:20.830416 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="be996514-22c0-4da7-b8fc-cc423de20fc8" containerName="mariadb-account-create" Oct 10 16:50:20 crc kubenswrapper[4799]: I1010 16:50:20.830598 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="be996514-22c0-4da7-b8fc-cc423de20fc8" containerName="mariadb-account-create" Oct 10 16:50:20 crc kubenswrapper[4799]: I1010 16:50:20.831405 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-677c858757-mwwqx" Oct 10 16:50:20 crc kubenswrapper[4799]: I1010 16:50:20.835710 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Oct 10 16:50:20 crc kubenswrapper[4799]: I1010 16:50:20.843597 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-677c858757-mwwqx"] Oct 10 16:50:20 crc kubenswrapper[4799]: I1010 16:50:20.890823 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c451444b-d58d-4383-801c-6df3ee8d8adf-config\") pod \"dnsmasq-dns-677c858757-mwwqx\" (UID: \"c451444b-d58d-4383-801c-6df3ee8d8adf\") " pod="openstack/dnsmasq-dns-677c858757-mwwqx" Oct 10 16:50:20 crc kubenswrapper[4799]: I1010 16:50:20.890891 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c451444b-d58d-4383-801c-6df3ee8d8adf-ovsdbserver-nb\") pod \"dnsmasq-dns-677c858757-mwwqx\" (UID: \"c451444b-d58d-4383-801c-6df3ee8d8adf\") " pod="openstack/dnsmasq-dns-677c858757-mwwqx" Oct 10 16:50:20 crc kubenswrapper[4799]: I1010 16:50:20.890961 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbjp2\" (UniqueName: \"kubernetes.io/projected/c451444b-d58d-4383-801c-6df3ee8d8adf-kube-api-access-lbjp2\") pod \"dnsmasq-dns-677c858757-mwwqx\" (UID: \"c451444b-d58d-4383-801c-6df3ee8d8adf\") " pod="openstack/dnsmasq-dns-677c858757-mwwqx" Oct 10 16:50:20 crc kubenswrapper[4799]: I1010 16:50:20.891029 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c451444b-d58d-4383-801c-6df3ee8d8adf-ovsdbserver-sb\") pod \"dnsmasq-dns-677c858757-mwwqx\" (UID: \"c451444b-d58d-4383-801c-6df3ee8d8adf\") " pod="openstack/dnsmasq-dns-677c858757-mwwqx" Oct 10 16:50:20 crc kubenswrapper[4799]: I1010 16:50:20.891063 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c451444b-d58d-4383-801c-6df3ee8d8adf-dns-svc\") pod \"dnsmasq-dns-677c858757-mwwqx\" (UID: \"c451444b-d58d-4383-801c-6df3ee8d8adf\") " pod="openstack/dnsmasq-dns-677c858757-mwwqx" Oct 10 16:50:20 crc kubenswrapper[4799]: I1010 16:50:20.891096 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c451444b-d58d-4383-801c-6df3ee8d8adf-dns-swift-storage-0\") pod \"dnsmasq-dns-677c858757-mwwqx\" (UID: \"c451444b-d58d-4383-801c-6df3ee8d8adf\") " pod="openstack/dnsmasq-dns-677c858757-mwwqx" Oct 10 16:50:20 crc kubenswrapper[4799]: I1010 16:50:20.992049 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c451444b-d58d-4383-801c-6df3ee8d8adf-ovsdbserver-sb\") pod \"dnsmasq-dns-677c858757-mwwqx\" (UID: \"c451444b-d58d-4383-801c-6df3ee8d8adf\") " pod="openstack/dnsmasq-dns-677c858757-mwwqx" Oct 10 16:50:20 crc kubenswrapper[4799]: I1010 16:50:20.992112 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c451444b-d58d-4383-801c-6df3ee8d8adf-dns-svc\") pod \"dnsmasq-dns-677c858757-mwwqx\" (UID: \"c451444b-d58d-4383-801c-6df3ee8d8adf\") " pod="openstack/dnsmasq-dns-677c858757-mwwqx" Oct 10 16:50:20 crc kubenswrapper[4799]: I1010 16:50:20.992151 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c451444b-d58d-4383-801c-6df3ee8d8adf-dns-swift-storage-0\") pod \"dnsmasq-dns-677c858757-mwwqx\" (UID: \"c451444b-d58d-4383-801c-6df3ee8d8adf\") " pod="openstack/dnsmasq-dns-677c858757-mwwqx" Oct 10 16:50:20 crc kubenswrapper[4799]: I1010 16:50:20.992179 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c451444b-d58d-4383-801c-6df3ee8d8adf-config\") pod \"dnsmasq-dns-677c858757-mwwqx\" (UID: \"c451444b-d58d-4383-801c-6df3ee8d8adf\") " pod="openstack/dnsmasq-dns-677c858757-mwwqx" Oct 10 16:50:20 crc kubenswrapper[4799]: I1010 16:50:20.992217 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c451444b-d58d-4383-801c-6df3ee8d8adf-ovsdbserver-nb\") pod \"dnsmasq-dns-677c858757-mwwqx\" (UID: \"c451444b-d58d-4383-801c-6df3ee8d8adf\") " pod="openstack/dnsmasq-dns-677c858757-mwwqx" Oct 10 16:50:20 crc kubenswrapper[4799]: I1010 16:50:20.992282 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbjp2\" (UniqueName: \"kubernetes.io/projected/c451444b-d58d-4383-801c-6df3ee8d8adf-kube-api-access-lbjp2\") pod \"dnsmasq-dns-677c858757-mwwqx\" (UID: \"c451444b-d58d-4383-801c-6df3ee8d8adf\") " pod="openstack/dnsmasq-dns-677c858757-mwwqx" Oct 10 16:50:20 crc kubenswrapper[4799]: I1010 16:50:20.993234 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c451444b-d58d-4383-801c-6df3ee8d8adf-config\") pod \"dnsmasq-dns-677c858757-mwwqx\" (UID: \"c451444b-d58d-4383-801c-6df3ee8d8adf\") " pod="openstack/dnsmasq-dns-677c858757-mwwqx" Oct 10 16:50:20 crc kubenswrapper[4799]: I1010 16:50:20.993283 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/c451444b-d58d-4383-801c-6df3ee8d8adf-ovsdbserver-sb\") pod \"dnsmasq-dns-677c858757-mwwqx\" (UID: \"c451444b-d58d-4383-801c-6df3ee8d8adf\") " pod="openstack/dnsmasq-dns-677c858757-mwwqx" Oct 10 16:50:20 crc kubenswrapper[4799]: I1010 16:50:20.993318 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c451444b-d58d-4383-801c-6df3ee8d8adf-dns-svc\") pod \"dnsmasq-dns-677c858757-mwwqx\" (UID: \"c451444b-d58d-4383-801c-6df3ee8d8adf\") " pod="openstack/dnsmasq-dns-677c858757-mwwqx" Oct 10 16:50:20 crc kubenswrapper[4799]: I1010 16:50:20.993356 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c451444b-d58d-4383-801c-6df3ee8d8adf-dns-swift-storage-0\") pod \"dnsmasq-dns-677c858757-mwwqx\" (UID: \"c451444b-d58d-4383-801c-6df3ee8d8adf\") " pod="openstack/dnsmasq-dns-677c858757-mwwqx" Oct 10 16:50:20 crc kubenswrapper[4799]: I1010 16:50:20.993484 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c451444b-d58d-4383-801c-6df3ee8d8adf-ovsdbserver-nb\") pod \"dnsmasq-dns-677c858757-mwwqx\" (UID: \"c451444b-d58d-4383-801c-6df3ee8d8adf\") " pod="openstack/dnsmasq-dns-677c858757-mwwqx" Oct 10 16:50:21 crc kubenswrapper[4799]: I1010 16:50:21.009890 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbjp2\" (UniqueName: \"kubernetes.io/projected/c451444b-d58d-4383-801c-6df3ee8d8adf-kube-api-access-lbjp2\") pod \"dnsmasq-dns-677c858757-mwwqx\" (UID: \"c451444b-d58d-4383-801c-6df3ee8d8adf\") " pod="openstack/dnsmasq-dns-677c858757-mwwqx" Oct 10 16:50:21 crc kubenswrapper[4799]: I1010 16:50:21.154898 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-677c858757-mwwqx" Oct 10 16:50:21 crc kubenswrapper[4799]: I1010 16:50:21.625752 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-677c858757-mwwqx"] Oct 10 16:50:21 crc kubenswrapper[4799]: I1010 16:50:21.804107 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-x5gkm" Oct 10 16:50:21 crc kubenswrapper[4799]: I1010 16:50:21.908798 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z4k2w\" (UniqueName: \"kubernetes.io/projected/6f8a5006-c9ab-4b58-850e-a044229b2460-kube-api-access-z4k2w\") pod \"6f8a5006-c9ab-4b58-850e-a044229b2460\" (UID: \"6f8a5006-c9ab-4b58-850e-a044229b2460\") " Oct 10 16:50:21 crc kubenswrapper[4799]: I1010 16:50:21.909112 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f8a5006-c9ab-4b58-850e-a044229b2460-config-data\") pod \"6f8a5006-c9ab-4b58-850e-a044229b2460\" (UID: \"6f8a5006-c9ab-4b58-850e-a044229b2460\") " Oct 10 16:50:21 crc kubenswrapper[4799]: I1010 16:50:21.909199 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f8a5006-c9ab-4b58-850e-a044229b2460-combined-ca-bundle\") pod \"6f8a5006-c9ab-4b58-850e-a044229b2460\" (UID: \"6f8a5006-c9ab-4b58-850e-a044229b2460\") " Oct 10 16:50:21 crc kubenswrapper[4799]: I1010 16:50:21.915708 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f8a5006-c9ab-4b58-850e-a044229b2460-kube-api-access-z4k2w" (OuterVolumeSpecName: "kube-api-access-z4k2w") pod "6f8a5006-c9ab-4b58-850e-a044229b2460" (UID: "6f8a5006-c9ab-4b58-850e-a044229b2460"). InnerVolumeSpecName "kube-api-access-z4k2w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:50:21 crc kubenswrapper[4799]: I1010 16:50:21.937162 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f8a5006-c9ab-4b58-850e-a044229b2460-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6f8a5006-c9ab-4b58-850e-a044229b2460" (UID: "6f8a5006-c9ab-4b58-850e-a044229b2460"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:50:21 crc kubenswrapper[4799]: I1010 16:50:21.967099 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f8a5006-c9ab-4b58-850e-a044229b2460-config-data" (OuterVolumeSpecName: "config-data") pod "6f8a5006-c9ab-4b58-850e-a044229b2460" (UID: "6f8a5006-c9ab-4b58-850e-a044229b2460"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.014913 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z4k2w\" (UniqueName: \"kubernetes.io/projected/6f8a5006-c9ab-4b58-850e-a044229b2460-kube-api-access-z4k2w\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.014950 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f8a5006-c9ab-4b58-850e-a044229b2460-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.014963 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f8a5006-c9ab-4b58-850e-a044229b2460-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.061771 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-v8rpx" Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.115666 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hnpwq\" (UniqueName: \"kubernetes.io/projected/4e0d692a-cc78-4807-a2a3-5b39c5729ee6-kube-api-access-hnpwq\") pod \"4e0d692a-cc78-4807-a2a3-5b39c5729ee6\" (UID: \"4e0d692a-cc78-4807-a2a3-5b39c5729ee6\") " Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.115821 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e0d692a-cc78-4807-a2a3-5b39c5729ee6-config-data\") pod \"4e0d692a-cc78-4807-a2a3-5b39c5729ee6\" (UID: \"4e0d692a-cc78-4807-a2a3-5b39c5729ee6\") " Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.115847 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e0d692a-cc78-4807-a2a3-5b39c5729ee6-combined-ca-bundle\") pod \"4e0d692a-cc78-4807-a2a3-5b39c5729ee6\" (UID: \"4e0d692a-cc78-4807-a2a3-5b39c5729ee6\") " Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.115978 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4e0d692a-cc78-4807-a2a3-5b39c5729ee6-db-sync-config-data\") pod \"4e0d692a-cc78-4807-a2a3-5b39c5729ee6\" (UID: \"4e0d692a-cc78-4807-a2a3-5b39c5729ee6\") " Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.119555 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e0d692a-cc78-4807-a2a3-5b39c5729ee6-kube-api-access-hnpwq" (OuterVolumeSpecName: "kube-api-access-hnpwq") pod "4e0d692a-cc78-4807-a2a3-5b39c5729ee6" (UID: "4e0d692a-cc78-4807-a2a3-5b39c5729ee6"). InnerVolumeSpecName "kube-api-access-hnpwq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.121181 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e0d692a-cc78-4807-a2a3-5b39c5729ee6-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "4e0d692a-cc78-4807-a2a3-5b39c5729ee6" (UID: "4e0d692a-cc78-4807-a2a3-5b39c5729ee6"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.144879 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e0d692a-cc78-4807-a2a3-5b39c5729ee6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4e0d692a-cc78-4807-a2a3-5b39c5729ee6" (UID: "4e0d692a-cc78-4807-a2a3-5b39c5729ee6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.158564 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e0d692a-cc78-4807-a2a3-5b39c5729ee6-config-data" (OuterVolumeSpecName: "config-data") pod "4e0d692a-cc78-4807-a2a3-5b39c5729ee6" (UID: "4e0d692a-cc78-4807-a2a3-5b39c5729ee6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.218441 4799 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4e0d692a-cc78-4807-a2a3-5b39c5729ee6-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.218484 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hnpwq\" (UniqueName: \"kubernetes.io/projected/4e0d692a-cc78-4807-a2a3-5b39c5729ee6-kube-api-access-hnpwq\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.218499 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e0d692a-cc78-4807-a2a3-5b39c5729ee6-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.218578 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e0d692a-cc78-4807-a2a3-5b39c5729ee6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.544263 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-x5gkm" event={"ID":"6f8a5006-c9ab-4b58-850e-a044229b2460","Type":"ContainerDied","Data":"5d6a1b463c4680c839e35436d76542408af63468551546cf6243407ea9a204bb"} Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.544313 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5d6a1b463c4680c839e35436d76542408af63468551546cf6243407ea9a204bb" Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.544779 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-x5gkm" Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.546459 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-v8rpx" Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.546503 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-v8rpx" event={"ID":"4e0d692a-cc78-4807-a2a3-5b39c5729ee6","Type":"ContainerDied","Data":"5671bafb0c1422e6dc132f4ba3fdb76dd5e6588466815efdd5839f48f12d34cd"} Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.546554 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5671bafb0c1422e6dc132f4ba3fdb76dd5e6588466815efdd5839f48f12d34cd" Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.548196 4799 generic.go:334] "Generic (PLEG): container finished" podID="c451444b-d58d-4383-801c-6df3ee8d8adf" containerID="e67e5ad07227279d88ed90b21c16d59edaf11b294afb0c88931442903f3e9d25" exitCode=0 Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.548239 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-677c858757-mwwqx" event={"ID":"c451444b-d58d-4383-801c-6df3ee8d8adf","Type":"ContainerDied","Data":"e67e5ad07227279d88ed90b21c16d59edaf11b294afb0c88931442903f3e9d25"} Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.548270 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-677c858757-mwwqx" event={"ID":"c451444b-d58d-4383-801c-6df3ee8d8adf","Type":"ContainerStarted","Data":"ad8694bed47720aacdf17ecc0067580874425191c6d8c3a1423f77ea75e3b475"} Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.897994 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-677c858757-mwwqx"] Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.975839 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-g45lb"] Oct 10 16:50:22 crc kubenswrapper[4799]: E1010 16:50:22.976349 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f8a5006-c9ab-4b58-850e-a044229b2460" containerName="keystone-db-sync" Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.976374 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f8a5006-c9ab-4b58-850e-a044229b2460" containerName="keystone-db-sync" Oct 10 16:50:22 crc kubenswrapper[4799]: E1010 16:50:22.976391 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e0d692a-cc78-4807-a2a3-5b39c5729ee6" containerName="glance-db-sync" Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.976400 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e0d692a-cc78-4807-a2a3-5b39c5729ee6" containerName="glance-db-sync" Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.976605 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e0d692a-cc78-4807-a2a3-5b39c5729ee6" containerName="glance-db-sync" Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.976630 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f8a5006-c9ab-4b58-850e-a044229b2460" containerName="keystone-db-sync" Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.977333 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-g45lb" Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.985470 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.985709 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.987630 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.987849 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-jwbbp" Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.988101 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7c769b4797-w8znp"] Oct 10 16:50:22 crc kubenswrapper[4799]: I1010 16:50:22.989926 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c769b4797-w8znp" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.003858 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-g45lb"] Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.021529 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c769b4797-w8znp"] Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.048772 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdvns\" (UniqueName: \"kubernetes.io/projected/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-kube-api-access-mdvns\") pod \"dnsmasq-dns-7c769b4797-w8znp\" (UID: \"43eb6e50-c6a9-4bbd-a301-926e6b3742d5\") " pod="openstack/dnsmasq-dns-7c769b4797-w8znp" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.048843 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-dns-swift-storage-0\") pod \"dnsmasq-dns-7c769b4797-w8znp\" (UID: \"43eb6e50-c6a9-4bbd-a301-926e6b3742d5\") " pod="openstack/dnsmasq-dns-7c769b4797-w8znp" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.048868 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-config-data\") pod \"keystone-bootstrap-g45lb\" (UID: \"454f77a5-dadb-4038-8ec3-c3a1bd0c2c22\") " pod="openstack/keystone-bootstrap-g45lb" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.048894 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-credential-keys\") pod \"keystone-bootstrap-g45lb\" (UID: \"454f77a5-dadb-4038-8ec3-c3a1bd0c2c22\") " pod="openstack/keystone-bootstrap-g45lb" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.048919 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdhp4\" (UniqueName: \"kubernetes.io/projected/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-kube-api-access-zdhp4\") pod \"keystone-bootstrap-g45lb\" (UID: \"454f77a5-dadb-4038-8ec3-c3a1bd0c2c22\") " pod="openstack/keystone-bootstrap-g45lb" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.048938 4799 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-config\") pod \"dnsmasq-dns-7c769b4797-w8znp\" (UID: \"43eb6e50-c6a9-4bbd-a301-926e6b3742d5\") " pod="openstack/dnsmasq-dns-7c769b4797-w8znp" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.048966 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-dns-svc\") pod \"dnsmasq-dns-7c769b4797-w8znp\" (UID: \"43eb6e50-c6a9-4bbd-a301-926e6b3742d5\") " pod="openstack/dnsmasq-dns-7c769b4797-w8znp" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.049023 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-combined-ca-bundle\") pod \"keystone-bootstrap-g45lb\" (UID: \"454f77a5-dadb-4038-8ec3-c3a1bd0c2c22\") " pod="openstack/keystone-bootstrap-g45lb" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.049043 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-ovsdbserver-sb\") pod \"dnsmasq-dns-7c769b4797-w8znp\" (UID: \"43eb6e50-c6a9-4bbd-a301-926e6b3742d5\") " pod="openstack/dnsmasq-dns-7c769b4797-w8znp" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.049091 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-fernet-keys\") pod \"keystone-bootstrap-g45lb\" (UID: \"454f77a5-dadb-4038-8ec3-c3a1bd0c2c22\") " pod="openstack/keystone-bootstrap-g45lb" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.049107 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-scripts\") pod \"keystone-bootstrap-g45lb\" (UID: \"454f77a5-dadb-4038-8ec3-c3a1bd0c2c22\") " pod="openstack/keystone-bootstrap-g45lb" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.049126 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-ovsdbserver-nb\") pod \"dnsmasq-dns-7c769b4797-w8znp\" (UID: \"43eb6e50-c6a9-4bbd-a301-926e6b3742d5\") " pod="openstack/dnsmasq-dns-7c769b4797-w8znp" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.138540 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c769b4797-w8znp"] Oct 10 16:50:23 crc kubenswrapper[4799]: E1010 16:50:23.155482 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dns-svc dns-swift-storage-0 kube-api-access-mdvns ovsdbserver-nb ovsdbserver-sb], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-7c769b4797-w8znp" podUID="43eb6e50-c6a9-4bbd-a301-926e6b3742d5" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.174100 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-combined-ca-bundle\") pod \"keystone-bootstrap-g45lb\" (UID: 
\"454f77a5-dadb-4038-8ec3-c3a1bd0c2c22\") " pod="openstack/keystone-bootstrap-g45lb" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.174167 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-ovsdbserver-sb\") pod \"dnsmasq-dns-7c769b4797-w8znp\" (UID: \"43eb6e50-c6a9-4bbd-a301-926e6b3742d5\") " pod="openstack/dnsmasq-dns-7c769b4797-w8znp" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.174288 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-fernet-keys\") pod \"keystone-bootstrap-g45lb\" (UID: \"454f77a5-dadb-4038-8ec3-c3a1bd0c2c22\") " pod="openstack/keystone-bootstrap-g45lb" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.174311 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-scripts\") pod \"keystone-bootstrap-g45lb\" (UID: \"454f77a5-dadb-4038-8ec3-c3a1bd0c2c22\") " pod="openstack/keystone-bootstrap-g45lb" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.174337 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-ovsdbserver-nb\") pod \"dnsmasq-dns-7c769b4797-w8znp\" (UID: \"43eb6e50-c6a9-4bbd-a301-926e6b3742d5\") " pod="openstack/dnsmasq-dns-7c769b4797-w8znp" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.174380 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdvns\" (UniqueName: \"kubernetes.io/projected/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-kube-api-access-mdvns\") pod \"dnsmasq-dns-7c769b4797-w8znp\" (UID: \"43eb6e50-c6a9-4bbd-a301-926e6b3742d5\") " pod="openstack/dnsmasq-dns-7c769b4797-w8znp" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.174410 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-dns-swift-storage-0\") pod \"dnsmasq-dns-7c769b4797-w8znp\" (UID: \"43eb6e50-c6a9-4bbd-a301-926e6b3742d5\") " pod="openstack/dnsmasq-dns-7c769b4797-w8znp" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.174436 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-config-data\") pod \"keystone-bootstrap-g45lb\" (UID: \"454f77a5-dadb-4038-8ec3-c3a1bd0c2c22\") " pod="openstack/keystone-bootstrap-g45lb" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.174478 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-credential-keys\") pod \"keystone-bootstrap-g45lb\" (UID: \"454f77a5-dadb-4038-8ec3-c3a1bd0c2c22\") " pod="openstack/keystone-bootstrap-g45lb" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.174515 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdhp4\" (UniqueName: \"kubernetes.io/projected/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-kube-api-access-zdhp4\") pod \"keystone-bootstrap-g45lb\" (UID: \"454f77a5-dadb-4038-8ec3-c3a1bd0c2c22\") " pod="openstack/keystone-bootstrap-g45lb" Oct 10 
16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.174539 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-config\") pod \"dnsmasq-dns-7c769b4797-w8znp\" (UID: \"43eb6e50-c6a9-4bbd-a301-926e6b3742d5\") " pod="openstack/dnsmasq-dns-7c769b4797-w8znp" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.174599 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-dns-svc\") pod \"dnsmasq-dns-7c769b4797-w8znp\" (UID: \"43eb6e50-c6a9-4bbd-a301-926e6b3742d5\") " pod="openstack/dnsmasq-dns-7c769b4797-w8znp" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.175571 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-dns-svc\") pod \"dnsmasq-dns-7c769b4797-w8znp\" (UID: \"43eb6e50-c6a9-4bbd-a301-926e6b3742d5\") " pod="openstack/dnsmasq-dns-7c769b4797-w8znp" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.192494 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-scripts\") pod \"keystone-bootstrap-g45lb\" (UID: \"454f77a5-dadb-4038-8ec3-c3a1bd0c2c22\") " pod="openstack/keystone-bootstrap-g45lb" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.194154 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-combined-ca-bundle\") pod \"keystone-bootstrap-g45lb\" (UID: \"454f77a5-dadb-4038-8ec3-c3a1bd0c2c22\") " pod="openstack/keystone-bootstrap-g45lb" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.196101 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-ovsdbserver-sb\") pod \"dnsmasq-dns-7c769b4797-w8znp\" (UID: \"43eb6e50-c6a9-4bbd-a301-926e6b3742d5\") " pod="openstack/dnsmasq-dns-7c769b4797-w8znp" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.238680 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-config\") pod \"dnsmasq-dns-7c769b4797-w8znp\" (UID: \"43eb6e50-c6a9-4bbd-a301-926e6b3742d5\") " pod="openstack/dnsmasq-dns-7c769b4797-w8znp" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.239202 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-ovsdbserver-nb\") pod \"dnsmasq-dns-7c769b4797-w8znp\" (UID: \"43eb6e50-c6a9-4bbd-a301-926e6b3742d5\") " pod="openstack/dnsmasq-dns-7c769b4797-w8znp" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.240431 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-fernet-keys\") pod \"keystone-bootstrap-g45lb\" (UID: \"454f77a5-dadb-4038-8ec3-c3a1bd0c2c22\") " pod="openstack/keystone-bootstrap-g45lb" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.240703 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: 
\"kubernetes.io/secret/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-credential-keys\") pod \"keystone-bootstrap-g45lb\" (UID: \"454f77a5-dadb-4038-8ec3-c3a1bd0c2c22\") " pod="openstack/keystone-bootstrap-g45lb" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.240986 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-59df9f9d49-8pz9m"] Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.242317 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59df9f9d49-8pz9m" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.243943 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-config-data\") pod \"keystone-bootstrap-g45lb\" (UID: \"454f77a5-dadb-4038-8ec3-c3a1bd0c2c22\") " pod="openstack/keystone-bootstrap-g45lb" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.249179 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-dns-swift-storage-0\") pod \"dnsmasq-dns-7c769b4797-w8znp\" (UID: \"43eb6e50-c6a9-4bbd-a301-926e6b3742d5\") " pod="openstack/dnsmasq-dns-7c769b4797-w8znp" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.262135 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mdvns\" (UniqueName: \"kubernetes.io/projected/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-kube-api-access-mdvns\") pod \"dnsmasq-dns-7c769b4797-w8znp\" (UID: \"43eb6e50-c6a9-4bbd-a301-926e6b3742d5\") " pod="openstack/dnsmasq-dns-7c769b4797-w8znp" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.273994 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zdhp4\" (UniqueName: \"kubernetes.io/projected/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-kube-api-access-zdhp4\") pod \"keystone-bootstrap-g45lb\" (UID: \"454f77a5-dadb-4038-8ec3-c3a1bd0c2c22\") " pod="openstack/keystone-bootstrap-g45lb" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.307569 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-g45lb" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.313232 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59df9f9d49-8pz9m"] Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.355508 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.385995 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.387786 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.397324 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.397551 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.410909 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0b3db818-0776-4b6f-98b0-b67e64292226-dns-swift-storage-0\") pod \"dnsmasq-dns-59df9f9d49-8pz9m\" (UID: \"0b3db818-0776-4b6f-98b0-b67e64292226\") " pod="openstack/dnsmasq-dns-59df9f9d49-8pz9m" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.410953 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0b3db818-0776-4b6f-98b0-b67e64292226-dns-svc\") pod \"dnsmasq-dns-59df9f9d49-8pz9m\" (UID: \"0b3db818-0776-4b6f-98b0-b67e64292226\") " pod="openstack/dnsmasq-dns-59df9f9d49-8pz9m" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.410978 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0b3db818-0776-4b6f-98b0-b67e64292226-config\") pod \"dnsmasq-dns-59df9f9d49-8pz9m\" (UID: \"0b3db818-0776-4b6f-98b0-b67e64292226\") " pod="openstack/dnsmasq-dns-59df9f9d49-8pz9m" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.411099 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s45xc\" (UniqueName: \"kubernetes.io/projected/0b3db818-0776-4b6f-98b0-b67e64292226-kube-api-access-s45xc\") pod \"dnsmasq-dns-59df9f9d49-8pz9m\" (UID: \"0b3db818-0776-4b6f-98b0-b67e64292226\") " pod="openstack/dnsmasq-dns-59df9f9d49-8pz9m" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.411324 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0b3db818-0776-4b6f-98b0-b67e64292226-ovsdbserver-sb\") pod \"dnsmasq-dns-59df9f9d49-8pz9m\" (UID: \"0b3db818-0776-4b6f-98b0-b67e64292226\") " pod="openstack/dnsmasq-dns-59df9f9d49-8pz9m" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.411423 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0b3db818-0776-4b6f-98b0-b67e64292226-ovsdbserver-nb\") pod \"dnsmasq-dns-59df9f9d49-8pz9m\" (UID: \"0b3db818-0776-4b6f-98b0-b67e64292226\") " pod="openstack/dnsmasq-dns-59df9f9d49-8pz9m" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.454859 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59df9f9d49-8pz9m"] Oct 10 16:50:23 crc kubenswrapper[4799]: E1010 16:50:23.455449 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dns-svc dns-swift-storage-0 kube-api-access-s45xc ovsdbserver-nb ovsdbserver-sb], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-59df9f9d49-8pz9m" podUID="0b3db818-0776-4b6f-98b0-b67e64292226" Oct 10 16:50:23 crc 
kubenswrapper[4799]: I1010 16:50:23.461091 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-5fpmb"] Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.462270 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-5fpmb" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.466894 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.467077 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.467170 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-gv9md" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.472905 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-74776f5dd7-7jxz4"] Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.474907 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74776f5dd7-7jxz4" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.478944 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-5fpmb"] Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.507630 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74776f5dd7-7jxz4"] Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.512647 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/766235b0-a5b9-4448-8dac-1afd1ca60e50-scripts\") pod \"ceilometer-0\" (UID: \"766235b0-a5b9-4448-8dac-1afd1ca60e50\") " pod="openstack/ceilometer-0" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.512696 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6nkg9\" (UniqueName: \"kubernetes.io/projected/766235b0-a5b9-4448-8dac-1afd1ca60e50-kube-api-access-6nkg9\") pod \"ceilometer-0\" (UID: \"766235b0-a5b9-4448-8dac-1afd1ca60e50\") " pod="openstack/ceilometer-0" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.512719 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/766235b0-a5b9-4448-8dac-1afd1ca60e50-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"766235b0-a5b9-4448-8dac-1afd1ca60e50\") " pod="openstack/ceilometer-0" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.512773 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/766235b0-a5b9-4448-8dac-1afd1ca60e50-config-data\") pod \"ceilometer-0\" (UID: \"766235b0-a5b9-4448-8dac-1afd1ca60e50\") " pod="openstack/ceilometer-0" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.512834 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s45xc\" (UniqueName: \"kubernetes.io/projected/0b3db818-0776-4b6f-98b0-b67e64292226-kube-api-access-s45xc\") pod \"dnsmasq-dns-59df9f9d49-8pz9m\" (UID: \"0b3db818-0776-4b6f-98b0-b67e64292226\") " pod="openstack/dnsmasq-dns-59df9f9d49-8pz9m" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.512908 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/766235b0-a5b9-4448-8dac-1afd1ca60e50-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"766235b0-a5b9-4448-8dac-1afd1ca60e50\") " pod="openstack/ceilometer-0" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.513031 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0b3db818-0776-4b6f-98b0-b67e64292226-ovsdbserver-sb\") pod \"dnsmasq-dns-59df9f9d49-8pz9m\" (UID: \"0b3db818-0776-4b6f-98b0-b67e64292226\") " pod="openstack/dnsmasq-dns-59df9f9d49-8pz9m" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.514023 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0b3db818-0776-4b6f-98b0-b67e64292226-ovsdbserver-sb\") pod \"dnsmasq-dns-59df9f9d49-8pz9m\" (UID: \"0b3db818-0776-4b6f-98b0-b67e64292226\") " pod="openstack/dnsmasq-dns-59df9f9d49-8pz9m" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.514079 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0b3db818-0776-4b6f-98b0-b67e64292226-ovsdbserver-nb\") pod \"dnsmasq-dns-59df9f9d49-8pz9m\" (UID: \"0b3db818-0776-4b6f-98b0-b67e64292226\") " pod="openstack/dnsmasq-dns-59df9f9d49-8pz9m" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.514165 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/766235b0-a5b9-4448-8dac-1afd1ca60e50-run-httpd\") pod \"ceilometer-0\" (UID: \"766235b0-a5b9-4448-8dac-1afd1ca60e50\") " pod="openstack/ceilometer-0" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.514213 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0b3db818-0776-4b6f-98b0-b67e64292226-dns-swift-storage-0\") pod \"dnsmasq-dns-59df9f9d49-8pz9m\" (UID: \"0b3db818-0776-4b6f-98b0-b67e64292226\") " pod="openstack/dnsmasq-dns-59df9f9d49-8pz9m" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.514261 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/766235b0-a5b9-4448-8dac-1afd1ca60e50-log-httpd\") pod \"ceilometer-0\" (UID: \"766235b0-a5b9-4448-8dac-1afd1ca60e50\") " pod="openstack/ceilometer-0" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.514365 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0b3db818-0776-4b6f-98b0-b67e64292226-dns-svc\") pod \"dnsmasq-dns-59df9f9d49-8pz9m\" (UID: \"0b3db818-0776-4b6f-98b0-b67e64292226\") " pod="openstack/dnsmasq-dns-59df9f9d49-8pz9m" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.514416 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0b3db818-0776-4b6f-98b0-b67e64292226-config\") pod \"dnsmasq-dns-59df9f9d49-8pz9m\" (UID: \"0b3db818-0776-4b6f-98b0-b67e64292226\") " pod="openstack/dnsmasq-dns-59df9f9d49-8pz9m" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.515010 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0b3db818-0776-4b6f-98b0-b67e64292226-dns-svc\") pod \"dnsmasq-dns-59df9f9d49-8pz9m\" (UID: 
\"0b3db818-0776-4b6f-98b0-b67e64292226\") " pod="openstack/dnsmasq-dns-59df9f9d49-8pz9m" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.515111 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0b3db818-0776-4b6f-98b0-b67e64292226-dns-swift-storage-0\") pod \"dnsmasq-dns-59df9f9d49-8pz9m\" (UID: \"0b3db818-0776-4b6f-98b0-b67e64292226\") " pod="openstack/dnsmasq-dns-59df9f9d49-8pz9m" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.515454 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0b3db818-0776-4b6f-98b0-b67e64292226-ovsdbserver-nb\") pod \"dnsmasq-dns-59df9f9d49-8pz9m\" (UID: \"0b3db818-0776-4b6f-98b0-b67e64292226\") " pod="openstack/dnsmasq-dns-59df9f9d49-8pz9m" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.515848 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0b3db818-0776-4b6f-98b0-b67e64292226-config\") pod \"dnsmasq-dns-59df9f9d49-8pz9m\" (UID: \"0b3db818-0776-4b6f-98b0-b67e64292226\") " pod="openstack/dnsmasq-dns-59df9f9d49-8pz9m" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.571771 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s45xc\" (UniqueName: \"kubernetes.io/projected/0b3db818-0776-4b6f-98b0-b67e64292226-kube-api-access-s45xc\") pod \"dnsmasq-dns-59df9f9d49-8pz9m\" (UID: \"0b3db818-0776-4b6f-98b0-b67e64292226\") " pod="openstack/dnsmasq-dns-59df9f9d49-8pz9m" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.605647 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59df9f9d49-8pz9m" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.606489 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-677c858757-mwwqx" podUID="c451444b-d58d-4383-801c-6df3ee8d8adf" containerName="dnsmasq-dns" containerID="cri-o://b7b397836ef927790d2c5e00f062d9078e2d75a5e100614ce09a8d55dc22800b" gracePeriod=10 Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.606915 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c769b4797-w8znp" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.606922 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-mrh2w"] Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.609184 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-677c858757-mwwqx" event={"ID":"c451444b-d58d-4383-801c-6df3ee8d8adf","Type":"ContainerStarted","Data":"b7b397836ef927790d2c5e00f062d9078e2d75a5e100614ce09a8d55dc22800b"} Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.609223 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-677c858757-mwwqx" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.609369 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-mrh2w" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.614685 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-kspsv" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.616288 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z89c9\" (UniqueName: \"kubernetes.io/projected/6d2833c7-9e1e-4063-93a1-54aded9b6daf-kube-api-access-z89c9\") pod \"placement-db-sync-5fpmb\" (UID: \"6d2833c7-9e1e-4063-93a1-54aded9b6daf\") " pod="openstack/placement-db-sync-5fpmb" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.616331 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d2833c7-9e1e-4063-93a1-54aded9b6daf-scripts\") pod \"placement-db-sync-5fpmb\" (UID: \"6d2833c7-9e1e-4063-93a1-54aded9b6daf\") " pod="openstack/placement-db-sync-5fpmb" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.616375 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-ovsdbserver-sb\") pod \"dnsmasq-dns-74776f5dd7-7jxz4\" (UID: \"5a8fddc9-9cab-41e0-90c4-3c797749e5e2\") " pod="openstack/dnsmasq-dns-74776f5dd7-7jxz4" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.616398 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nrz7m\" (UniqueName: \"kubernetes.io/projected/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-kube-api-access-nrz7m\") pod \"dnsmasq-dns-74776f5dd7-7jxz4\" (UID: \"5a8fddc9-9cab-41e0-90c4-3c797749e5e2\") " pod="openstack/dnsmasq-dns-74776f5dd7-7jxz4" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.616419 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d2833c7-9e1e-4063-93a1-54aded9b6daf-config-data\") pod \"placement-db-sync-5fpmb\" (UID: \"6d2833c7-9e1e-4063-93a1-54aded9b6daf\") " pod="openstack/placement-db-sync-5fpmb" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.616437 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6d2833c7-9e1e-4063-93a1-54aded9b6daf-logs\") pod \"placement-db-sync-5fpmb\" (UID: \"6d2833c7-9e1e-4063-93a1-54aded9b6daf\") " pod="openstack/placement-db-sync-5fpmb" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.616459 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d2833c7-9e1e-4063-93a1-54aded9b6daf-combined-ca-bundle\") pod \"placement-db-sync-5fpmb\" (UID: \"6d2833c7-9e1e-4063-93a1-54aded9b6daf\") " pod="openstack/placement-db-sync-5fpmb" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.616487 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-config\") pod \"dnsmasq-dns-74776f5dd7-7jxz4\" (UID: \"5a8fddc9-9cab-41e0-90c4-3c797749e5e2\") " pod="openstack/dnsmasq-dns-74776f5dd7-7jxz4" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.616513 4799 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/766235b0-a5b9-4448-8dac-1afd1ca60e50-run-httpd\") pod \"ceilometer-0\" (UID: \"766235b0-a5b9-4448-8dac-1afd1ca60e50\") " pod="openstack/ceilometer-0" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.616544 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/766235b0-a5b9-4448-8dac-1afd1ca60e50-log-httpd\") pod \"ceilometer-0\" (UID: \"766235b0-a5b9-4448-8dac-1afd1ca60e50\") " pod="openstack/ceilometer-0" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.616590 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/766235b0-a5b9-4448-8dac-1afd1ca60e50-scripts\") pod \"ceilometer-0\" (UID: \"766235b0-a5b9-4448-8dac-1afd1ca60e50\") " pod="openstack/ceilometer-0" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.616610 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6nkg9\" (UniqueName: \"kubernetes.io/projected/766235b0-a5b9-4448-8dac-1afd1ca60e50-kube-api-access-6nkg9\") pod \"ceilometer-0\" (UID: \"766235b0-a5b9-4448-8dac-1afd1ca60e50\") " pod="openstack/ceilometer-0" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.616631 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/766235b0-a5b9-4448-8dac-1afd1ca60e50-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"766235b0-a5b9-4448-8dac-1afd1ca60e50\") " pod="openstack/ceilometer-0" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.616654 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/766235b0-a5b9-4448-8dac-1afd1ca60e50-config-data\") pod \"ceilometer-0\" (UID: \"766235b0-a5b9-4448-8dac-1afd1ca60e50\") " pod="openstack/ceilometer-0" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.616678 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-dns-swift-storage-0\") pod \"dnsmasq-dns-74776f5dd7-7jxz4\" (UID: \"5a8fddc9-9cab-41e0-90c4-3c797749e5e2\") " pod="openstack/dnsmasq-dns-74776f5dd7-7jxz4" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.616730 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-ovsdbserver-nb\") pod \"dnsmasq-dns-74776f5dd7-7jxz4\" (UID: \"5a8fddc9-9cab-41e0-90c4-3c797749e5e2\") " pod="openstack/dnsmasq-dns-74776f5dd7-7jxz4" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.616875 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-dns-svc\") pod \"dnsmasq-dns-74776f5dd7-7jxz4\" (UID: \"5a8fddc9-9cab-41e0-90c4-3c797749e5e2\") " pod="openstack/dnsmasq-dns-74776f5dd7-7jxz4" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.616906 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/766235b0-a5b9-4448-8dac-1afd1ca60e50-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"766235b0-a5b9-4448-8dac-1afd1ca60e50\") " 
pod="openstack/ceilometer-0" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.617011 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.617709 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/766235b0-a5b9-4448-8dac-1afd1ca60e50-log-httpd\") pod \"ceilometer-0\" (UID: \"766235b0-a5b9-4448-8dac-1afd1ca60e50\") " pod="openstack/ceilometer-0" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.617932 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/766235b0-a5b9-4448-8dac-1afd1ca60e50-run-httpd\") pod \"ceilometer-0\" (UID: \"766235b0-a5b9-4448-8dac-1afd1ca60e50\") " pod="openstack/ceilometer-0" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.622147 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/766235b0-a5b9-4448-8dac-1afd1ca60e50-config-data\") pod \"ceilometer-0\" (UID: \"766235b0-a5b9-4448-8dac-1afd1ca60e50\") " pod="openstack/ceilometer-0" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.624546 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/766235b0-a5b9-4448-8dac-1afd1ca60e50-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"766235b0-a5b9-4448-8dac-1afd1ca60e50\") " pod="openstack/ceilometer-0" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.624775 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-mrh2w"] Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.625465 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/766235b0-a5b9-4448-8dac-1afd1ca60e50-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"766235b0-a5b9-4448-8dac-1afd1ca60e50\") " pod="openstack/ceilometer-0" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.626590 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59df9f9d49-8pz9m" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.639618 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-677c858757-mwwqx" podStartSLOduration=3.639592624 podStartE2EDuration="3.639592624s" podCreationTimestamp="2025-10-10 16:50:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:50:23.636827486 +0000 UTC m=+1117.145151611" watchObservedRunningTime="2025-10-10 16:50:23.639592624 +0000 UTC m=+1117.147916739" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.645826 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/766235b0-a5b9-4448-8dac-1afd1ca60e50-scripts\") pod \"ceilometer-0\" (UID: \"766235b0-a5b9-4448-8dac-1afd1ca60e50\") " pod="openstack/ceilometer-0" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.652549 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6nkg9\" (UniqueName: \"kubernetes.io/projected/766235b0-a5b9-4448-8dac-1afd1ca60e50-kube-api-access-6nkg9\") pod \"ceilometer-0\" (UID: \"766235b0-a5b9-4448-8dac-1afd1ca60e50\") " pod="openstack/ceilometer-0" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.660160 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c769b4797-w8znp" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.718158 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0b3db818-0776-4b6f-98b0-b67e64292226-ovsdbserver-sb\") pod \"0b3db818-0776-4b6f-98b0-b67e64292226\" (UID: \"0b3db818-0776-4b6f-98b0-b67e64292226\") " Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.718264 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0b3db818-0776-4b6f-98b0-b67e64292226-dns-svc\") pod \"0b3db818-0776-4b6f-98b0-b67e64292226\" (UID: \"0b3db818-0776-4b6f-98b0-b67e64292226\") " Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.718422 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0b3db818-0776-4b6f-98b0-b67e64292226-dns-swift-storage-0\") pod \"0b3db818-0776-4b6f-98b0-b67e64292226\" (UID: \"0b3db818-0776-4b6f-98b0-b67e64292226\") " Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.718460 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0b3db818-0776-4b6f-98b0-b67e64292226-config\") pod \"0b3db818-0776-4b6f-98b0-b67e64292226\" (UID: \"0b3db818-0776-4b6f-98b0-b67e64292226\") " Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.718511 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0b3db818-0776-4b6f-98b0-b67e64292226-ovsdbserver-nb\") pod \"0b3db818-0776-4b6f-98b0-b67e64292226\" (UID: \"0b3db818-0776-4b6f-98b0-b67e64292226\") " Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.718576 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s45xc\" (UniqueName: \"kubernetes.io/projected/0b3db818-0776-4b6f-98b0-b67e64292226-kube-api-access-s45xc\") pod 
\"0b3db818-0776-4b6f-98b0-b67e64292226\" (UID: \"0b3db818-0776-4b6f-98b0-b67e64292226\") " Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.718887 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-dns-swift-storage-0\") pod \"dnsmasq-dns-74776f5dd7-7jxz4\" (UID: \"5a8fddc9-9cab-41e0-90c4-3c797749e5e2\") " pod="openstack/dnsmasq-dns-74776f5dd7-7jxz4" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.718940 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a8dccd24-a3ca-4f98-90b4-e2943cd228d3-db-sync-config-data\") pod \"barbican-db-sync-mrh2w\" (UID: \"a8dccd24-a3ca-4f98-90b4-e2943cd228d3\") " pod="openstack/barbican-db-sync-mrh2w" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.719020 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-ovsdbserver-nb\") pod \"dnsmasq-dns-74776f5dd7-7jxz4\" (UID: \"5a8fddc9-9cab-41e0-90c4-3c797749e5e2\") " pod="openstack/dnsmasq-dns-74776f5dd7-7jxz4" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.719081 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-dns-svc\") pod \"dnsmasq-dns-74776f5dd7-7jxz4\" (UID: \"5a8fddc9-9cab-41e0-90c4-3c797749e5e2\") " pod="openstack/dnsmasq-dns-74776f5dd7-7jxz4" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.719137 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z89c9\" (UniqueName: \"kubernetes.io/projected/6d2833c7-9e1e-4063-93a1-54aded9b6daf-kube-api-access-z89c9\") pod \"placement-db-sync-5fpmb\" (UID: \"6d2833c7-9e1e-4063-93a1-54aded9b6daf\") " pod="openstack/placement-db-sync-5fpmb" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.719177 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d2833c7-9e1e-4063-93a1-54aded9b6daf-scripts\") pod \"placement-db-sync-5fpmb\" (UID: \"6d2833c7-9e1e-4063-93a1-54aded9b6daf\") " pod="openstack/placement-db-sync-5fpmb" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.719217 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-ovsdbserver-sb\") pod \"dnsmasq-dns-74776f5dd7-7jxz4\" (UID: \"5a8fddc9-9cab-41e0-90c4-3c797749e5e2\") " pod="openstack/dnsmasq-dns-74776f5dd7-7jxz4" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.719281 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nrz7m\" (UniqueName: \"kubernetes.io/projected/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-kube-api-access-nrz7m\") pod \"dnsmasq-dns-74776f5dd7-7jxz4\" (UID: \"5a8fddc9-9cab-41e0-90c4-3c797749e5e2\") " pod="openstack/dnsmasq-dns-74776f5dd7-7jxz4" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.719324 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d2833c7-9e1e-4063-93a1-54aded9b6daf-config-data\") pod \"placement-db-sync-5fpmb\" (UID: \"6d2833c7-9e1e-4063-93a1-54aded9b6daf\") " 
pod="openstack/placement-db-sync-5fpmb" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.719348 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6d2833c7-9e1e-4063-93a1-54aded9b6daf-logs\") pod \"placement-db-sync-5fpmb\" (UID: \"6d2833c7-9e1e-4063-93a1-54aded9b6daf\") " pod="openstack/placement-db-sync-5fpmb" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.719394 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d2833c7-9e1e-4063-93a1-54aded9b6daf-combined-ca-bundle\") pod \"placement-db-sync-5fpmb\" (UID: \"6d2833c7-9e1e-4063-93a1-54aded9b6daf\") " pod="openstack/placement-db-sync-5fpmb" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.719422 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x72db\" (UniqueName: \"kubernetes.io/projected/a8dccd24-a3ca-4f98-90b4-e2943cd228d3-kube-api-access-x72db\") pod \"barbican-db-sync-mrh2w\" (UID: \"a8dccd24-a3ca-4f98-90b4-e2943cd228d3\") " pod="openstack/barbican-db-sync-mrh2w" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.719453 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-config\") pod \"dnsmasq-dns-74776f5dd7-7jxz4\" (UID: \"5a8fddc9-9cab-41e0-90c4-3c797749e5e2\") " pod="openstack/dnsmasq-dns-74776f5dd7-7jxz4" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.719557 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8dccd24-a3ca-4f98-90b4-e2943cd228d3-combined-ca-bundle\") pod \"barbican-db-sync-mrh2w\" (UID: \"a8dccd24-a3ca-4f98-90b4-e2943cd228d3\") " pod="openstack/barbican-db-sync-mrh2w" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.720628 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-dns-svc\") pod \"dnsmasq-dns-74776f5dd7-7jxz4\" (UID: \"5a8fddc9-9cab-41e0-90c4-3c797749e5e2\") " pod="openstack/dnsmasq-dns-74776f5dd7-7jxz4" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.721968 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b3db818-0776-4b6f-98b0-b67e64292226-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "0b3db818-0776-4b6f-98b0-b67e64292226" (UID: "0b3db818-0776-4b6f-98b0-b67e64292226"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.725093 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b3db818-0776-4b6f-98b0-b67e64292226-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "0b3db818-0776-4b6f-98b0-b67e64292226" (UID: "0b3db818-0776-4b6f-98b0-b67e64292226"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.725654 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-dns-swift-storage-0\") pod \"dnsmasq-dns-74776f5dd7-7jxz4\" (UID: \"5a8fddc9-9cab-41e0-90c4-3c797749e5e2\") " pod="openstack/dnsmasq-dns-74776f5dd7-7jxz4" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.726202 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b3db818-0776-4b6f-98b0-b67e64292226-config" (OuterVolumeSpecName: "config") pod "0b3db818-0776-4b6f-98b0-b67e64292226" (UID: "0b3db818-0776-4b6f-98b0-b67e64292226"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.726387 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-ovsdbserver-nb\") pod \"dnsmasq-dns-74776f5dd7-7jxz4\" (UID: \"5a8fddc9-9cab-41e0-90c4-3c797749e5e2\") " pod="openstack/dnsmasq-dns-74776f5dd7-7jxz4" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.726647 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b3db818-0776-4b6f-98b0-b67e64292226-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "0b3db818-0776-4b6f-98b0-b67e64292226" (UID: "0b3db818-0776-4b6f-98b0-b67e64292226"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.726690 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6d2833c7-9e1e-4063-93a1-54aded9b6daf-logs\") pod \"placement-db-sync-5fpmb\" (UID: \"6d2833c7-9e1e-4063-93a1-54aded9b6daf\") " pod="openstack/placement-db-sync-5fpmb" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.727137 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b3db818-0776-4b6f-98b0-b67e64292226-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0b3db818-0776-4b6f-98b0-b67e64292226" (UID: "0b3db818-0776-4b6f-98b0-b67e64292226"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.728331 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-ovsdbserver-sb\") pod \"dnsmasq-dns-74776f5dd7-7jxz4\" (UID: \"5a8fddc9-9cab-41e0-90c4-3c797749e5e2\") " pod="openstack/dnsmasq-dns-74776f5dd7-7jxz4" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.729402 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d2833c7-9e1e-4063-93a1-54aded9b6daf-scripts\") pod \"placement-db-sync-5fpmb\" (UID: \"6d2833c7-9e1e-4063-93a1-54aded9b6daf\") " pod="openstack/placement-db-sync-5fpmb" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.730309 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-config\") pod \"dnsmasq-dns-74776f5dd7-7jxz4\" (UID: \"5a8fddc9-9cab-41e0-90c4-3c797749e5e2\") " pod="openstack/dnsmasq-dns-74776f5dd7-7jxz4" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.730703 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.732391 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d2833c7-9e1e-4063-93a1-54aded9b6daf-config-data\") pod \"placement-db-sync-5fpmb\" (UID: \"6d2833c7-9e1e-4063-93a1-54aded9b6daf\") " pod="openstack/placement-db-sync-5fpmb" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.736101 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z89c9\" (UniqueName: \"kubernetes.io/projected/6d2833c7-9e1e-4063-93a1-54aded9b6daf-kube-api-access-z89c9\") pod \"placement-db-sync-5fpmb\" (UID: \"6d2833c7-9e1e-4063-93a1-54aded9b6daf\") " pod="openstack/placement-db-sync-5fpmb" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.742466 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nrz7m\" (UniqueName: \"kubernetes.io/projected/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-kube-api-access-nrz7m\") pod \"dnsmasq-dns-74776f5dd7-7jxz4\" (UID: \"5a8fddc9-9cab-41e0-90c4-3c797749e5e2\") " pod="openstack/dnsmasq-dns-74776f5dd7-7jxz4" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.757636 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b3db818-0776-4b6f-98b0-b67e64292226-kube-api-access-s45xc" (OuterVolumeSpecName: "kube-api-access-s45xc") pod "0b3db818-0776-4b6f-98b0-b67e64292226" (UID: "0b3db818-0776-4b6f-98b0-b67e64292226"). InnerVolumeSpecName "kube-api-access-s45xc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.767431 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d2833c7-9e1e-4063-93a1-54aded9b6daf-combined-ca-bundle\") pod \"placement-db-sync-5fpmb\" (UID: \"6d2833c7-9e1e-4063-93a1-54aded9b6daf\") " pod="openstack/placement-db-sync-5fpmb" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.805543 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-5fpmb" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.820369 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-config\") pod \"43eb6e50-c6a9-4bbd-a301-926e6b3742d5\" (UID: \"43eb6e50-c6a9-4bbd-a301-926e6b3742d5\") " Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.820433 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-dns-swift-storage-0\") pod \"43eb6e50-c6a9-4bbd-a301-926e6b3742d5\" (UID: \"43eb6e50-c6a9-4bbd-a301-926e6b3742d5\") " Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.820489 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mdvns\" (UniqueName: \"kubernetes.io/projected/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-kube-api-access-mdvns\") pod \"43eb6e50-c6a9-4bbd-a301-926e6b3742d5\" (UID: \"43eb6e50-c6a9-4bbd-a301-926e6b3742d5\") " Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.820528 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-dns-svc\") pod \"43eb6e50-c6a9-4bbd-a301-926e6b3742d5\" (UID: \"43eb6e50-c6a9-4bbd-a301-926e6b3742d5\") " Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.820547 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-ovsdbserver-sb\") pod \"43eb6e50-c6a9-4bbd-a301-926e6b3742d5\" (UID: \"43eb6e50-c6a9-4bbd-a301-926e6b3742d5\") " Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.820645 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-ovsdbserver-nb\") pod \"43eb6e50-c6a9-4bbd-a301-926e6b3742d5\" (UID: \"43eb6e50-c6a9-4bbd-a301-926e6b3742d5\") " Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.820850 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a8dccd24-a3ca-4f98-90b4-e2943cd228d3-db-sync-config-data\") pod \"barbican-db-sync-mrh2w\" (UID: \"a8dccd24-a3ca-4f98-90b4-e2943cd228d3\") " pod="openstack/barbican-db-sync-mrh2w" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.820953 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x72db\" (UniqueName: \"kubernetes.io/projected/a8dccd24-a3ca-4f98-90b4-e2943cd228d3-kube-api-access-x72db\") pod \"barbican-db-sync-mrh2w\" (UID: \"a8dccd24-a3ca-4f98-90b4-e2943cd228d3\") " pod="openstack/barbican-db-sync-mrh2w" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.821002 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8dccd24-a3ca-4f98-90b4-e2943cd228d3-combined-ca-bundle\") pod \"barbican-db-sync-mrh2w\" (UID: \"a8dccd24-a3ca-4f98-90b4-e2943cd228d3\") " pod="openstack/barbican-db-sync-mrh2w" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.821045 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/0b3db818-0776-4b6f-98b0-b67e64292226-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.821056 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s45xc\" (UniqueName: \"kubernetes.io/projected/0b3db818-0776-4b6f-98b0-b67e64292226-kube-api-access-s45xc\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.821066 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0b3db818-0776-4b6f-98b0-b67e64292226-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.821075 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0b3db818-0776-4b6f-98b0-b67e64292226-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.821087 4799 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0b3db818-0776-4b6f-98b0-b67e64292226-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.821098 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0b3db818-0776-4b6f-98b0-b67e64292226-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.822976 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-config" (OuterVolumeSpecName: "config") pod "43eb6e50-c6a9-4bbd-a301-926e6b3742d5" (UID: "43eb6e50-c6a9-4bbd-a301-926e6b3742d5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.823405 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "43eb6e50-c6a9-4bbd-a301-926e6b3742d5" (UID: "43eb6e50-c6a9-4bbd-a301-926e6b3742d5"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.824751 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a8dccd24-a3ca-4f98-90b4-e2943cd228d3-db-sync-config-data\") pod \"barbican-db-sync-mrh2w\" (UID: \"a8dccd24-a3ca-4f98-90b4-e2943cd228d3\") " pod="openstack/barbican-db-sync-mrh2w" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.825264 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-kube-api-access-mdvns" (OuterVolumeSpecName: "kube-api-access-mdvns") pod "43eb6e50-c6a9-4bbd-a301-926e6b3742d5" (UID: "43eb6e50-c6a9-4bbd-a301-926e6b3742d5"). InnerVolumeSpecName "kube-api-access-mdvns". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.826584 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8dccd24-a3ca-4f98-90b4-e2943cd228d3-combined-ca-bundle\") pod \"barbican-db-sync-mrh2w\" (UID: \"a8dccd24-a3ca-4f98-90b4-e2943cd228d3\") " pod="openstack/barbican-db-sync-mrh2w" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.828197 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74776f5dd7-7jxz4" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.830009 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "43eb6e50-c6a9-4bbd-a301-926e6b3742d5" (UID: "43eb6e50-c6a9-4bbd-a301-926e6b3742d5"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.830056 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "43eb6e50-c6a9-4bbd-a301-926e6b3742d5" (UID: "43eb6e50-c6a9-4bbd-a301-926e6b3742d5"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.830113 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "43eb6e50-c6a9-4bbd-a301-926e6b3742d5" (UID: "43eb6e50-c6a9-4bbd-a301-926e6b3742d5"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.845938 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x72db\" (UniqueName: \"kubernetes.io/projected/a8dccd24-a3ca-4f98-90b4-e2943cd228d3-kube-api-access-x72db\") pod \"barbican-db-sync-mrh2w\" (UID: \"a8dccd24-a3ca-4f98-90b4-e2943cd228d3\") " pod="openstack/barbican-db-sync-mrh2w" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.922590 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.923002 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.923017 4799 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.923030 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mdvns\" (UniqueName: \"kubernetes.io/projected/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-kube-api-access-mdvns\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.923043 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:23 crc kubenswrapper[4799]: I1010 16:50:23.923056 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/43eb6e50-c6a9-4bbd-a301-926e6b3742d5-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.057165 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-g45lb"] Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.086141 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-mrh2w" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.238339 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-677c858757-mwwqx" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.279200 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 10 16:50:24 crc kubenswrapper[4799]: E1010 16:50:24.289219 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c451444b-d58d-4383-801c-6df3ee8d8adf" containerName="dnsmasq-dns" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.289246 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="c451444b-d58d-4383-801c-6df3ee8d8adf" containerName="dnsmasq-dns" Oct 10 16:50:24 crc kubenswrapper[4799]: E1010 16:50:24.289279 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c451444b-d58d-4383-801c-6df3ee8d8adf" containerName="init" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.289289 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="c451444b-d58d-4383-801c-6df3ee8d8adf" containerName="init" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.289834 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="c451444b-d58d-4383-801c-6df3ee8d8adf" containerName="dnsmasq-dns" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.291502 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.294643 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.300556 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.300868 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-l4znb" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.336663 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c451444b-d58d-4383-801c-6df3ee8d8adf-dns-svc\") pod \"c451444b-d58d-4383-801c-6df3ee8d8adf\" (UID: \"c451444b-d58d-4383-801c-6df3ee8d8adf\") " Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.336731 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c451444b-d58d-4383-801c-6df3ee8d8adf-ovsdbserver-sb\") pod \"c451444b-d58d-4383-801c-6df3ee8d8adf\" (UID: \"c451444b-d58d-4383-801c-6df3ee8d8adf\") " Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.336833 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lbjp2\" (UniqueName: \"kubernetes.io/projected/c451444b-d58d-4383-801c-6df3ee8d8adf-kube-api-access-lbjp2\") pod \"c451444b-d58d-4383-801c-6df3ee8d8adf\" (UID: \"c451444b-d58d-4383-801c-6df3ee8d8adf\") " Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.336930 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c451444b-d58d-4383-801c-6df3ee8d8adf-config\") pod \"c451444b-d58d-4383-801c-6df3ee8d8adf\" (UID: \"c451444b-d58d-4383-801c-6df3ee8d8adf\") " Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.336973 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c451444b-d58d-4383-801c-6df3ee8d8adf-ovsdbserver-nb\") 
pod \"c451444b-d58d-4383-801c-6df3ee8d8adf\" (UID: \"c451444b-d58d-4383-801c-6df3ee8d8adf\") " Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.337132 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c451444b-d58d-4383-801c-6df3ee8d8adf-dns-swift-storage-0\") pod \"c451444b-d58d-4383-801c-6df3ee8d8adf\" (UID: \"c451444b-d58d-4383-801c-6df3ee8d8adf\") " Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.352909 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c451444b-d58d-4383-801c-6df3ee8d8adf-kube-api-access-lbjp2" (OuterVolumeSpecName: "kube-api-access-lbjp2") pod "c451444b-d58d-4383-801c-6df3ee8d8adf" (UID: "c451444b-d58d-4383-801c-6df3ee8d8adf"). InnerVolumeSpecName "kube-api-access-lbjp2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.370575 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.386559 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-5fpmb"] Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.415970 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.418571 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.420821 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.424253 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.427629 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-mrh2w"] Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.436409 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.439178 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xk2d2\" (UniqueName: \"kubernetes.io/projected/546e566a-e924-4d27-8776-74b0b1ae123a-kube-api-access-xk2d2\") pod \"glance-default-external-api-0\" (UID: \"546e566a-e924-4d27-8776-74b0b1ae123a\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.439427 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/546e566a-e924-4d27-8776-74b0b1ae123a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"546e566a-e924-4d27-8776-74b0b1ae123a\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.439492 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"546e566a-e924-4d27-8776-74b0b1ae123a\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.439981 4799 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/546e566a-e924-4d27-8776-74b0b1ae123a-scripts\") pod \"glance-default-external-api-0\" (UID: \"546e566a-e924-4d27-8776-74b0b1ae123a\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.440339 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/546e566a-e924-4d27-8776-74b0b1ae123a-config-data\") pod \"glance-default-external-api-0\" (UID: \"546e566a-e924-4d27-8776-74b0b1ae123a\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.440599 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/546e566a-e924-4d27-8776-74b0b1ae123a-logs\") pod \"glance-default-external-api-0\" (UID: \"546e566a-e924-4d27-8776-74b0b1ae123a\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.440773 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c451444b-d58d-4383-801c-6df3ee8d8adf-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c451444b-d58d-4383-801c-6df3ee8d8adf" (UID: "c451444b-d58d-4383-801c-6df3ee8d8adf"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.440964 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/546e566a-e924-4d27-8776-74b0b1ae123a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"546e566a-e924-4d27-8776-74b0b1ae123a\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.441356 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lbjp2\" (UniqueName: \"kubernetes.io/projected/c451444b-d58d-4383-801c-6df3ee8d8adf-kube-api-access-lbjp2\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.441548 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c451444b-d58d-4383-801c-6df3ee8d8adf-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.461714 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c451444b-d58d-4383-801c-6df3ee8d8adf-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "c451444b-d58d-4383-801c-6df3ee8d8adf" (UID: "c451444b-d58d-4383-801c-6df3ee8d8adf"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.470849 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c451444b-d58d-4383-801c-6df3ee8d8adf-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c451444b-d58d-4383-801c-6df3ee8d8adf" (UID: "c451444b-d58d-4383-801c-6df3ee8d8adf"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.483134 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c451444b-d58d-4383-801c-6df3ee8d8adf-config" (OuterVolumeSpecName: "config") pod "c451444b-d58d-4383-801c-6df3ee8d8adf" (UID: "c451444b-d58d-4383-801c-6df3ee8d8adf"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:50:24 crc kubenswrapper[4799]: W1010 16:50:24.484096 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5a8fddc9_9cab_41e0_90c4_3c797749e5e2.slice/crio-a8b3d6d44948faf7188152133bb1654533232a3262d8ae716cf232eed375ee46 WatchSource:0}: Error finding container a8b3d6d44948faf7188152133bb1654533232a3262d8ae716cf232eed375ee46: Status 404 returned error can't find the container with id a8b3d6d44948faf7188152133bb1654533232a3262d8ae716cf232eed375ee46 Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.487711 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74776f5dd7-7jxz4"] Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.489561 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c451444b-d58d-4383-801c-6df3ee8d8adf-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c451444b-d58d-4383-801c-6df3ee8d8adf" (UID: "c451444b-d58d-4383-801c-6df3ee8d8adf"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.542885 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/546e566a-e924-4d27-8776-74b0b1ae123a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"546e566a-e924-4d27-8776-74b0b1ae123a\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.542946 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"546e566a-e924-4d27-8776-74b0b1ae123a\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.542968 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b51d046-e9cd-4cba-9f17-800b4e1a223b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.542998 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b51d046-e9cd-4cba-9f17-800b4e1a223b-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.543443 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b51d046-e9cd-4cba-9f17-800b4e1a223b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\") " 
pod="openstack/glance-default-internal-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.543494 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.543521 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/546e566a-e924-4d27-8776-74b0b1ae123a-scripts\") pod \"glance-default-external-api-0\" (UID: \"546e566a-e924-4d27-8776-74b0b1ae123a\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.543551 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9b51d046-e9cd-4cba-9f17-800b4e1a223b-logs\") pod \"glance-default-internal-api-0\" (UID: \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.543573 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9b51d046-e9cd-4cba-9f17-800b4e1a223b-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.543589 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/546e566a-e924-4d27-8776-74b0b1ae123a-config-data\") pod \"glance-default-external-api-0\" (UID: \"546e566a-e924-4d27-8776-74b0b1ae123a\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.543604 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f49p4\" (UniqueName: \"kubernetes.io/projected/9b51d046-e9cd-4cba-9f17-800b4e1a223b-kube-api-access-f49p4\") pod \"glance-default-internal-api-0\" (UID: \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.543636 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/546e566a-e924-4d27-8776-74b0b1ae123a-logs\") pod \"glance-default-external-api-0\" (UID: \"546e566a-e924-4d27-8776-74b0b1ae123a\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.543675 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/546e566a-e924-4d27-8776-74b0b1ae123a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"546e566a-e924-4d27-8776-74b0b1ae123a\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.543704 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xk2d2\" (UniqueName: \"kubernetes.io/projected/546e566a-e924-4d27-8776-74b0b1ae123a-kube-api-access-xk2d2\") pod \"glance-default-external-api-0\" (UID: \"546e566a-e924-4d27-8776-74b0b1ae123a\") " 
pod="openstack/glance-default-external-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.543808 4799 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c451444b-d58d-4383-801c-6df3ee8d8adf-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.543826 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c451444b-d58d-4383-801c-6df3ee8d8adf-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.543837 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c451444b-d58d-4383-801c-6df3ee8d8adf-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.543848 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c451444b-d58d-4383-801c-6df3ee8d8adf-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.547568 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/546e566a-e924-4d27-8776-74b0b1ae123a-logs\") pod \"glance-default-external-api-0\" (UID: \"546e566a-e924-4d27-8776-74b0b1ae123a\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.547912 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/546e566a-e924-4d27-8776-74b0b1ae123a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"546e566a-e924-4d27-8776-74b0b1ae123a\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.548281 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"546e566a-e924-4d27-8776-74b0b1ae123a\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-external-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.548426 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/546e566a-e924-4d27-8776-74b0b1ae123a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"546e566a-e924-4d27-8776-74b0b1ae123a\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.555349 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/546e566a-e924-4d27-8776-74b0b1ae123a-config-data\") pod \"glance-default-external-api-0\" (UID: \"546e566a-e924-4d27-8776-74b0b1ae123a\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.555416 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/546e566a-e924-4d27-8776-74b0b1ae123a-scripts\") pod \"glance-default-external-api-0\" (UID: \"546e566a-e924-4d27-8776-74b0b1ae123a\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.570557 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xk2d2\" (UniqueName: 
\"kubernetes.io/projected/546e566a-e924-4d27-8776-74b0b1ae123a-kube-api-access-xk2d2\") pod \"glance-default-external-api-0\" (UID: \"546e566a-e924-4d27-8776-74b0b1ae123a\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.599822 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"546e566a-e924-4d27-8776-74b0b1ae123a\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.626189 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-mrh2w" event={"ID":"a8dccd24-a3ca-4f98-90b4-e2943cd228d3","Type":"ContainerStarted","Data":"e3313f07f4776368beb200ef3f9748dc26824ad436bc705b8083926f0e14d489"} Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.627111 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74776f5dd7-7jxz4" event={"ID":"5a8fddc9-9cab-41e0-90c4-3c797749e5e2","Type":"ContainerStarted","Data":"a8b3d6d44948faf7188152133bb1654533232a3262d8ae716cf232eed375ee46"} Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.627864 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"766235b0-a5b9-4448-8dac-1afd1ca60e50","Type":"ContainerStarted","Data":"e599caec4b8b12a22e3ad54782bf7285344c71fac82fa3661c389ca3959918f8"} Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.629942 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-g45lb" event={"ID":"454f77a5-dadb-4038-8ec3-c3a1bd0c2c22","Type":"ContainerStarted","Data":"194dec3f34924527e2cec1db990873105b661b96fad790933950a8b6a9a87518"} Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.629966 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-g45lb" event={"ID":"454f77a5-dadb-4038-8ec3-c3a1bd0c2c22","Type":"ContainerStarted","Data":"cb081e6518cb1542b5204c27ef689039816791fdd84f7a7ec2035e0b3f47f2ba"} Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.633621 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.635042 4799 generic.go:334] "Generic (PLEG): container finished" podID="c451444b-d58d-4383-801c-6df3ee8d8adf" containerID="b7b397836ef927790d2c5e00f062d9078e2d75a5e100614ce09a8d55dc22800b" exitCode=0 Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.635094 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-677c858757-mwwqx" event={"ID":"c451444b-d58d-4383-801c-6df3ee8d8adf","Type":"ContainerDied","Data":"b7b397836ef927790d2c5e00f062d9078e2d75a5e100614ce09a8d55dc22800b"} Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.635114 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-677c858757-mwwqx" event={"ID":"c451444b-d58d-4383-801c-6df3ee8d8adf","Type":"ContainerDied","Data":"ad8694bed47720aacdf17ecc0067580874425191c6d8c3a1423f77ea75e3b475"} Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.635129 4799 scope.go:117] "RemoveContainer" containerID="b7b397836ef927790d2c5e00f062d9078e2d75a5e100614ce09a8d55dc22800b" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.635251 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-677c858757-mwwqx" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.638947 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c769b4797-w8znp" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.639101 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-5fpmb" event={"ID":"6d2833c7-9e1e-4063-93a1-54aded9b6daf","Type":"ContainerStarted","Data":"19d7dac27f91892a97c8d46b6058de7c6bc0cfb0ca66247b95d8c2cf6b014697"} Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.639296 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59df9f9d49-8pz9m" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.650083 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9b51d046-e9cd-4cba-9f17-800b4e1a223b-logs\") pod \"glance-default-internal-api-0\" (UID: \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.650155 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9b51d046-e9cd-4cba-9f17-800b4e1a223b-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.650186 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f49p4\" (UniqueName: \"kubernetes.io/projected/9b51d046-e9cd-4cba-9f17-800b4e1a223b-kube-api-access-f49p4\") pod \"glance-default-internal-api-0\" (UID: \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.650346 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b51d046-e9cd-4cba-9f17-800b4e1a223b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.650407 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b51d046-e9cd-4cba-9f17-800b4e1a223b-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.650466 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b51d046-e9cd-4cba-9f17-800b4e1a223b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.650531 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.650870 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded 
for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-internal-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.651378 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9b51d046-e9cd-4cba-9f17-800b4e1a223b-logs\") pod \"glance-default-internal-api-0\" (UID: \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.660024 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9b51d046-e9cd-4cba-9f17-800b4e1a223b-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.677887 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b51d046-e9cd-4cba-9f17-800b4e1a223b-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.690162 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b51d046-e9cd-4cba-9f17-800b4e1a223b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.690683 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b51d046-e9cd-4cba-9f17-800b4e1a223b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.691419 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f49p4\" (UniqueName: \"kubernetes.io/projected/9b51d046-e9cd-4cba-9f17-800b4e1a223b-kube-api-access-f49p4\") pod \"glance-default-internal-api-0\" (UID: \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.702840 4799 scope.go:117] "RemoveContainer" containerID="e67e5ad07227279d88ed90b21c16d59edaf11b294afb0c88931442903f3e9d25" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.724743 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.746126 4799 scope.go:117] "RemoveContainer" containerID="b7b397836ef927790d2c5e00f062d9078e2d75a5e100614ce09a8d55dc22800b" Oct 10 16:50:24 crc kubenswrapper[4799]: E1010 16:50:24.747905 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7b397836ef927790d2c5e00f062d9078e2d75a5e100614ce09a8d55dc22800b\": container with ID starting with 
b7b397836ef927790d2c5e00f062d9078e2d75a5e100614ce09a8d55dc22800b not found: ID does not exist" containerID="b7b397836ef927790d2c5e00f062d9078e2d75a5e100614ce09a8d55dc22800b" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.747951 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7b397836ef927790d2c5e00f062d9078e2d75a5e100614ce09a8d55dc22800b"} err="failed to get container status \"b7b397836ef927790d2c5e00f062d9078e2d75a5e100614ce09a8d55dc22800b\": rpc error: code = NotFound desc = could not find container \"b7b397836ef927790d2c5e00f062d9078e2d75a5e100614ce09a8d55dc22800b\": container with ID starting with b7b397836ef927790d2c5e00f062d9078e2d75a5e100614ce09a8d55dc22800b not found: ID does not exist" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.747977 4799 scope.go:117] "RemoveContainer" containerID="e67e5ad07227279d88ed90b21c16d59edaf11b294afb0c88931442903f3e9d25" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.752404 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.754467 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-g45lb" podStartSLOduration=2.754450838 podStartE2EDuration="2.754450838s" podCreationTimestamp="2025-10-10 16:50:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:50:24.65079733 +0000 UTC m=+1118.159121445" watchObservedRunningTime="2025-10-10 16:50:24.754450838 +0000 UTC m=+1118.262774943" Oct 10 16:50:24 crc kubenswrapper[4799]: E1010 16:50:24.759219 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e67e5ad07227279d88ed90b21c16d59edaf11b294afb0c88931442903f3e9d25\": container with ID starting with e67e5ad07227279d88ed90b21c16d59edaf11b294afb0c88931442903f3e9d25 not found: ID does not exist" containerID="e67e5ad07227279d88ed90b21c16d59edaf11b294afb0c88931442903f3e9d25" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.759290 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e67e5ad07227279d88ed90b21c16d59edaf11b294afb0c88931442903f3e9d25"} err="failed to get container status \"e67e5ad07227279d88ed90b21c16d59edaf11b294afb0c88931442903f3e9d25\": rpc error: code = NotFound desc = could not find container \"e67e5ad07227279d88ed90b21c16d59edaf11b294afb0c88931442903f3e9d25\": container with ID starting with e67e5ad07227279d88ed90b21c16d59edaf11b294afb0c88931442903f3e9d25 not found: ID does not exist" Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.764793 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c769b4797-w8znp"] Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.807691 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7c769b4797-w8znp"] Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.845611 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-677c858757-mwwqx"] Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.858935 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-677c858757-mwwqx"] Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.896977 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/dnsmasq-dns-59df9f9d49-8pz9m"] Oct 10 16:50:24 crc kubenswrapper[4799]: I1010 16:50:24.904486 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-59df9f9d49-8pz9m"] Oct 10 16:50:25 crc kubenswrapper[4799]: I1010 16:50:25.352471 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 10 16:50:25 crc kubenswrapper[4799]: I1010 16:50:25.430544 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b3db818-0776-4b6f-98b0-b67e64292226" path="/var/lib/kubelet/pods/0b3db818-0776-4b6f-98b0-b67e64292226/volumes" Oct 10 16:50:25 crc kubenswrapper[4799]: I1010 16:50:25.433241 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43eb6e50-c6a9-4bbd-a301-926e6b3742d5" path="/var/lib/kubelet/pods/43eb6e50-c6a9-4bbd-a301-926e6b3742d5/volumes" Oct 10 16:50:25 crc kubenswrapper[4799]: I1010 16:50:25.433589 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c451444b-d58d-4383-801c-6df3ee8d8adf" path="/var/lib/kubelet/pods/c451444b-d58d-4383-801c-6df3ee8d8adf/volumes" Oct 10 16:50:25 crc kubenswrapper[4799]: I1010 16:50:25.524429 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 10 16:50:25 crc kubenswrapper[4799]: W1010 16:50:25.539944 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9b51d046_e9cd_4cba_9f17_800b4e1a223b.slice/crio-6dd2ceca981701a2677d2f5bc94111277b8aa0fde6dcdbcc04248f33a9fe3d67 WatchSource:0}: Error finding container 6dd2ceca981701a2677d2f5bc94111277b8aa0fde6dcdbcc04248f33a9fe3d67: Status 404 returned error can't find the container with id 6dd2ceca981701a2677d2f5bc94111277b8aa0fde6dcdbcc04248f33a9fe3d67 Oct 10 16:50:25 crc kubenswrapper[4799]: I1010 16:50:25.674819 4799 generic.go:334] "Generic (PLEG): container finished" podID="5a8fddc9-9cab-41e0-90c4-3c797749e5e2" containerID="9439cc6e202ea07c09476fcf8d57e61e2dccdd23fec47bfd562e6adabebb3783" exitCode=0 Oct 10 16:50:25 crc kubenswrapper[4799]: I1010 16:50:25.675028 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74776f5dd7-7jxz4" event={"ID":"5a8fddc9-9cab-41e0-90c4-3c797749e5e2","Type":"ContainerDied","Data":"9439cc6e202ea07c09476fcf8d57e61e2dccdd23fec47bfd562e6adabebb3783"} Oct 10 16:50:25 crc kubenswrapper[4799]: I1010 16:50:25.693039 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"546e566a-e924-4d27-8776-74b0b1ae123a","Type":"ContainerStarted","Data":"2adfdd1cf13db12d28cab792662aa0acd7157f5faba657e92e14e0c41755b43d"} Oct 10 16:50:25 crc kubenswrapper[4799]: I1010 16:50:25.709339 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9b51d046-e9cd-4cba-9f17-800b4e1a223b","Type":"ContainerStarted","Data":"6dd2ceca981701a2677d2f5bc94111277b8aa0fde6dcdbcc04248f33a9fe3d67"} Oct 10 16:50:26 crc kubenswrapper[4799]: I1010 16:50:26.494547 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-a843-account-create-t5zlh"] Oct 10 16:50:26 crc kubenswrapper[4799]: I1010 16:50:26.503993 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-a843-account-create-t5zlh" Oct 10 16:50:26 crc kubenswrapper[4799]: I1010 16:50:26.506672 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-a843-account-create-t5zlh"] Oct 10 16:50:26 crc kubenswrapper[4799]: I1010 16:50:26.509144 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Oct 10 16:50:26 crc kubenswrapper[4799]: I1010 16:50:26.598171 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpqng\" (UniqueName: \"kubernetes.io/projected/ab2e1e52-21c9-40c3-b87f-59f38ebb7bff-kube-api-access-zpqng\") pod \"cinder-a843-account-create-t5zlh\" (UID: \"ab2e1e52-21c9-40c3-b87f-59f38ebb7bff\") " pod="openstack/cinder-a843-account-create-t5zlh" Oct 10 16:50:26 crc kubenswrapper[4799]: I1010 16:50:26.695957 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 10 16:50:26 crc kubenswrapper[4799]: I1010 16:50:26.700676 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpqng\" (UniqueName: \"kubernetes.io/projected/ab2e1e52-21c9-40c3-b87f-59f38ebb7bff-kube-api-access-zpqng\") pod \"cinder-a843-account-create-t5zlh\" (UID: \"ab2e1e52-21c9-40c3-b87f-59f38ebb7bff\") " pod="openstack/cinder-a843-account-create-t5zlh" Oct 10 16:50:26 crc kubenswrapper[4799]: I1010 16:50:26.730865 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpqng\" (UniqueName: \"kubernetes.io/projected/ab2e1e52-21c9-40c3-b87f-59f38ebb7bff-kube-api-access-zpqng\") pod \"cinder-a843-account-create-t5zlh\" (UID: \"ab2e1e52-21c9-40c3-b87f-59f38ebb7bff\") " pod="openstack/cinder-a843-account-create-t5zlh" Oct 10 16:50:26 crc kubenswrapper[4799]: I1010 16:50:26.737864 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:50:26 crc kubenswrapper[4799]: I1010 16:50:26.766095 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9b51d046-e9cd-4cba-9f17-800b4e1a223b","Type":"ContainerStarted","Data":"b327acc1082eeba77796979df7e35dd7e2577e624ec569449ee1eb5d9f6e5691"} Oct 10 16:50:26 crc kubenswrapper[4799]: I1010 16:50:26.775914 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74776f5dd7-7jxz4" event={"ID":"5a8fddc9-9cab-41e0-90c4-3c797749e5e2","Type":"ContainerStarted","Data":"6ed3353899f4c0225963fc765a124e1b2d481711fafe23a817601fba30fa1384"} Oct 10 16:50:26 crc kubenswrapper[4799]: I1010 16:50:26.777167 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-74776f5dd7-7jxz4" Oct 10 16:50:26 crc kubenswrapper[4799]: I1010 16:50:26.782122 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"546e566a-e924-4d27-8776-74b0b1ae123a","Type":"ContainerStarted","Data":"7c9e0cb82352e781508fa3f533280eb8ec8a5f6a55b4aa0ab256355371442062"} Oct 10 16:50:26 crc kubenswrapper[4799]: I1010 16:50:26.823903 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-fdbc-account-create-kr25d"] Oct 10 16:50:26 crc kubenswrapper[4799]: I1010 16:50:26.826258 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-fdbc-account-create-kr25d" Oct 10 16:50:26 crc kubenswrapper[4799]: I1010 16:50:26.831068 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Oct 10 16:50:26 crc kubenswrapper[4799]: I1010 16:50:26.831496 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-a843-account-create-t5zlh" Oct 10 16:50:26 crc kubenswrapper[4799]: I1010 16:50:26.838181 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-fdbc-account-create-kr25d"] Oct 10 16:50:26 crc kubenswrapper[4799]: I1010 16:50:26.842531 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-74776f5dd7-7jxz4" podStartSLOduration=3.842513092 podStartE2EDuration="3.842513092s" podCreationTimestamp="2025-10-10 16:50:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:50:26.824748313 +0000 UTC m=+1120.333072418" watchObservedRunningTime="2025-10-10 16:50:26.842513092 +0000 UTC m=+1120.350837207" Oct 10 16:50:26 crc kubenswrapper[4799]: I1010 16:50:26.905920 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrr7q\" (UniqueName: \"kubernetes.io/projected/e1dd8255-75af-4083-ae20-bba4b5760b3f-kube-api-access-hrr7q\") pod \"neutron-fdbc-account-create-kr25d\" (UID: \"e1dd8255-75af-4083-ae20-bba4b5760b3f\") " pod="openstack/neutron-fdbc-account-create-kr25d" Oct 10 16:50:26 crc kubenswrapper[4799]: I1010 16:50:26.906168 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 10 16:50:27 crc kubenswrapper[4799]: I1010 16:50:27.007106 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrr7q\" (UniqueName: \"kubernetes.io/projected/e1dd8255-75af-4083-ae20-bba4b5760b3f-kube-api-access-hrr7q\") pod \"neutron-fdbc-account-create-kr25d\" (UID: \"e1dd8255-75af-4083-ae20-bba4b5760b3f\") " pod="openstack/neutron-fdbc-account-create-kr25d" Oct 10 16:50:27 crc kubenswrapper[4799]: I1010 16:50:27.025628 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrr7q\" (UniqueName: \"kubernetes.io/projected/e1dd8255-75af-4083-ae20-bba4b5760b3f-kube-api-access-hrr7q\") pod \"neutron-fdbc-account-create-kr25d\" (UID: \"e1dd8255-75af-4083-ae20-bba4b5760b3f\") " pod="openstack/neutron-fdbc-account-create-kr25d" Oct 10 16:50:27 crc kubenswrapper[4799]: I1010 16:50:27.163198 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-fdbc-account-create-kr25d" Oct 10 16:50:27 crc kubenswrapper[4799]: I1010 16:50:27.469554 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-a843-account-create-t5zlh"] Oct 10 16:50:27 crc kubenswrapper[4799]: I1010 16:50:27.799315 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-a843-account-create-t5zlh" event={"ID":"ab2e1e52-21c9-40c3-b87f-59f38ebb7bff","Type":"ContainerStarted","Data":"e0f865b58ad5a365298ef3d503d2862f51abaae4b78ba76578a371ef732177bd"} Oct 10 16:50:27 crc kubenswrapper[4799]: I1010 16:50:27.800638 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-a843-account-create-t5zlh" event={"ID":"ab2e1e52-21c9-40c3-b87f-59f38ebb7bff","Type":"ContainerStarted","Data":"851cf54fa05911f95a68c47ffb0ef5d1dee11d077baad5673ed3280f44624345"} Oct 10 16:50:27 crc kubenswrapper[4799]: I1010 16:50:27.806638 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"546e566a-e924-4d27-8776-74b0b1ae123a","Type":"ContainerStarted","Data":"254c2492fa74906bb7c0f95cf00c6a682ed6132f8f2a3c0a171faca77fc18fba"} Oct 10 16:50:27 crc kubenswrapper[4799]: I1010 16:50:27.806917 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="546e566a-e924-4d27-8776-74b0b1ae123a" containerName="glance-log" containerID="cri-o://7c9e0cb82352e781508fa3f533280eb8ec8a5f6a55b4aa0ab256355371442062" gracePeriod=30 Oct 10 16:50:27 crc kubenswrapper[4799]: I1010 16:50:27.807093 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="546e566a-e924-4d27-8776-74b0b1ae123a" containerName="glance-httpd" containerID="cri-o://254c2492fa74906bb7c0f95cf00c6a682ed6132f8f2a3c0a171faca77fc18fba" gracePeriod=30 Oct 10 16:50:27 crc kubenswrapper[4799]: I1010 16:50:27.812573 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9b51d046-e9cd-4cba-9f17-800b4e1a223b","Type":"ContainerStarted","Data":"361adc5f084ed182c5a615d67d3006131229d8f2121453c23c6a5345ae2d0046"} Oct 10 16:50:27 crc kubenswrapper[4799]: I1010 16:50:27.813002 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="9b51d046-e9cd-4cba-9f17-800b4e1a223b" containerName="glance-httpd" containerID="cri-o://361adc5f084ed182c5a615d67d3006131229d8f2121453c23c6a5345ae2d0046" gracePeriod=30 Oct 10 16:50:27 crc kubenswrapper[4799]: I1010 16:50:27.812942 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="9b51d046-e9cd-4cba-9f17-800b4e1a223b" containerName="glance-log" containerID="cri-o://b327acc1082eeba77796979df7e35dd7e2577e624ec569449ee1eb5d9f6e5691" gracePeriod=30 Oct 10 16:50:27 crc kubenswrapper[4799]: I1010 16:50:27.815968 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-a843-account-create-t5zlh" podStartSLOduration=1.815955286 podStartE2EDuration="1.815955286s" podCreationTimestamp="2025-10-10 16:50:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:50:27.814829709 +0000 UTC m=+1121.323153834" watchObservedRunningTime="2025-10-10 16:50:27.815955286 +0000 UTC m=+1121.324279401" Oct 10 16:50:27 crc 
kubenswrapper[4799]: I1010 16:50:27.871113 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.871096937 podStartE2EDuration="4.871096937s" podCreationTimestamp="2025-10-10 16:50:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:50:27.854157929 +0000 UTC m=+1121.362482044" watchObservedRunningTime="2025-10-10 16:50:27.871096937 +0000 UTC m=+1121.379421042" Oct 10 16:50:27 crc kubenswrapper[4799]: I1010 16:50:27.872235 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-fdbc-account-create-kr25d"] Oct 10 16:50:27 crc kubenswrapper[4799]: I1010 16:50:27.903717 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.903695142 podStartE2EDuration="4.903695142s" podCreationTimestamp="2025-10-10 16:50:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:50:27.888569789 +0000 UTC m=+1121.396893914" watchObservedRunningTime="2025-10-10 16:50:27.903695142 +0000 UTC m=+1121.412019247" Oct 10 16:50:28 crc kubenswrapper[4799]: W1010 16:50:28.000638 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode1dd8255_75af_4083_ae20_bba4b5760b3f.slice/crio-2162c7a1dfe7e3c271135fba68e58b065034b59a3f0db516104230647c1bc715 WatchSource:0}: Error finding container 2162c7a1dfe7e3c271135fba68e58b065034b59a3f0db516104230647c1bc715: Status 404 returned error can't find the container with id 2162c7a1dfe7e3c271135fba68e58b065034b59a3f0db516104230647c1bc715 Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.492594 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.676071 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/546e566a-e924-4d27-8776-74b0b1ae123a-config-data\") pod \"546e566a-e924-4d27-8776-74b0b1ae123a\" (UID: \"546e566a-e924-4d27-8776-74b0b1ae123a\") " Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.676128 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/546e566a-e924-4d27-8776-74b0b1ae123a-httpd-run\") pod \"546e566a-e924-4d27-8776-74b0b1ae123a\" (UID: \"546e566a-e924-4d27-8776-74b0b1ae123a\") " Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.676202 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xk2d2\" (UniqueName: \"kubernetes.io/projected/546e566a-e924-4d27-8776-74b0b1ae123a-kube-api-access-xk2d2\") pod \"546e566a-e924-4d27-8776-74b0b1ae123a\" (UID: \"546e566a-e924-4d27-8776-74b0b1ae123a\") " Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.676232 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/546e566a-e924-4d27-8776-74b0b1ae123a-combined-ca-bundle\") pod \"546e566a-e924-4d27-8776-74b0b1ae123a\" (UID: \"546e566a-e924-4d27-8776-74b0b1ae123a\") " Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.676301 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/546e566a-e924-4d27-8776-74b0b1ae123a-logs\") pod \"546e566a-e924-4d27-8776-74b0b1ae123a\" (UID: \"546e566a-e924-4d27-8776-74b0b1ae123a\") " Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.676400 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"546e566a-e924-4d27-8776-74b0b1ae123a\" (UID: \"546e566a-e924-4d27-8776-74b0b1ae123a\") " Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.676434 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/546e566a-e924-4d27-8776-74b0b1ae123a-scripts\") pod \"546e566a-e924-4d27-8776-74b0b1ae123a\" (UID: \"546e566a-e924-4d27-8776-74b0b1ae123a\") " Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.677314 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/546e566a-e924-4d27-8776-74b0b1ae123a-logs" (OuterVolumeSpecName: "logs") pod "546e566a-e924-4d27-8776-74b0b1ae123a" (UID: "546e566a-e924-4d27-8776-74b0b1ae123a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.677399 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/546e566a-e924-4d27-8776-74b0b1ae123a-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "546e566a-e924-4d27-8776-74b0b1ae123a" (UID: "546e566a-e924-4d27-8776-74b0b1ae123a"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.684547 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "546e566a-e924-4d27-8776-74b0b1ae123a" (UID: "546e566a-e924-4d27-8776-74b0b1ae123a"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.685513 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/546e566a-e924-4d27-8776-74b0b1ae123a-scripts" (OuterVolumeSpecName: "scripts") pod "546e566a-e924-4d27-8776-74b0b1ae123a" (UID: "546e566a-e924-4d27-8776-74b0b1ae123a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.687399 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/546e566a-e924-4d27-8776-74b0b1ae123a-kube-api-access-xk2d2" (OuterVolumeSpecName: "kube-api-access-xk2d2") pod "546e566a-e924-4d27-8776-74b0b1ae123a" (UID: "546e566a-e924-4d27-8776-74b0b1ae123a"). InnerVolumeSpecName "kube-api-access-xk2d2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.728927 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/546e566a-e924-4d27-8776-74b0b1ae123a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "546e566a-e924-4d27-8776-74b0b1ae123a" (UID: "546e566a-e924-4d27-8776-74b0b1ae123a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.744220 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/546e566a-e924-4d27-8776-74b0b1ae123a-config-data" (OuterVolumeSpecName: "config-data") pod "546e566a-e924-4d27-8776-74b0b1ae123a" (UID: "546e566a-e924-4d27-8776-74b0b1ae123a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.778611 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/546e566a-e924-4d27-8776-74b0b1ae123a-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.778922 4799 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/546e566a-e924-4d27-8776-74b0b1ae123a-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.778992 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xk2d2\" (UniqueName: \"kubernetes.io/projected/546e566a-e924-4d27-8776-74b0b1ae123a-kube-api-access-xk2d2\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.779049 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/546e566a-e924-4d27-8776-74b0b1ae123a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.779104 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/546e566a-e924-4d27-8776-74b0b1ae123a-logs\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.779183 4799 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.779243 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/546e566a-e924-4d27-8776-74b0b1ae123a-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.807885 4799 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.832133 4799 generic.go:334] "Generic (PLEG): container finished" podID="e1dd8255-75af-4083-ae20-bba4b5760b3f" containerID="721a6c184700479f8f430832a3bec901fd25498e982805e28af8c51139ef2304" exitCode=0 Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.832216 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-fdbc-account-create-kr25d" event={"ID":"e1dd8255-75af-4083-ae20-bba4b5760b3f","Type":"ContainerDied","Data":"721a6c184700479f8f430832a3bec901fd25498e982805e28af8c51139ef2304"} Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.832240 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-fdbc-account-create-kr25d" event={"ID":"e1dd8255-75af-4083-ae20-bba4b5760b3f","Type":"ContainerStarted","Data":"2162c7a1dfe7e3c271135fba68e58b065034b59a3f0db516104230647c1bc715"} Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.835200 4799 generic.go:334] "Generic (PLEG): container finished" podID="9b51d046-e9cd-4cba-9f17-800b4e1a223b" containerID="361adc5f084ed182c5a615d67d3006131229d8f2121453c23c6a5345ae2d0046" exitCode=143 Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.835228 4799 generic.go:334] "Generic (PLEG): container finished" podID="9b51d046-e9cd-4cba-9f17-800b4e1a223b" containerID="b327acc1082eeba77796979df7e35dd7e2577e624ec569449ee1eb5d9f6e5691" exitCode=143 Oct 10 16:50:28 crc 
kubenswrapper[4799]: I1010 16:50:28.835298 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9b51d046-e9cd-4cba-9f17-800b4e1a223b","Type":"ContainerDied","Data":"361adc5f084ed182c5a615d67d3006131229d8f2121453c23c6a5345ae2d0046"} Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.835321 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9b51d046-e9cd-4cba-9f17-800b4e1a223b","Type":"ContainerDied","Data":"b327acc1082eeba77796979df7e35dd7e2577e624ec569449ee1eb5d9f6e5691"} Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.836987 4799 generic.go:334] "Generic (PLEG): container finished" podID="ab2e1e52-21c9-40c3-b87f-59f38ebb7bff" containerID="e0f865b58ad5a365298ef3d503d2862f51abaae4b78ba76578a371ef732177bd" exitCode=0 Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.837021 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-a843-account-create-t5zlh" event={"ID":"ab2e1e52-21c9-40c3-b87f-59f38ebb7bff","Type":"ContainerDied","Data":"e0f865b58ad5a365298ef3d503d2862f51abaae4b78ba76578a371ef732177bd"} Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.840636 4799 generic.go:334] "Generic (PLEG): container finished" podID="546e566a-e924-4d27-8776-74b0b1ae123a" containerID="254c2492fa74906bb7c0f95cf00c6a682ed6132f8f2a3c0a171faca77fc18fba" exitCode=143 Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.840659 4799 generic.go:334] "Generic (PLEG): container finished" podID="546e566a-e924-4d27-8776-74b0b1ae123a" containerID="7c9e0cb82352e781508fa3f533280eb8ec8a5f6a55b4aa0ab256355371442062" exitCode=143 Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.840711 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.840765 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"546e566a-e924-4d27-8776-74b0b1ae123a","Type":"ContainerDied","Data":"254c2492fa74906bb7c0f95cf00c6a682ed6132f8f2a3c0a171faca77fc18fba"} Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.840797 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"546e566a-e924-4d27-8776-74b0b1ae123a","Type":"ContainerDied","Data":"7c9e0cb82352e781508fa3f533280eb8ec8a5f6a55b4aa0ab256355371442062"} Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.840810 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"546e566a-e924-4d27-8776-74b0b1ae123a","Type":"ContainerDied","Data":"2adfdd1cf13db12d28cab792662aa0acd7157f5faba657e92e14e0c41755b43d"} Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.840849 4799 scope.go:117] "RemoveContainer" containerID="254c2492fa74906bb7c0f95cf00c6a682ed6132f8f2a3c0a171faca77fc18fba" Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.883977 4799 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.912637 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.923885 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.931700 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 10 16:50:28 crc kubenswrapper[4799]: E1010 16:50:28.932092 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="546e566a-e924-4d27-8776-74b0b1ae123a" containerName="glance-httpd" Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.932111 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="546e566a-e924-4d27-8776-74b0b1ae123a" containerName="glance-httpd" Oct 10 16:50:28 crc kubenswrapper[4799]: E1010 16:50:28.932139 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="546e566a-e924-4d27-8776-74b0b1ae123a" containerName="glance-log" Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.932148 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="546e566a-e924-4d27-8776-74b0b1ae123a" containerName="glance-log" Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.932312 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="546e566a-e924-4d27-8776-74b0b1ae123a" containerName="glance-httpd" Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.932330 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="546e566a-e924-4d27-8776-74b0b1ae123a" containerName="glance-log" Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.937344 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.942951 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 10 16:50:28 crc kubenswrapper[4799]: I1010 16:50:28.944067 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 10 16:50:29 crc kubenswrapper[4799]: I1010 16:50:29.087775 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-logs\") pod \"glance-default-external-api-0\" (UID: \"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:29 crc kubenswrapper[4799]: I1010 16:50:29.087837 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-config-data\") pod \"glance-default-external-api-0\" (UID: \"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:29 crc kubenswrapper[4799]: I1010 16:50:29.087884 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phqhm\" (UniqueName: \"kubernetes.io/projected/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-kube-api-access-phqhm\") pod \"glance-default-external-api-0\" (UID: \"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:29 crc kubenswrapper[4799]: I1010 16:50:29.087986 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-scripts\") pod \"glance-default-external-api-0\" (UID: \"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:29 crc kubenswrapper[4799]: I1010 16:50:29.088038 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:29 crc kubenswrapper[4799]: I1010 16:50:29.088105 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:29 crc kubenswrapper[4799]: I1010 16:50:29.088140 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:29 crc kubenswrapper[4799]: I1010 16:50:29.189981 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phqhm\" (UniqueName: \"kubernetes.io/projected/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-kube-api-access-phqhm\") pod \"glance-default-external-api-0\" (UID: 
\"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:29 crc kubenswrapper[4799]: I1010 16:50:29.190061 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-scripts\") pod \"glance-default-external-api-0\" (UID: \"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:29 crc kubenswrapper[4799]: I1010 16:50:29.190112 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:29 crc kubenswrapper[4799]: I1010 16:50:29.190177 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:29 crc kubenswrapper[4799]: I1010 16:50:29.190206 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:29 crc kubenswrapper[4799]: I1010 16:50:29.190283 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-logs\") pod \"glance-default-external-api-0\" (UID: \"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:29 crc kubenswrapper[4799]: I1010 16:50:29.190310 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-config-data\") pod \"glance-default-external-api-0\" (UID: \"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:29 crc kubenswrapper[4799]: I1010 16:50:29.190419 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-external-api-0" Oct 10 16:50:29 crc kubenswrapper[4799]: I1010 16:50:29.190859 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-logs\") pod \"glance-default-external-api-0\" (UID: \"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:29 crc kubenswrapper[4799]: I1010 16:50:29.190858 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:29 crc kubenswrapper[4799]: I1010 
16:50:29.196120 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-config-data\") pod \"glance-default-external-api-0\" (UID: \"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:29 crc kubenswrapper[4799]: I1010 16:50:29.207653 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-scripts\") pod \"glance-default-external-api-0\" (UID: \"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:29 crc kubenswrapper[4799]: I1010 16:50:29.211413 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:29 crc kubenswrapper[4799]: I1010 16:50:29.221564 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phqhm\" (UniqueName: \"kubernetes.io/projected/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-kube-api-access-phqhm\") pod \"glance-default-external-api-0\" (UID: \"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:29 crc kubenswrapper[4799]: I1010 16:50:29.235099 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:29 crc kubenswrapper[4799]: I1010 16:50:29.276207 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 10 16:50:29 crc kubenswrapper[4799]: I1010 16:50:29.415781 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="546e566a-e924-4d27-8776-74b0b1ae123a" path="/var/lib/kubelet/pods/546e566a-e924-4d27-8776-74b0b1ae123a/volumes" Oct 10 16:50:29 crc kubenswrapper[4799]: I1010 16:50:29.851537 4799 generic.go:334] "Generic (PLEG): container finished" podID="454f77a5-dadb-4038-8ec3-c3a1bd0c2c22" containerID="194dec3f34924527e2cec1db990873105b661b96fad790933950a8b6a9a87518" exitCode=0 Oct 10 16:50:29 crc kubenswrapper[4799]: I1010 16:50:29.851616 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-g45lb" event={"ID":"454f77a5-dadb-4038-8ec3-c3a1bd0c2c22","Type":"ContainerDied","Data":"194dec3f34924527e2cec1db990873105b661b96fad790933950a8b6a9a87518"} Oct 10 16:50:30 crc kubenswrapper[4799]: I1010 16:50:30.750591 4799 scope.go:117] "RemoveContainer" containerID="7c9e0cb82352e781508fa3f533280eb8ec8a5f6a55b4aa0ab256355371442062" Oct 10 16:50:30 crc kubenswrapper[4799]: I1010 16:50:30.862716 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-fdbc-account-create-kr25d" Oct 10 16:50:30 crc kubenswrapper[4799]: I1010 16:50:30.865091 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-a843-account-create-t5zlh" event={"ID":"ab2e1e52-21c9-40c3-b87f-59f38ebb7bff","Type":"ContainerDied","Data":"851cf54fa05911f95a68c47ffb0ef5d1dee11d077baad5673ed3280f44624345"} Oct 10 16:50:30 crc kubenswrapper[4799]: I1010 16:50:30.865149 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="851cf54fa05911f95a68c47ffb0ef5d1dee11d077baad5673ed3280f44624345" Oct 10 16:50:30 crc kubenswrapper[4799]: I1010 16:50:30.869473 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-fdbc-account-create-kr25d" event={"ID":"e1dd8255-75af-4083-ae20-bba4b5760b3f","Type":"ContainerDied","Data":"2162c7a1dfe7e3c271135fba68e58b065034b59a3f0db516104230647c1bc715"} Oct 10 16:50:30 crc kubenswrapper[4799]: I1010 16:50:30.869530 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2162c7a1dfe7e3c271135fba68e58b065034b59a3f0db516104230647c1bc715" Oct 10 16:50:30 crc kubenswrapper[4799]: I1010 16:50:30.869661 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 10 16:50:30 crc kubenswrapper[4799]: I1010 16:50:30.869853 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-fdbc-account-create-kr25d" Oct 10 16:50:30 crc kubenswrapper[4799]: I1010 16:50:30.875085 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9b51d046-e9cd-4cba-9f17-800b4e1a223b","Type":"ContainerDied","Data":"6dd2ceca981701a2677d2f5bc94111277b8aa0fde6dcdbcc04248f33a9fe3d67"} Oct 10 16:50:30 crc kubenswrapper[4799]: I1010 16:50:30.889889 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-a843-account-create-t5zlh" Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.037019 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b51d046-e9cd-4cba-9f17-800b4e1a223b-config-data\") pod \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\" (UID: \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\") " Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.037093 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9b51d046-e9cd-4cba-9f17-800b4e1a223b-httpd-run\") pod \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\" (UID: \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\") " Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.037166 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b51d046-e9cd-4cba-9f17-800b4e1a223b-combined-ca-bundle\") pod \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\" (UID: \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\") " Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.037231 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9b51d046-e9cd-4cba-9f17-800b4e1a223b-logs\") pod \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\" (UID: \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\") " Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.037403 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zpqng\" (UniqueName: \"kubernetes.io/projected/ab2e1e52-21c9-40c3-b87f-59f38ebb7bff-kube-api-access-zpqng\") pod \"ab2e1e52-21c9-40c3-b87f-59f38ebb7bff\" (UID: \"ab2e1e52-21c9-40c3-b87f-59f38ebb7bff\") " Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.037445 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b51d046-e9cd-4cba-9f17-800b4e1a223b-scripts\") pod \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\" (UID: \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\") " Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.037517 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f49p4\" (UniqueName: \"kubernetes.io/projected/9b51d046-e9cd-4cba-9f17-800b4e1a223b-kube-api-access-f49p4\") pod \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\" (UID: \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\") " Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.037595 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hrr7q\" (UniqueName: \"kubernetes.io/projected/e1dd8255-75af-4083-ae20-bba4b5760b3f-kube-api-access-hrr7q\") pod \"e1dd8255-75af-4083-ae20-bba4b5760b3f\" (UID: \"e1dd8255-75af-4083-ae20-bba4b5760b3f\") " Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.037752 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\" (UID: \"9b51d046-e9cd-4cba-9f17-800b4e1a223b\") " Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.037848 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b51d046-e9cd-4cba-9f17-800b4e1a223b-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "9b51d046-e9cd-4cba-9f17-800b4e1a223b" (UID: "9b51d046-e9cd-4cba-9f17-800b4e1a223b"). 
InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.038303 4799 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9b51d046-e9cd-4cba-9f17-800b4e1a223b-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.038630 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b51d046-e9cd-4cba-9f17-800b4e1a223b-logs" (OuterVolumeSpecName: "logs") pod "9b51d046-e9cd-4cba-9f17-800b4e1a223b" (UID: "9b51d046-e9cd-4cba-9f17-800b4e1a223b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.049223 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1dd8255-75af-4083-ae20-bba4b5760b3f-kube-api-access-hrr7q" (OuterVolumeSpecName: "kube-api-access-hrr7q") pod "e1dd8255-75af-4083-ae20-bba4b5760b3f" (UID: "e1dd8255-75af-4083-ae20-bba4b5760b3f"). InnerVolumeSpecName "kube-api-access-hrr7q". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.051077 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab2e1e52-21c9-40c3-b87f-59f38ebb7bff-kube-api-access-zpqng" (OuterVolumeSpecName: "kube-api-access-zpqng") pod "ab2e1e52-21c9-40c3-b87f-59f38ebb7bff" (UID: "ab2e1e52-21c9-40c3-b87f-59f38ebb7bff"). InnerVolumeSpecName "kube-api-access-zpqng". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.054768 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b51d046-e9cd-4cba-9f17-800b4e1a223b-scripts" (OuterVolumeSpecName: "scripts") pod "9b51d046-e9cd-4cba-9f17-800b4e1a223b" (UID: "9b51d046-e9cd-4cba-9f17-800b4e1a223b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.056969 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "glance") pod "9b51d046-e9cd-4cba-9f17-800b4e1a223b" (UID: "9b51d046-e9cd-4cba-9f17-800b4e1a223b"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.057048 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b51d046-e9cd-4cba-9f17-800b4e1a223b-kube-api-access-f49p4" (OuterVolumeSpecName: "kube-api-access-f49p4") pod "9b51d046-e9cd-4cba-9f17-800b4e1a223b" (UID: "9b51d046-e9cd-4cba-9f17-800b4e1a223b"). InnerVolumeSpecName "kube-api-access-f49p4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.099822 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b51d046-e9cd-4cba-9f17-800b4e1a223b-config-data" (OuterVolumeSpecName: "config-data") pod "9b51d046-e9cd-4cba-9f17-800b4e1a223b" (UID: "9b51d046-e9cd-4cba-9f17-800b4e1a223b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.147480 4799 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.147519 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b51d046-e9cd-4cba-9f17-800b4e1a223b-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.147531 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9b51d046-e9cd-4cba-9f17-800b4e1a223b-logs\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.147542 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zpqng\" (UniqueName: \"kubernetes.io/projected/ab2e1e52-21c9-40c3-b87f-59f38ebb7bff-kube-api-access-zpqng\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.147553 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b51d046-e9cd-4cba-9f17-800b4e1a223b-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.147563 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f49p4\" (UniqueName: \"kubernetes.io/projected/9b51d046-e9cd-4cba-9f17-800b4e1a223b-kube-api-access-f49p4\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.147574 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hrr7q\" (UniqueName: \"kubernetes.io/projected/e1dd8255-75af-4083-ae20-bba4b5760b3f-kube-api-access-hrr7q\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.158732 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b51d046-e9cd-4cba-9f17-800b4e1a223b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9b51d046-e9cd-4cba-9f17-800b4e1a223b" (UID: "9b51d046-e9cd-4cba-9f17-800b4e1a223b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.173896 4799 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.248605 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b51d046-e9cd-4cba-9f17-800b4e1a223b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.248642 4799 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.891682 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.892779 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-a843-account-create-t5zlh" Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.936648 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.952374 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.988825 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 10 16:50:31 crc kubenswrapper[4799]: E1010 16:50:31.989259 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1dd8255-75af-4083-ae20-bba4b5760b3f" containerName="mariadb-account-create" Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.989286 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1dd8255-75af-4083-ae20-bba4b5760b3f" containerName="mariadb-account-create" Oct 10 16:50:31 crc kubenswrapper[4799]: E1010 16:50:31.989317 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b51d046-e9cd-4cba-9f17-800b4e1a223b" containerName="glance-log" Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.989327 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b51d046-e9cd-4cba-9f17-800b4e1a223b" containerName="glance-log" Oct 10 16:50:31 crc kubenswrapper[4799]: E1010 16:50:31.989345 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab2e1e52-21c9-40c3-b87f-59f38ebb7bff" containerName="mariadb-account-create" Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.989353 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab2e1e52-21c9-40c3-b87f-59f38ebb7bff" containerName="mariadb-account-create" Oct 10 16:50:31 crc kubenswrapper[4799]: E1010 16:50:31.989376 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b51d046-e9cd-4cba-9f17-800b4e1a223b" containerName="glance-httpd" Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.989387 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b51d046-e9cd-4cba-9f17-800b4e1a223b" containerName="glance-httpd" Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.989599 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b51d046-e9cd-4cba-9f17-800b4e1a223b" containerName="glance-httpd" Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.989623 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab2e1e52-21c9-40c3-b87f-59f38ebb7bff" containerName="mariadb-account-create" Oct 10 16:50:31 crc kubenswrapper[4799]: I1010 16:50:31.989637 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b51d046-e9cd-4cba-9f17-800b4e1a223b" containerName="glance-log" Oct 10 16:50:32 crc kubenswrapper[4799]: I1010 16:50:32.005890 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1dd8255-75af-4083-ae20-bba4b5760b3f" containerName="mariadb-account-create" Oct 10 16:50:32 crc kubenswrapper[4799]: I1010 16:50:32.007378 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 10 16:50:32 crc kubenswrapper[4799]: I1010 16:50:32.010172 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 10 16:50:32 crc kubenswrapper[4799]: I1010 16:50:32.011582 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 10 16:50:32 crc kubenswrapper[4799]: I1010 16:50:32.165151 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"76e21275-9059-4dac-b883-b750007e51c3\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:32 crc kubenswrapper[4799]: I1010 16:50:32.165195 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/76e21275-9059-4dac-b883-b750007e51c3-logs\") pod \"glance-default-internal-api-0\" (UID: \"76e21275-9059-4dac-b883-b750007e51c3\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:32 crc kubenswrapper[4799]: I1010 16:50:32.165247 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/76e21275-9059-4dac-b883-b750007e51c3-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"76e21275-9059-4dac-b883-b750007e51c3\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:32 crc kubenswrapper[4799]: I1010 16:50:32.165273 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/76e21275-9059-4dac-b883-b750007e51c3-scripts\") pod \"glance-default-internal-api-0\" (UID: \"76e21275-9059-4dac-b883-b750007e51c3\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:32 crc kubenswrapper[4799]: I1010 16:50:32.165343 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lr2pz\" (UniqueName: \"kubernetes.io/projected/76e21275-9059-4dac-b883-b750007e51c3-kube-api-access-lr2pz\") pod \"glance-default-internal-api-0\" (UID: \"76e21275-9059-4dac-b883-b750007e51c3\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:32 crc kubenswrapper[4799]: I1010 16:50:32.165397 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76e21275-9059-4dac-b883-b750007e51c3-config-data\") pod \"glance-default-internal-api-0\" (UID: \"76e21275-9059-4dac-b883-b750007e51c3\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:32 crc kubenswrapper[4799]: I1010 16:50:32.165445 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76e21275-9059-4dac-b883-b750007e51c3-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"76e21275-9059-4dac-b883-b750007e51c3\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:32 crc kubenswrapper[4799]: I1010 16:50:32.266523 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76e21275-9059-4dac-b883-b750007e51c3-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: 
\"76e21275-9059-4dac-b883-b750007e51c3\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:32 crc kubenswrapper[4799]: I1010 16:50:32.266625 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"76e21275-9059-4dac-b883-b750007e51c3\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:32 crc kubenswrapper[4799]: I1010 16:50:32.266656 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/76e21275-9059-4dac-b883-b750007e51c3-logs\") pod \"glance-default-internal-api-0\" (UID: \"76e21275-9059-4dac-b883-b750007e51c3\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:32 crc kubenswrapper[4799]: I1010 16:50:32.266723 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/76e21275-9059-4dac-b883-b750007e51c3-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"76e21275-9059-4dac-b883-b750007e51c3\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:32 crc kubenswrapper[4799]: I1010 16:50:32.266772 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/76e21275-9059-4dac-b883-b750007e51c3-scripts\") pod \"glance-default-internal-api-0\" (UID: \"76e21275-9059-4dac-b883-b750007e51c3\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:32 crc kubenswrapper[4799]: I1010 16:50:32.266820 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lr2pz\" (UniqueName: \"kubernetes.io/projected/76e21275-9059-4dac-b883-b750007e51c3-kube-api-access-lr2pz\") pod \"glance-default-internal-api-0\" (UID: \"76e21275-9059-4dac-b883-b750007e51c3\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:32 crc kubenswrapper[4799]: I1010 16:50:32.266863 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76e21275-9059-4dac-b883-b750007e51c3-config-data\") pod \"glance-default-internal-api-0\" (UID: \"76e21275-9059-4dac-b883-b750007e51c3\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:32 crc kubenswrapper[4799]: I1010 16:50:32.267612 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/76e21275-9059-4dac-b883-b750007e51c3-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"76e21275-9059-4dac-b883-b750007e51c3\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:32 crc kubenswrapper[4799]: I1010 16:50:32.267707 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/76e21275-9059-4dac-b883-b750007e51c3-logs\") pod \"glance-default-internal-api-0\" (UID: \"76e21275-9059-4dac-b883-b750007e51c3\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:32 crc kubenswrapper[4799]: I1010 16:50:32.267665 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"76e21275-9059-4dac-b883-b750007e51c3\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-internal-api-0" Oct 10 16:50:32 crc kubenswrapper[4799]: 
I1010 16:50:32.272776 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/76e21275-9059-4dac-b883-b750007e51c3-scripts\") pod \"glance-default-internal-api-0\" (UID: \"76e21275-9059-4dac-b883-b750007e51c3\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:32 crc kubenswrapper[4799]: I1010 16:50:32.272991 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76e21275-9059-4dac-b883-b750007e51c3-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"76e21275-9059-4dac-b883-b750007e51c3\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:32 crc kubenswrapper[4799]: I1010 16:50:32.294809 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76e21275-9059-4dac-b883-b750007e51c3-config-data\") pod \"glance-default-internal-api-0\" (UID: \"76e21275-9059-4dac-b883-b750007e51c3\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:32 crc kubenswrapper[4799]: I1010 16:50:32.297982 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"76e21275-9059-4dac-b883-b750007e51c3\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:32 crc kubenswrapper[4799]: I1010 16:50:32.299240 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lr2pz\" (UniqueName: \"kubernetes.io/projected/76e21275-9059-4dac-b883-b750007e51c3-kube-api-access-lr2pz\") pod \"glance-default-internal-api-0\" (UID: \"76e21275-9059-4dac-b883-b750007e51c3\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:32 crc kubenswrapper[4799]: I1010 16:50:32.332237 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 10 16:50:33 crc kubenswrapper[4799]: I1010 16:50:33.414739 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b51d046-e9cd-4cba-9f17-800b4e1a223b" path="/var/lib/kubelet/pods/9b51d046-e9cd-4cba-9f17-800b4e1a223b/volumes" Oct 10 16:50:33 crc kubenswrapper[4799]: I1010 16:50:33.611964 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 10 16:50:33 crc kubenswrapper[4799]: I1010 16:50:33.676192 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 10 16:50:33 crc kubenswrapper[4799]: I1010 16:50:33.830900 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-74776f5dd7-7jxz4" Oct 10 16:50:33 crc kubenswrapper[4799]: I1010 16:50:33.888322 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7b587f8db7-jxsc9"] Oct 10 16:50:33 crc kubenswrapper[4799]: I1010 16:50:33.888803 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7b587f8db7-jxsc9" podUID="07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6" containerName="dnsmasq-dns" containerID="cri-o://17d83bee82a2e1f670f14fc38a4b800a5bc5d1d3bcc4daf243627e79b435f4d6" gracePeriod=10 Oct 10 16:50:34 crc kubenswrapper[4799]: I1010 16:50:34.924131 4799 generic.go:334] "Generic (PLEG): container finished" podID="07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6" containerID="17d83bee82a2e1f670f14fc38a4b800a5bc5d1d3bcc4daf243627e79b435f4d6" exitCode=0 Oct 10 16:50:34 crc kubenswrapper[4799]: I1010 16:50:34.924141 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b587f8db7-jxsc9" event={"ID":"07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6","Type":"ContainerDied","Data":"17d83bee82a2e1f670f14fc38a4b800a5bc5d1d3bcc4daf243627e79b435f4d6"} Oct 10 16:50:35 crc kubenswrapper[4799]: I1010 16:50:35.283040 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-g45lb" Oct 10 16:50:35 crc kubenswrapper[4799]: I1010 16:50:35.422145 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-credential-keys\") pod \"454f77a5-dadb-4038-8ec3-c3a1bd0c2c22\" (UID: \"454f77a5-dadb-4038-8ec3-c3a1bd0c2c22\") " Oct 10 16:50:35 crc kubenswrapper[4799]: I1010 16:50:35.422592 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-fernet-keys\") pod \"454f77a5-dadb-4038-8ec3-c3a1bd0c2c22\" (UID: \"454f77a5-dadb-4038-8ec3-c3a1bd0c2c22\") " Oct 10 16:50:35 crc kubenswrapper[4799]: I1010 16:50:35.422712 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-scripts\") pod \"454f77a5-dadb-4038-8ec3-c3a1bd0c2c22\" (UID: \"454f77a5-dadb-4038-8ec3-c3a1bd0c2c22\") " Oct 10 16:50:35 crc kubenswrapper[4799]: I1010 16:50:35.422793 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-combined-ca-bundle\") pod \"454f77a5-dadb-4038-8ec3-c3a1bd0c2c22\" (UID: \"454f77a5-dadb-4038-8ec3-c3a1bd0c2c22\") " Oct 10 16:50:35 crc kubenswrapper[4799]: I1010 16:50:35.422857 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zdhp4\" (UniqueName: \"kubernetes.io/projected/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-kube-api-access-zdhp4\") pod \"454f77a5-dadb-4038-8ec3-c3a1bd0c2c22\" (UID: \"454f77a5-dadb-4038-8ec3-c3a1bd0c2c22\") " Oct 10 16:50:35 crc kubenswrapper[4799]: I1010 16:50:35.422907 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-config-data\") pod \"454f77a5-dadb-4038-8ec3-c3a1bd0c2c22\" (UID: \"454f77a5-dadb-4038-8ec3-c3a1bd0c2c22\") " Oct 10 16:50:35 crc kubenswrapper[4799]: I1010 16:50:35.430976 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-kube-api-access-zdhp4" (OuterVolumeSpecName: "kube-api-access-zdhp4") pod "454f77a5-dadb-4038-8ec3-c3a1bd0c2c22" (UID: "454f77a5-dadb-4038-8ec3-c3a1bd0c2c22"). InnerVolumeSpecName "kube-api-access-zdhp4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:50:35 crc kubenswrapper[4799]: I1010 16:50:35.430971 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-scripts" (OuterVolumeSpecName: "scripts") pod "454f77a5-dadb-4038-8ec3-c3a1bd0c2c22" (UID: "454f77a5-dadb-4038-8ec3-c3a1bd0c2c22"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:50:35 crc kubenswrapper[4799]: I1010 16:50:35.430991 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "454f77a5-dadb-4038-8ec3-c3a1bd0c2c22" (UID: "454f77a5-dadb-4038-8ec3-c3a1bd0c2c22"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:50:35 crc kubenswrapper[4799]: I1010 16:50:35.431049 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "454f77a5-dadb-4038-8ec3-c3a1bd0c2c22" (UID: "454f77a5-dadb-4038-8ec3-c3a1bd0c2c22"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:50:35 crc kubenswrapper[4799]: I1010 16:50:35.465281 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-config-data" (OuterVolumeSpecName: "config-data") pod "454f77a5-dadb-4038-8ec3-c3a1bd0c2c22" (UID: "454f77a5-dadb-4038-8ec3-c3a1bd0c2c22"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:50:35 crc kubenswrapper[4799]: I1010 16:50:35.481396 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "454f77a5-dadb-4038-8ec3-c3a1bd0c2c22" (UID: "454f77a5-dadb-4038-8ec3-c3a1bd0c2c22"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:50:35 crc kubenswrapper[4799]: I1010 16:50:35.524599 4799 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:35 crc kubenswrapper[4799]: I1010 16:50:35.524635 4799 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:35 crc kubenswrapper[4799]: I1010 16:50:35.524647 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:35 crc kubenswrapper[4799]: I1010 16:50:35.524659 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:35 crc kubenswrapper[4799]: I1010 16:50:35.524672 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zdhp4\" (UniqueName: \"kubernetes.io/projected/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-kube-api-access-zdhp4\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:35 crc kubenswrapper[4799]: I1010 16:50:35.524686 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:35 crc kubenswrapper[4799]: I1010 16:50:35.947480 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-g45lb" event={"ID":"454f77a5-dadb-4038-8ec3-c3a1bd0c2c22","Type":"ContainerDied","Data":"cb081e6518cb1542b5204c27ef689039816791fdd84f7a7ec2035e0b3f47f2ba"} Oct 10 16:50:35 crc kubenswrapper[4799]: I1010 16:50:35.947536 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cb081e6518cb1542b5204c27ef689039816791fdd84f7a7ec2035e0b3f47f2ba" Oct 10 16:50:35 crc kubenswrapper[4799]: I1010 16:50:35.947614 4799 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-g45lb" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.175883 4799 scope.go:117] "RemoveContainer" containerID="254c2492fa74906bb7c0f95cf00c6a682ed6132f8f2a3c0a171faca77fc18fba" Oct 10 16:50:36 crc kubenswrapper[4799]: E1010 16:50:36.176340 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"254c2492fa74906bb7c0f95cf00c6a682ed6132f8f2a3c0a171faca77fc18fba\": container with ID starting with 254c2492fa74906bb7c0f95cf00c6a682ed6132f8f2a3c0a171faca77fc18fba not found: ID does not exist" containerID="254c2492fa74906bb7c0f95cf00c6a682ed6132f8f2a3c0a171faca77fc18fba" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.176371 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"254c2492fa74906bb7c0f95cf00c6a682ed6132f8f2a3c0a171faca77fc18fba"} err="failed to get container status \"254c2492fa74906bb7c0f95cf00c6a682ed6132f8f2a3c0a171faca77fc18fba\": rpc error: code = NotFound desc = could not find container \"254c2492fa74906bb7c0f95cf00c6a682ed6132f8f2a3c0a171faca77fc18fba\": container with ID starting with 254c2492fa74906bb7c0f95cf00c6a682ed6132f8f2a3c0a171faca77fc18fba not found: ID does not exist" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.176399 4799 scope.go:117] "RemoveContainer" containerID="7c9e0cb82352e781508fa3f533280eb8ec8a5f6a55b4aa0ab256355371442062" Oct 10 16:50:36 crc kubenswrapper[4799]: E1010 16:50:36.176653 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c9e0cb82352e781508fa3f533280eb8ec8a5f6a55b4aa0ab256355371442062\": container with ID starting with 7c9e0cb82352e781508fa3f533280eb8ec8a5f6a55b4aa0ab256355371442062 not found: ID does not exist" containerID="7c9e0cb82352e781508fa3f533280eb8ec8a5f6a55b4aa0ab256355371442062" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.176678 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c9e0cb82352e781508fa3f533280eb8ec8a5f6a55b4aa0ab256355371442062"} err="failed to get container status \"7c9e0cb82352e781508fa3f533280eb8ec8a5f6a55b4aa0ab256355371442062\": rpc error: code = NotFound desc = could not find container \"7c9e0cb82352e781508fa3f533280eb8ec8a5f6a55b4aa0ab256355371442062\": container with ID starting with 7c9e0cb82352e781508fa3f533280eb8ec8a5f6a55b4aa0ab256355371442062 not found: ID does not exist" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.176694 4799 scope.go:117] "RemoveContainer" containerID="254c2492fa74906bb7c0f95cf00c6a682ed6132f8f2a3c0a171faca77fc18fba" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.176985 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"254c2492fa74906bb7c0f95cf00c6a682ed6132f8f2a3c0a171faca77fc18fba"} err="failed to get container status \"254c2492fa74906bb7c0f95cf00c6a682ed6132f8f2a3c0a171faca77fc18fba\": rpc error: code = NotFound desc = could not find container \"254c2492fa74906bb7c0f95cf00c6a682ed6132f8f2a3c0a171faca77fc18fba\": container with ID starting with 254c2492fa74906bb7c0f95cf00c6a682ed6132f8f2a3c0a171faca77fc18fba not found: ID does not exist" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.177004 4799 scope.go:117] "RemoveContainer" containerID="7c9e0cb82352e781508fa3f533280eb8ec8a5f6a55b4aa0ab256355371442062" Oct 10 
16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.177250 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c9e0cb82352e781508fa3f533280eb8ec8a5f6a55b4aa0ab256355371442062"} err="failed to get container status \"7c9e0cb82352e781508fa3f533280eb8ec8a5f6a55b4aa0ab256355371442062\": rpc error: code = NotFound desc = could not find container \"7c9e0cb82352e781508fa3f533280eb8ec8a5f6a55b4aa0ab256355371442062\": container with ID starting with 7c9e0cb82352e781508fa3f533280eb8ec8a5f6a55b4aa0ab256355371442062 not found: ID does not exist" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.177272 4799 scope.go:117] "RemoveContainer" containerID="361adc5f084ed182c5a615d67d3006131229d8f2121453c23c6a5345ae2d0046" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.384823 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-g45lb"] Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.392417 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-g45lb"] Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.432419 4799 scope.go:117] "RemoveContainer" containerID="b327acc1082eeba77796979df7e35dd7e2577e624ec569449ee1eb5d9f6e5691" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.478987 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-jrrx4"] Oct 10 16:50:36 crc kubenswrapper[4799]: E1010 16:50:36.479395 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="454f77a5-dadb-4038-8ec3-c3a1bd0c2c22" containerName="keystone-bootstrap" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.479408 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="454f77a5-dadb-4038-8ec3-c3a1bd0c2c22" containerName="keystone-bootstrap" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.479668 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="454f77a5-dadb-4038-8ec3-c3a1bd0c2c22" containerName="keystone-bootstrap" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.480304 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-jrrx4" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.482457 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.482889 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.483176 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-jwbbp" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.483331 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.493226 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-jrrx4"] Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.531379 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7b587f8db7-jxsc9" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.556138 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-scripts\") pod \"keystone-bootstrap-jrrx4\" (UID: \"fd273e06-ed83-42c6-aa3e-3ed6eda94c1d\") " pod="openstack/keystone-bootstrap-jrrx4" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.556195 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-credential-keys\") pod \"keystone-bootstrap-jrrx4\" (UID: \"fd273e06-ed83-42c6-aa3e-3ed6eda94c1d\") " pod="openstack/keystone-bootstrap-jrrx4" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.556221 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-config-data\") pod \"keystone-bootstrap-jrrx4\" (UID: \"fd273e06-ed83-42c6-aa3e-3ed6eda94c1d\") " pod="openstack/keystone-bootstrap-jrrx4" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.556414 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-fernet-keys\") pod \"keystone-bootstrap-jrrx4\" (UID: \"fd273e06-ed83-42c6-aa3e-3ed6eda94c1d\") " pod="openstack/keystone-bootstrap-jrrx4" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.556484 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-combined-ca-bundle\") pod \"keystone-bootstrap-jrrx4\" (UID: \"fd273e06-ed83-42c6-aa3e-3ed6eda94c1d\") " pod="openstack/keystone-bootstrap-jrrx4" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.556516 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n4mz5\" (UniqueName: \"kubernetes.io/projected/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-kube-api-access-n4mz5\") pod \"keystone-bootstrap-jrrx4\" (UID: \"fd273e06-ed83-42c6-aa3e-3ed6eda94c1d\") " pod="openstack/keystone-bootstrap-jrrx4" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.658133 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6-ovsdbserver-nb\") pod \"07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6\" (UID: \"07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6\") " Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.658625 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6-ovsdbserver-sb\") pod \"07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6\" (UID: \"07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6\") " Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.658675 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ph7bl\" (UniqueName: \"kubernetes.io/projected/07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6-kube-api-access-ph7bl\") pod \"07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6\" (UID: \"07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6\") " Oct 10 16:50:36 crc 
kubenswrapper[4799]: I1010 16:50:36.658704 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6-dns-svc\") pod \"07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6\" (UID: \"07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6\") " Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.658778 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6-config\") pod \"07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6\" (UID: \"07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6\") " Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.659000 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-fernet-keys\") pod \"keystone-bootstrap-jrrx4\" (UID: \"fd273e06-ed83-42c6-aa3e-3ed6eda94c1d\") " pod="openstack/keystone-bootstrap-jrrx4" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.659065 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-combined-ca-bundle\") pod \"keystone-bootstrap-jrrx4\" (UID: \"fd273e06-ed83-42c6-aa3e-3ed6eda94c1d\") " pod="openstack/keystone-bootstrap-jrrx4" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.659090 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n4mz5\" (UniqueName: \"kubernetes.io/projected/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-kube-api-access-n4mz5\") pod \"keystone-bootstrap-jrrx4\" (UID: \"fd273e06-ed83-42c6-aa3e-3ed6eda94c1d\") " pod="openstack/keystone-bootstrap-jrrx4" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.659511 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-scripts\") pod \"keystone-bootstrap-jrrx4\" (UID: \"fd273e06-ed83-42c6-aa3e-3ed6eda94c1d\") " pod="openstack/keystone-bootstrap-jrrx4" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.659559 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-credential-keys\") pod \"keystone-bootstrap-jrrx4\" (UID: \"fd273e06-ed83-42c6-aa3e-3ed6eda94c1d\") " pod="openstack/keystone-bootstrap-jrrx4" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.659584 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-config-data\") pod \"keystone-bootstrap-jrrx4\" (UID: \"fd273e06-ed83-42c6-aa3e-3ed6eda94c1d\") " pod="openstack/keystone-bootstrap-jrrx4" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.663168 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-combined-ca-bundle\") pod \"keystone-bootstrap-jrrx4\" (UID: \"fd273e06-ed83-42c6-aa3e-3ed6eda94c1d\") " pod="openstack/keystone-bootstrap-jrrx4" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.663323 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-config-data\") pod 
\"keystone-bootstrap-jrrx4\" (UID: \"fd273e06-ed83-42c6-aa3e-3ed6eda94c1d\") " pod="openstack/keystone-bootstrap-jrrx4" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.664107 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6-kube-api-access-ph7bl" (OuterVolumeSpecName: "kube-api-access-ph7bl") pod "07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6" (UID: "07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6"). InnerVolumeSpecName "kube-api-access-ph7bl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.666785 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-credential-keys\") pod \"keystone-bootstrap-jrrx4\" (UID: \"fd273e06-ed83-42c6-aa3e-3ed6eda94c1d\") " pod="openstack/keystone-bootstrap-jrrx4" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.668054 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-fernet-keys\") pod \"keystone-bootstrap-jrrx4\" (UID: \"fd273e06-ed83-42c6-aa3e-3ed6eda94c1d\") " pod="openstack/keystone-bootstrap-jrrx4" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.669633 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-scripts\") pod \"keystone-bootstrap-jrrx4\" (UID: \"fd273e06-ed83-42c6-aa3e-3ed6eda94c1d\") " pod="openstack/keystone-bootstrap-jrrx4" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.681961 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n4mz5\" (UniqueName: \"kubernetes.io/projected/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-kube-api-access-n4mz5\") pod \"keystone-bootstrap-jrrx4\" (UID: \"fd273e06-ed83-42c6-aa3e-3ed6eda94c1d\") " pod="openstack/keystone-bootstrap-jrrx4" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.708071 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6" (UID: "07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.708084 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6-config" (OuterVolumeSpecName: "config") pod "07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6" (UID: "07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.711530 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6" (UID: "07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.714447 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6" (UID: "07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.761109 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.761147 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ph7bl\" (UniqueName: \"kubernetes.io/projected/07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6-kube-api-access-ph7bl\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.761179 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.761188 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.761197 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.846848 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.858772 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-dt6zw"] Oct 10 16:50:36 crc kubenswrapper[4799]: E1010 16:50:36.859229 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6" containerName="init" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.859246 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6" containerName="init" Oct 10 16:50:36 crc kubenswrapper[4799]: E1010 16:50:36.859257 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6" containerName="dnsmasq-dns" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.859266 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6" containerName="dnsmasq-dns" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.860723 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6" containerName="dnsmasq-dns" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.861627 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-dt6zw" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.865601 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.865855 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-tvnq2" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.866047 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.866669 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-dt6zw"] Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.885105 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-jrrx4" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.948562 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 10 16:50:36 crc kubenswrapper[4799]: W1010 16:50:36.956905 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09ce78bf_a8a9_4868_a6b7_4a1a56910bf5.slice/crio-b9d234c8fd7125c66f7e4943e0515b47e29391722b0a393860cc6cfcc4f0988a WatchSource:0}: Error finding container b9d234c8fd7125c66f7e4943e0515b47e29391722b0a393860cc6cfcc4f0988a: Status 404 returned error can't find the container with id b9d234c8fd7125c66f7e4943e0515b47e29391722b0a393860cc6cfcc4f0988a Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.963223 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/875e600d-b55a-48a9-a181-3ad09c24cc41-scripts\") pod \"cinder-db-sync-dt6zw\" (UID: \"875e600d-b55a-48a9-a181-3ad09c24cc41\") " pod="openstack/cinder-db-sync-dt6zw" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.963283 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drvnl\" (UniqueName: \"kubernetes.io/projected/875e600d-b55a-48a9-a181-3ad09c24cc41-kube-api-access-drvnl\") pod \"cinder-db-sync-dt6zw\" (UID: \"875e600d-b55a-48a9-a181-3ad09c24cc41\") " pod="openstack/cinder-db-sync-dt6zw" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.963324 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/875e600d-b55a-48a9-a181-3ad09c24cc41-combined-ca-bundle\") pod \"cinder-db-sync-dt6zw\" (UID: \"875e600d-b55a-48a9-a181-3ad09c24cc41\") " pod="openstack/cinder-db-sync-dt6zw" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.963351 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/875e600d-b55a-48a9-a181-3ad09c24cc41-config-data\") pod \"cinder-db-sync-dt6zw\" (UID: \"875e600d-b55a-48a9-a181-3ad09c24cc41\") " pod="openstack/cinder-db-sync-dt6zw" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.963385 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/875e600d-b55a-48a9-a181-3ad09c24cc41-db-sync-config-data\") pod \"cinder-db-sync-dt6zw\" (UID: \"875e600d-b55a-48a9-a181-3ad09c24cc41\") " 
pod="openstack/cinder-db-sync-dt6zw" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.963424 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/875e600d-b55a-48a9-a181-3ad09c24cc41-etc-machine-id\") pod \"cinder-db-sync-dt6zw\" (UID: \"875e600d-b55a-48a9-a181-3ad09c24cc41\") " pod="openstack/cinder-db-sync-dt6zw" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.967568 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-5fpmb" event={"ID":"6d2833c7-9e1e-4063-93a1-54aded9b6daf","Type":"ContainerStarted","Data":"1ba620724d6ec0019ebffc23f81702b7d8bb95b7aecda8d154251b288a4a2c53"} Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.977076 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b587f8db7-jxsc9" event={"ID":"07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6","Type":"ContainerDied","Data":"974112c8150d77187fa02da9b5d0481fb5c198221c36009668bc6fd00e58ecc3"} Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.977129 4799 scope.go:117] "RemoveContainer" containerID="17d83bee82a2e1f670f14fc38a4b800a5bc5d1d3bcc4daf243627e79b435f4d6" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.977129 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7b587f8db7-jxsc9" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.988614 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-5fpmb" podStartSLOduration=2.148881464 podStartE2EDuration="13.988595245s" podCreationTimestamp="2025-10-10 16:50:23 +0000 UTC" firstStartedPulling="2025-10-10 16:50:24.380977712 +0000 UTC m=+1117.889301827" lastFinishedPulling="2025-10-10 16:50:36.220691493 +0000 UTC m=+1129.729015608" observedRunningTime="2025-10-10 16:50:36.983315525 +0000 UTC m=+1130.491639670" watchObservedRunningTime="2025-10-10 16:50:36.988595245 +0000 UTC m=+1130.496919370" Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.990286 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-mrh2w" event={"ID":"a8dccd24-a3ca-4f98-90b4-e2943cd228d3","Type":"ContainerStarted","Data":"4a7f26c0a29102620ae44684ad6b914b3b0a0afa490ea3c581dc7f606034046b"} Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.994972 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"76e21275-9059-4dac-b883-b750007e51c3","Type":"ContainerStarted","Data":"22f9ac9066da6fa21af9dbf9463d5a228ab11ecd629d44b1aff8f3b89435f83d"} Oct 10 16:50:36 crc kubenswrapper[4799]: I1010 16:50:36.996852 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"766235b0-a5b9-4448-8dac-1afd1ca60e50","Type":"ContainerStarted","Data":"3c3489eae1a3f26fc1d5543050426a7e85875420285e1be95fa2f92f06fadd75"} Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.015048 4799 scope.go:117] "RemoveContainer" containerID="b7b8fc584c61d1416fc6fe84452ee391b63af3b2562fbacb013a63f417d8343d" Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.036910 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7b587f8db7-jxsc9"] Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.045334 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7b587f8db7-jxsc9"] Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.049363 4799 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-mrh2w" podStartSLOduration=2.220995644 podStartE2EDuration="14.049340335s" podCreationTimestamp="2025-10-10 16:50:23 +0000 UTC" firstStartedPulling="2025-10-10 16:50:24.429584241 +0000 UTC m=+1117.937908356" lastFinishedPulling="2025-10-10 16:50:36.257928932 +0000 UTC m=+1129.766253047" observedRunningTime="2025-10-10 16:50:37.02607353 +0000 UTC m=+1130.534397655" watchObservedRunningTime="2025-10-10 16:50:37.049340335 +0000 UTC m=+1130.557664450" Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.066694 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/875e600d-b55a-48a9-a181-3ad09c24cc41-combined-ca-bundle\") pod \"cinder-db-sync-dt6zw\" (UID: \"875e600d-b55a-48a9-a181-3ad09c24cc41\") " pod="openstack/cinder-db-sync-dt6zw" Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.066740 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/875e600d-b55a-48a9-a181-3ad09c24cc41-config-data\") pod \"cinder-db-sync-dt6zw\" (UID: \"875e600d-b55a-48a9-a181-3ad09c24cc41\") " pod="openstack/cinder-db-sync-dt6zw" Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.066802 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/875e600d-b55a-48a9-a181-3ad09c24cc41-db-sync-config-data\") pod \"cinder-db-sync-dt6zw\" (UID: \"875e600d-b55a-48a9-a181-3ad09c24cc41\") " pod="openstack/cinder-db-sync-dt6zw" Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.066845 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/875e600d-b55a-48a9-a181-3ad09c24cc41-etc-machine-id\") pod \"cinder-db-sync-dt6zw\" (UID: \"875e600d-b55a-48a9-a181-3ad09c24cc41\") " pod="openstack/cinder-db-sync-dt6zw" Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.066937 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/875e600d-b55a-48a9-a181-3ad09c24cc41-scripts\") pod \"cinder-db-sync-dt6zw\" (UID: \"875e600d-b55a-48a9-a181-3ad09c24cc41\") " pod="openstack/cinder-db-sync-dt6zw" Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.066967 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drvnl\" (UniqueName: \"kubernetes.io/projected/875e600d-b55a-48a9-a181-3ad09c24cc41-kube-api-access-drvnl\") pod \"cinder-db-sync-dt6zw\" (UID: \"875e600d-b55a-48a9-a181-3ad09c24cc41\") " pod="openstack/cinder-db-sync-dt6zw" Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.073902 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/875e600d-b55a-48a9-a181-3ad09c24cc41-etc-machine-id\") pod \"cinder-db-sync-dt6zw\" (UID: \"875e600d-b55a-48a9-a181-3ad09c24cc41\") " pod="openstack/cinder-db-sync-dt6zw" Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.074625 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/875e600d-b55a-48a9-a181-3ad09c24cc41-combined-ca-bundle\") pod \"cinder-db-sync-dt6zw\" (UID: \"875e600d-b55a-48a9-a181-3ad09c24cc41\") " pod="openstack/cinder-db-sync-dt6zw" Oct 10 16:50:37 crc 
kubenswrapper[4799]: I1010 16:50:37.077073 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/875e600d-b55a-48a9-a181-3ad09c24cc41-scripts\") pod \"cinder-db-sync-dt6zw\" (UID: \"875e600d-b55a-48a9-a181-3ad09c24cc41\") " pod="openstack/cinder-db-sync-dt6zw" Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.077361 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/875e600d-b55a-48a9-a181-3ad09c24cc41-db-sync-config-data\") pod \"cinder-db-sync-dt6zw\" (UID: \"875e600d-b55a-48a9-a181-3ad09c24cc41\") " pod="openstack/cinder-db-sync-dt6zw" Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.077389 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/875e600d-b55a-48a9-a181-3ad09c24cc41-config-data\") pod \"cinder-db-sync-dt6zw\" (UID: \"875e600d-b55a-48a9-a181-3ad09c24cc41\") " pod="openstack/cinder-db-sync-dt6zw" Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.084607 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drvnl\" (UniqueName: \"kubernetes.io/projected/875e600d-b55a-48a9-a181-3ad09c24cc41-kube-api-access-drvnl\") pod \"cinder-db-sync-dt6zw\" (UID: \"875e600d-b55a-48a9-a181-3ad09c24cc41\") " pod="openstack/cinder-db-sync-dt6zw" Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.161729 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-6chg9"] Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.162920 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-6chg9" Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.164972 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.165564 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.165833 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-rws6f" Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.173212 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-6chg9"] Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.181068 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-dt6zw" Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.270054 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-blc9f\" (UniqueName: \"kubernetes.io/projected/adc813f3-66fb-467b-9033-c78cba4a2d36-kube-api-access-blc9f\") pod \"neutron-db-sync-6chg9\" (UID: \"adc813f3-66fb-467b-9033-c78cba4a2d36\") " pod="openstack/neutron-db-sync-6chg9" Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.270338 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/adc813f3-66fb-467b-9033-c78cba4a2d36-combined-ca-bundle\") pod \"neutron-db-sync-6chg9\" (UID: \"adc813f3-66fb-467b-9033-c78cba4a2d36\") " pod="openstack/neutron-db-sync-6chg9" Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.270392 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/adc813f3-66fb-467b-9033-c78cba4a2d36-config\") pod \"neutron-db-sync-6chg9\" (UID: \"adc813f3-66fb-467b-9033-c78cba4a2d36\") " pod="openstack/neutron-db-sync-6chg9" Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.372214 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/adc813f3-66fb-467b-9033-c78cba4a2d36-combined-ca-bundle\") pod \"neutron-db-sync-6chg9\" (UID: \"adc813f3-66fb-467b-9033-c78cba4a2d36\") " pod="openstack/neutron-db-sync-6chg9" Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.372622 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/adc813f3-66fb-467b-9033-c78cba4a2d36-config\") pod \"neutron-db-sync-6chg9\" (UID: \"adc813f3-66fb-467b-9033-c78cba4a2d36\") " pod="openstack/neutron-db-sync-6chg9" Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.372674 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-blc9f\" (UniqueName: \"kubernetes.io/projected/adc813f3-66fb-467b-9033-c78cba4a2d36-kube-api-access-blc9f\") pod \"neutron-db-sync-6chg9\" (UID: \"adc813f3-66fb-467b-9033-c78cba4a2d36\") " pod="openstack/neutron-db-sync-6chg9" Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.392404 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/adc813f3-66fb-467b-9033-c78cba4a2d36-config\") pod \"neutron-db-sync-6chg9\" (UID: \"adc813f3-66fb-467b-9033-c78cba4a2d36\") " pod="openstack/neutron-db-sync-6chg9" Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.402827 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/adc813f3-66fb-467b-9033-c78cba4a2d36-combined-ca-bundle\") pod \"neutron-db-sync-6chg9\" (UID: \"adc813f3-66fb-467b-9033-c78cba4a2d36\") " pod="openstack/neutron-db-sync-6chg9" Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.403396 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-blc9f\" (UniqueName: \"kubernetes.io/projected/adc813f3-66fb-467b-9033-c78cba4a2d36-kube-api-access-blc9f\") pod \"neutron-db-sync-6chg9\" (UID: \"adc813f3-66fb-467b-9033-c78cba4a2d36\") " pod="openstack/neutron-db-sync-6chg9" Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.458187 4799 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6" path="/var/lib/kubelet/pods/07d3db2f-d9ff-4ac9-bbaa-db30f610a7a6/volumes" Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.459208 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="454f77a5-dadb-4038-8ec3-c3a1bd0c2c22" path="/var/lib/kubelet/pods/454f77a5-dadb-4038-8ec3-c3a1bd0c2c22/volumes" Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.459893 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-jrrx4"] Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.486204 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-6chg9" Oct 10 16:50:37 crc kubenswrapper[4799]: I1010 16:50:37.676665 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-dt6zw"] Oct 10 16:50:38 crc kubenswrapper[4799]: I1010 16:50:38.012310 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-6chg9"] Oct 10 16:50:38 crc kubenswrapper[4799]: I1010 16:50:38.020719 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-dt6zw" event={"ID":"875e600d-b55a-48a9-a181-3ad09c24cc41","Type":"ContainerStarted","Data":"987eb13b64ca8dfe18e15a7b6b5998d89824a9af535c6285d2030faa0864e5e4"} Oct 10 16:50:38 crc kubenswrapper[4799]: I1010 16:50:38.025179 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"76e21275-9059-4dac-b883-b750007e51c3","Type":"ContainerStarted","Data":"ab959fe8cf0f34f2a3f4cdc60781adfe5fd472a8ed0b2b36ce9331f59946ba31"} Oct 10 16:50:38 crc kubenswrapper[4799]: I1010 16:50:38.027171 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5","Type":"ContainerStarted","Data":"1a424dea1ad69be4e046a80592dc9fc3648a860a959c3a0c3501291770b35964"} Oct 10 16:50:38 crc kubenswrapper[4799]: I1010 16:50:38.027191 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5","Type":"ContainerStarted","Data":"b9d234c8fd7125c66f7e4943e0515b47e29391722b0a393860cc6cfcc4f0988a"} Oct 10 16:50:38 crc kubenswrapper[4799]: I1010 16:50:38.035361 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-jrrx4" event={"ID":"fd273e06-ed83-42c6-aa3e-3ed6eda94c1d","Type":"ContainerStarted","Data":"9130b5b2922996b3ae5e11a6833e22b47fbd0cb063fb0cb83a0b3202990aa273"} Oct 10 16:50:38 crc kubenswrapper[4799]: I1010 16:50:38.035398 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-jrrx4" event={"ID":"fd273e06-ed83-42c6-aa3e-3ed6eda94c1d","Type":"ContainerStarted","Data":"6f90e352fb6a282054a8d3e031ce5dbdb95e500e7057110d4d05ea35e91ab9ab"} Oct 10 16:50:38 crc kubenswrapper[4799]: W1010 16:50:38.731799 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podadc813f3_66fb_467b_9033_c78cba4a2d36.slice/crio-61b9629f2498104607d9924e6c29662b22336561e08e649a96e9586c3b559e3f WatchSource:0}: Error finding container 61b9629f2498104607d9924e6c29662b22336561e08e649a96e9586c3b559e3f: Status 404 returned error can't find the container with id 61b9629f2498104607d9924e6c29662b22336561e08e649a96e9586c3b559e3f Oct 10 16:50:39 crc kubenswrapper[4799]: I1010 
16:50:39.042610 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"76e21275-9059-4dac-b883-b750007e51c3","Type":"ContainerStarted","Data":"e82e8f901a59cc866346c95d76d43023571438e2d8b6fff6e746e5d6b112b777"} Oct 10 16:50:39 crc kubenswrapper[4799]: I1010 16:50:39.042665 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="76e21275-9059-4dac-b883-b750007e51c3" containerName="glance-log" containerID="cri-o://ab959fe8cf0f34f2a3f4cdc60781adfe5fd472a8ed0b2b36ce9331f59946ba31" gracePeriod=30 Oct 10 16:50:39 crc kubenswrapper[4799]: I1010 16:50:39.043121 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="76e21275-9059-4dac-b883-b750007e51c3" containerName="glance-httpd" containerID="cri-o://e82e8f901a59cc866346c95d76d43023571438e2d8b6fff6e746e5d6b112b777" gracePeriod=30 Oct 10 16:50:39 crc kubenswrapper[4799]: I1010 16:50:39.047445 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5","Type":"ContainerStarted","Data":"25fea352a384018c46866f9a1da31cdf8c130becd13452d2aa596c764817aebf"} Oct 10 16:50:39 crc kubenswrapper[4799]: I1010 16:50:39.047679 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="09ce78bf-a8a9-4868-a6b7-4a1a56910bf5" containerName="glance-log" containerID="cri-o://1a424dea1ad69be4e046a80592dc9fc3648a860a959c3a0c3501291770b35964" gracePeriod=30 Oct 10 16:50:39 crc kubenswrapper[4799]: I1010 16:50:39.047785 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="09ce78bf-a8a9-4868-a6b7-4a1a56910bf5" containerName="glance-httpd" containerID="cri-o://25fea352a384018c46866f9a1da31cdf8c130becd13452d2aa596c764817aebf" gracePeriod=30 Oct 10 16:50:39 crc kubenswrapper[4799]: I1010 16:50:39.050146 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-6chg9" event={"ID":"adc813f3-66fb-467b-9033-c78cba4a2d36","Type":"ContainerStarted","Data":"61b9629f2498104607d9924e6c29662b22336561e08e649a96e9586c3b559e3f"} Oct 10 16:50:39 crc kubenswrapper[4799]: I1010 16:50:39.065173 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-jrrx4" podStartSLOduration=3.065155174 podStartE2EDuration="3.065155174s" podCreationTimestamp="2025-10-10 16:50:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:50:38.056694386 +0000 UTC m=+1131.565018501" watchObservedRunningTime="2025-10-10 16:50:39.065155174 +0000 UTC m=+1132.573479289" Oct 10 16:50:39 crc kubenswrapper[4799]: I1010 16:50:39.072220 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=8.072200398 podStartE2EDuration="8.072200398s" podCreationTimestamp="2025-10-10 16:50:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:50:39.063630177 +0000 UTC m=+1132.571954302" watchObservedRunningTime="2025-10-10 16:50:39.072200398 +0000 UTC m=+1132.580524513" Oct 10 16:50:39 crc kubenswrapper[4799]: I1010 16:50:39.089441 4799 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=11.089419203 podStartE2EDuration="11.089419203s" podCreationTimestamp="2025-10-10 16:50:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:50:39.081890627 +0000 UTC m=+1132.590214742" watchObservedRunningTime="2025-10-10 16:50:39.089419203 +0000 UTC m=+1132.597743318" Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.070706 4799 generic.go:334] "Generic (PLEG): container finished" podID="09ce78bf-a8a9-4868-a6b7-4a1a56910bf5" containerID="25fea352a384018c46866f9a1da31cdf8c130becd13452d2aa596c764817aebf" exitCode=0 Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.070804 4799 generic.go:334] "Generic (PLEG): container finished" podID="09ce78bf-a8a9-4868-a6b7-4a1a56910bf5" containerID="1a424dea1ad69be4e046a80592dc9fc3648a860a959c3a0c3501291770b35964" exitCode=143 Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.070817 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5","Type":"ContainerDied","Data":"25fea352a384018c46866f9a1da31cdf8c130becd13452d2aa596c764817aebf"} Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.070898 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5","Type":"ContainerDied","Data":"1a424dea1ad69be4e046a80592dc9fc3648a860a959c3a0c3501291770b35964"} Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.073956 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-6chg9" event={"ID":"adc813f3-66fb-467b-9033-c78cba4a2d36","Type":"ContainerStarted","Data":"7521cb801e0a8accc208d2be60734cccb2b3fe5514d504e26c34084683264aee"} Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.080686 4799 generic.go:334] "Generic (PLEG): container finished" podID="76e21275-9059-4dac-b883-b750007e51c3" containerID="e82e8f901a59cc866346c95d76d43023571438e2d8b6fff6e746e5d6b112b777" exitCode=0 Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.080717 4799 generic.go:334] "Generic (PLEG): container finished" podID="76e21275-9059-4dac-b883-b750007e51c3" containerID="ab959fe8cf0f34f2a3f4cdc60781adfe5fd472a8ed0b2b36ce9331f59946ba31" exitCode=143 Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.080749 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"76e21275-9059-4dac-b883-b750007e51c3","Type":"ContainerDied","Data":"e82e8f901a59cc866346c95d76d43023571438e2d8b6fff6e746e5d6b112b777"} Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.080818 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"76e21275-9059-4dac-b883-b750007e51c3","Type":"ContainerDied","Data":"ab959fe8cf0f34f2a3f4cdc60781adfe5fd472a8ed0b2b36ce9331f59946ba31"} Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.094640 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-6chg9" podStartSLOduration=3.094616401 podStartE2EDuration="3.094616401s" podCreationTimestamp="2025-10-10 16:50:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:50:40.088522141 +0000 UTC 
m=+1133.596846266" watchObservedRunningTime="2025-10-10 16:50:40.094616401 +0000 UTC m=+1133.602940516" Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.304135 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.438398 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"76e21275-9059-4dac-b883-b750007e51c3\" (UID: \"76e21275-9059-4dac-b883-b750007e51c3\") " Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.440208 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/76e21275-9059-4dac-b883-b750007e51c3-logs" (OuterVolumeSpecName: "logs") pod "76e21275-9059-4dac-b883-b750007e51c3" (UID: "76e21275-9059-4dac-b883-b750007e51c3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.440292 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/76e21275-9059-4dac-b883-b750007e51c3-logs\") pod \"76e21275-9059-4dac-b883-b750007e51c3\" (UID: \"76e21275-9059-4dac-b883-b750007e51c3\") " Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.441561 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/76e21275-9059-4dac-b883-b750007e51c3-scripts\") pod \"76e21275-9059-4dac-b883-b750007e51c3\" (UID: \"76e21275-9059-4dac-b883-b750007e51c3\") " Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.441650 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76e21275-9059-4dac-b883-b750007e51c3-combined-ca-bundle\") pod \"76e21275-9059-4dac-b883-b750007e51c3\" (UID: \"76e21275-9059-4dac-b883-b750007e51c3\") " Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.441697 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76e21275-9059-4dac-b883-b750007e51c3-config-data\") pod \"76e21275-9059-4dac-b883-b750007e51c3\" (UID: \"76e21275-9059-4dac-b883-b750007e51c3\") " Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.441799 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lr2pz\" (UniqueName: \"kubernetes.io/projected/76e21275-9059-4dac-b883-b750007e51c3-kube-api-access-lr2pz\") pod \"76e21275-9059-4dac-b883-b750007e51c3\" (UID: \"76e21275-9059-4dac-b883-b750007e51c3\") " Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.441899 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/76e21275-9059-4dac-b883-b750007e51c3-httpd-run\") pod \"76e21275-9059-4dac-b883-b750007e51c3\" (UID: \"76e21275-9059-4dac-b883-b750007e51c3\") " Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.444669 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/76e21275-9059-4dac-b883-b750007e51c3-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "76e21275-9059-4dac-b883-b750007e51c3" (UID: "76e21275-9059-4dac-b883-b750007e51c3"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.445329 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "glance") pod "76e21275-9059-4dac-b883-b750007e51c3" (UID: "76e21275-9059-4dac-b883-b750007e51c3"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.446502 4799 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.446522 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/76e21275-9059-4dac-b883-b750007e51c3-logs\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.448913 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76e21275-9059-4dac-b883-b750007e51c3-kube-api-access-lr2pz" (OuterVolumeSpecName: "kube-api-access-lr2pz") pod "76e21275-9059-4dac-b883-b750007e51c3" (UID: "76e21275-9059-4dac-b883-b750007e51c3"). InnerVolumeSpecName "kube-api-access-lr2pz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.454161 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76e21275-9059-4dac-b883-b750007e51c3-scripts" (OuterVolumeSpecName: "scripts") pod "76e21275-9059-4dac-b883-b750007e51c3" (UID: "76e21275-9059-4dac-b883-b750007e51c3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.471639 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76e21275-9059-4dac-b883-b750007e51c3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "76e21275-9059-4dac-b883-b750007e51c3" (UID: "76e21275-9059-4dac-b883-b750007e51c3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.481003 4799 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.492506 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.493964 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76e21275-9059-4dac-b883-b750007e51c3-config-data" (OuterVolumeSpecName: "config-data") pod "76e21275-9059-4dac-b883-b750007e51c3" (UID: "76e21275-9059-4dac-b883-b750007e51c3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.550018 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lr2pz\" (UniqueName: \"kubernetes.io/projected/76e21275-9059-4dac-b883-b750007e51c3-kube-api-access-lr2pz\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.550566 4799 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/76e21275-9059-4dac-b883-b750007e51c3-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.550821 4799 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.550895 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/76e21275-9059-4dac-b883-b750007e51c3-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.550973 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76e21275-9059-4dac-b883-b750007e51c3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.551120 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76e21275-9059-4dac-b883-b750007e51c3-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.652565 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-httpd-run\") pod \"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\" (UID: \"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\") " Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.652822 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-scripts\") pod \"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\" (UID: \"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\") " Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.652926 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-combined-ca-bundle\") pod \"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\" (UID: \"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\") " Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.653113 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-phqhm\" (UniqueName: \"kubernetes.io/projected/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-kube-api-access-phqhm\") pod \"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\" (UID: \"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\") " Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.653200 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-logs\") pod \"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\" (UID: \"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\") " Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.653266 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "09ce78bf-a8a9-4868-a6b7-4a1a56910bf5" (UID: "09ce78bf-a8a9-4868-a6b7-4a1a56910bf5"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.653634 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-logs" (OuterVolumeSpecName: "logs") pod "09ce78bf-a8a9-4868-a6b7-4a1a56910bf5" (UID: "09ce78bf-a8a9-4868-a6b7-4a1a56910bf5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.653861 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-config-data\") pod \"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\" (UID: \"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\") " Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.653935 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\" (UID: \"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5\") " Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.654330 4799 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.654391 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-logs\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.657659 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "09ce78bf-a8a9-4868-a6b7-4a1a56910bf5" (UID: "09ce78bf-a8a9-4868-a6b7-4a1a56910bf5"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.658288 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-kube-api-access-phqhm" (OuterVolumeSpecName: "kube-api-access-phqhm") pod "09ce78bf-a8a9-4868-a6b7-4a1a56910bf5" (UID: "09ce78bf-a8a9-4868-a6b7-4a1a56910bf5"). InnerVolumeSpecName "kube-api-access-phqhm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.660358 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-scripts" (OuterVolumeSpecName: "scripts") pod "09ce78bf-a8a9-4868-a6b7-4a1a56910bf5" (UID: "09ce78bf-a8a9-4868-a6b7-4a1a56910bf5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.706544 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "09ce78bf-a8a9-4868-a6b7-4a1a56910bf5" (UID: "09ce78bf-a8a9-4868-a6b7-4a1a56910bf5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.711208 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-config-data" (OuterVolumeSpecName: "config-data") pod "09ce78bf-a8a9-4868-a6b7-4a1a56910bf5" (UID: "09ce78bf-a8a9-4868-a6b7-4a1a56910bf5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.756781 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.756843 4799 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.756854 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.756865 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.756878 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-phqhm\" (UniqueName: \"kubernetes.io/projected/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5-kube-api-access-phqhm\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.785212 4799 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Oct 10 16:50:40 crc kubenswrapper[4799]: I1010 16:50:40.859282 4799 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.100100 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"76e21275-9059-4dac-b883-b750007e51c3","Type":"ContainerDied","Data":"22f9ac9066da6fa21af9dbf9463d5a228ab11ecd629d44b1aff8f3b89435f83d"} Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.100151 4799 scope.go:117] "RemoveContainer" containerID="e82e8f901a59cc866346c95d76d43023571438e2d8b6fff6e746e5d6b112b777" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.100192 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.103545 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"766235b0-a5b9-4448-8dac-1afd1ca60e50","Type":"ContainerStarted","Data":"16f7a682fd238e46067b36808612cbce27d75cdbcd6d153529e759fa91f9d41c"} Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.111057 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"09ce78bf-a8a9-4868-a6b7-4a1a56910bf5","Type":"ContainerDied","Data":"b9d234c8fd7125c66f7e4943e0515b47e29391722b0a393860cc6cfcc4f0988a"} Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.111076 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.148259 4799 scope.go:117] "RemoveContainer" containerID="ab959fe8cf0f34f2a3f4cdc60781adfe5fd472a8ed0b2b36ce9331f59946ba31" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.154317 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.173802 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.197436 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.230895 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.241341 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.245932 4799 scope.go:117] "RemoveContainer" containerID="25fea352a384018c46866f9a1da31cdf8c130becd13452d2aa596c764817aebf" Oct 10 16:50:41 crc kubenswrapper[4799]: E1010 16:50:41.252067 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76e21275-9059-4dac-b883-b750007e51c3" containerName="glance-log" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.252104 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="76e21275-9059-4dac-b883-b750007e51c3" containerName="glance-log" Oct 10 16:50:41 crc kubenswrapper[4799]: E1010 16:50:41.252123 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76e21275-9059-4dac-b883-b750007e51c3" containerName="glance-httpd" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.252131 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="76e21275-9059-4dac-b883-b750007e51c3" containerName="glance-httpd" Oct 10 16:50:41 crc kubenswrapper[4799]: E1010 16:50:41.252156 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09ce78bf-a8a9-4868-a6b7-4a1a56910bf5" containerName="glance-httpd" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.252163 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="09ce78bf-a8a9-4868-a6b7-4a1a56910bf5" containerName="glance-httpd" Oct 10 16:50:41 crc kubenswrapper[4799]: E1010 16:50:41.252179 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09ce78bf-a8a9-4868-a6b7-4a1a56910bf5" containerName="glance-log" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.252189 4799 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="09ce78bf-a8a9-4868-a6b7-4a1a56910bf5" containerName="glance-log" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.252423 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="09ce78bf-a8a9-4868-a6b7-4a1a56910bf5" containerName="glance-httpd" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.252440 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="76e21275-9059-4dac-b883-b750007e51c3" containerName="glance-log" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.252447 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="76e21275-9059-4dac-b883-b750007e51c3" containerName="glance-httpd" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.252461 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="09ce78bf-a8a9-4868-a6b7-4a1a56910bf5" containerName="glance-log" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.253374 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.256244 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-l4znb" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.256366 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.256458 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.256614 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.268468 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.272167 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.279059 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.279106 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.280088 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.288949 4799 scope.go:117] "RemoveContainer" containerID="1a424dea1ad69be4e046a80592dc9fc3648a860a959c3a0c3501291770b35964" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.312626 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.387429 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.388030 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.388132 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.388241 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dl6pq\" (UniqueName: \"kubernetes.io/projected/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-kube-api-access-dl6pq\") pod \"glance-default-internal-api-0\" (UID: \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.388358 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ef55ae74-4435-4042-9567-4d07d41b3ce0-logs\") pod \"glance-default-external-api-0\" (UID: \"ef55ae74-4435-4042-9567-4d07d41b3ce0\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.388476 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef55ae74-4435-4042-9567-4d07d41b3ce0-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ef55ae74-4435-4042-9567-4d07d41b3ce0\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.388835 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/ef55ae74-4435-4042-9567-4d07d41b3ce0-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ef55ae74-4435-4042-9567-4d07d41b3ce0\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.389078 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.389198 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef55ae74-4435-4042-9567-4d07d41b3ce0-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ef55ae74-4435-4042-9567-4d07d41b3ce0\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.389312 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.389411 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-logs\") pod \"glance-default-internal-api-0\" (UID: \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.389517 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef55ae74-4435-4042-9567-4d07d41b3ce0-config-data\") pod \"glance-default-external-api-0\" (UID: \"ef55ae74-4435-4042-9567-4d07d41b3ce0\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.389633 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swgwv\" (UniqueName: \"kubernetes.io/projected/ef55ae74-4435-4042-9567-4d07d41b3ce0-kube-api-access-swgwv\") pod \"glance-default-external-api-0\" (UID: \"ef55ae74-4435-4042-9567-4d07d41b3ce0\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.389798 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.389916 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"ef55ae74-4435-4042-9567-4d07d41b3ce0\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.390045 4799 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef55ae74-4435-4042-9567-4d07d41b3ce0-scripts\") pod \"glance-default-external-api-0\" (UID: \"ef55ae74-4435-4042-9567-4d07d41b3ce0\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.424123 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ce78bf-a8a9-4868-a6b7-4a1a56910bf5" path="/var/lib/kubelet/pods/09ce78bf-a8a9-4868-a6b7-4a1a56910bf5/volumes" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.425014 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76e21275-9059-4dac-b883-b750007e51c3" path="/var/lib/kubelet/pods/76e21275-9059-4dac-b883-b750007e51c3/volumes" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.491840 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef55ae74-4435-4042-9567-4d07d41b3ce0-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ef55ae74-4435-4042-9567-4d07d41b3ce0\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.491900 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ef55ae74-4435-4042-9567-4d07d41b3ce0-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ef55ae74-4435-4042-9567-4d07d41b3ce0\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.491965 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.491990 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef55ae74-4435-4042-9567-4d07d41b3ce0-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ef55ae74-4435-4042-9567-4d07d41b3ce0\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.492028 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.492050 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-logs\") pod \"glance-default-internal-api-0\" (UID: \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.492080 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef55ae74-4435-4042-9567-4d07d41b3ce0-config-data\") pod \"glance-default-external-api-0\" (UID: \"ef55ae74-4435-4042-9567-4d07d41b3ce0\") " pod="openstack/glance-default-external-api-0" Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.492114 
4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swgwv\" (UniqueName: \"kubernetes.io/projected/ef55ae74-4435-4042-9567-4d07d41b3ce0-kube-api-access-swgwv\") pod \"glance-default-external-api-0\" (UID: \"ef55ae74-4435-4042-9567-4d07d41b3ce0\") " pod="openstack/glance-default-external-api-0"
Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.492140 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\") " pod="openstack/glance-default-internal-api-0"
Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.492182 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"ef55ae74-4435-4042-9567-4d07d41b3ce0\") " pod="openstack/glance-default-external-api-0"
Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.492221 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef55ae74-4435-4042-9567-4d07d41b3ce0-scripts\") pod \"glance-default-external-api-0\" (UID: \"ef55ae74-4435-4042-9567-4d07d41b3ce0\") " pod="openstack/glance-default-external-api-0"
Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.492324 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\") " pod="openstack/glance-default-internal-api-0"
Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.492360 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\") " pod="openstack/glance-default-internal-api-0"
Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.492380 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\") " pod="openstack/glance-default-internal-api-0"
Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.492403 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dl6pq\" (UniqueName: \"kubernetes.io/projected/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-kube-api-access-dl6pq\") pod \"glance-default-internal-api-0\" (UID: \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\") " pod="openstack/glance-default-internal-api-0"
Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.492434 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ef55ae74-4435-4042-9567-4d07d41b3ce0-logs\") pod \"glance-default-external-api-0\" (UID: \"ef55ae74-4435-4042-9567-4d07d41b3ce0\") " pod="openstack/glance-default-external-api-0"
Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.492846 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ef55ae74-4435-4042-9567-4d07d41b3ce0-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ef55ae74-4435-4042-9567-4d07d41b3ce0\") " pod="openstack/glance-default-external-api-0"
Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.493349 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"ef55ae74-4435-4042-9567-4d07d41b3ce0\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-external-api-0"
Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.493744 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\") " pod="openstack/glance-default-internal-api-0"
Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.494028 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-internal-api-0"
Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.495895 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-logs\") pod \"glance-default-internal-api-0\" (UID: \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\") " pod="openstack/glance-default-internal-api-0"
Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.496071 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ef55ae74-4435-4042-9567-4d07d41b3ce0-logs\") pod \"glance-default-external-api-0\" (UID: \"ef55ae74-4435-4042-9567-4d07d41b3ce0\") " pod="openstack/glance-default-external-api-0"
Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.501116 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef55ae74-4435-4042-9567-4d07d41b3ce0-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ef55ae74-4435-4042-9567-4d07d41b3ce0\") " pod="openstack/glance-default-external-api-0"
Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.508029 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\") " pod="openstack/glance-default-internal-api-0"
Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.508646 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\") " pod="openstack/glance-default-internal-api-0"
Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.508803 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\") " pod="openstack/glance-default-internal-api-0"
Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.509173 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef55ae74-4435-4042-9567-4d07d41b3ce0-config-data\") pod \"glance-default-external-api-0\" (UID: \"ef55ae74-4435-4042-9567-4d07d41b3ce0\") " pod="openstack/glance-default-external-api-0"
Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.509622 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef55ae74-4435-4042-9567-4d07d41b3ce0-scripts\") pod \"glance-default-external-api-0\" (UID: \"ef55ae74-4435-4042-9567-4d07d41b3ce0\") " pod="openstack/glance-default-external-api-0"
Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.509859 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef55ae74-4435-4042-9567-4d07d41b3ce0-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ef55ae74-4435-4042-9567-4d07d41b3ce0\") " pod="openstack/glance-default-external-api-0"
Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.514933 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\") " pod="openstack/glance-default-internal-api-0"
Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.517271 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swgwv\" (UniqueName: \"kubernetes.io/projected/ef55ae74-4435-4042-9567-4d07d41b3ce0-kube-api-access-swgwv\") pod \"glance-default-external-api-0\" (UID: \"ef55ae74-4435-4042-9567-4d07d41b3ce0\") " pod="openstack/glance-default-external-api-0"
Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.525344 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dl6pq\" (UniqueName: \"kubernetes.io/projected/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-kube-api-access-dl6pq\") pod \"glance-default-internal-api-0\" (UID: \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\") " pod="openstack/glance-default-internal-api-0"
Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.527899 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\") " pod="openstack/glance-default-internal-api-0"
Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.544606 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"ef55ae74-4435-4042-9567-4d07d41b3ce0\") " pod="openstack/glance-default-external-api-0"
Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.576740 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Oct 10 16:50:41 crc kubenswrapper[4799]: I1010 16:50:41.594604 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 10 16:50:42 crc kubenswrapper[4799]: I1010 16:50:42.127424 4799 generic.go:334] "Generic (PLEG): container finished" podID="fd273e06-ed83-42c6-aa3e-3ed6eda94c1d" containerID="9130b5b2922996b3ae5e11a6833e22b47fbd0cb063fb0cb83a0b3202990aa273" exitCode=0
Oct 10 16:50:42 crc kubenswrapper[4799]: I1010 16:50:42.127493 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-jrrx4" event={"ID":"fd273e06-ed83-42c6-aa3e-3ed6eda94c1d","Type":"ContainerDied","Data":"9130b5b2922996b3ae5e11a6833e22b47fbd0cb063fb0cb83a0b3202990aa273"}
Oct 10 16:50:42 crc kubenswrapper[4799]: I1010 16:50:42.181208 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 10 16:50:42 crc kubenswrapper[4799]: W1010 16:50:42.187685 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podef55ae74_4435_4042_9567_4d07d41b3ce0.slice/crio-ee373a0b9d5ada72b01747a840e14a908d1ee7d468f468e56be2944474ea0af1 WatchSource:0}: Error finding container ee373a0b9d5ada72b01747a840e14a908d1ee7d468f468e56be2944474ea0af1: Status 404 returned error can't find the container with id ee373a0b9d5ada72b01747a840e14a908d1ee7d468f468e56be2944474ea0af1
Oct 10 16:50:42 crc kubenswrapper[4799]: I1010 16:50:42.279288 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 10 16:50:43 crc kubenswrapper[4799]: I1010 16:50:43.151325 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0","Type":"ContainerStarted","Data":"866b587e8dd73a9f0f531084fbcbe1a89d5bdd82e34c2eec0199cef8596e3329"}
Oct 10 16:50:43 crc kubenswrapper[4799]: I1010 16:50:43.151810 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0","Type":"ContainerStarted","Data":"3cd05d3a72dcf4f48946b2a2eac9f5ebde3099bbaa924029c1c346340722213a"}
Oct 10 16:50:43 crc kubenswrapper[4799]: I1010 16:50:43.155828 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ef55ae74-4435-4042-9567-4d07d41b3ce0","Type":"ContainerStarted","Data":"0b146c0c34e0636a32d9113ae943fc6de4bf48319ee1cd0678aadae8c7d6e694"}
Oct 10 16:50:43 crc kubenswrapper[4799]: I1010 16:50:43.155903 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ef55ae74-4435-4042-9567-4d07d41b3ce0","Type":"ContainerStarted","Data":"ee373a0b9d5ada72b01747a840e14a908d1ee7d468f468e56be2944474ea0af1"}
Oct 10 16:50:44 crc kubenswrapper[4799]: I1010 16:50:44.166826 4799 generic.go:334] "Generic (PLEG): container finished" podID="6d2833c7-9e1e-4063-93a1-54aded9b6daf" containerID="1ba620724d6ec0019ebffc23f81702b7d8bb95b7aecda8d154251b288a4a2c53" exitCode=0
Oct 10 16:50:44 crc kubenswrapper[4799]: I1010 16:50:44.166881 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-5fpmb" event={"ID":"6d2833c7-9e1e-4063-93a1-54aded9b6daf","Type":"ContainerDied","Data":"1ba620724d6ec0019ebffc23f81702b7d8bb95b7aecda8d154251b288a4a2c53"}
Oct 10 16:50:44 crc kubenswrapper[4799]: I1010 16:50:44.170458 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ef55ae74-4435-4042-9567-4d07d41b3ce0","Type":"ContainerStarted","Data":"81a33109b42e3a65de9041c84dfd7b0c48481adc2672b592ae62813fadf5065e"}
Oct 10 16:50:44 crc kubenswrapper[4799]: I1010 16:50:44.218479 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.218458587 podStartE2EDuration="3.218458587s" podCreationTimestamp="2025-10-10 16:50:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:50:44.20437774 +0000 UTC m=+1137.712701865" watchObservedRunningTime="2025-10-10 16:50:44.218458587 +0000 UTC m=+1137.726782702"
Oct 10 16:50:44 crc kubenswrapper[4799]: I1010 16:50:44.494881 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-jrrx4"
Oct 10 16:50:44 crc kubenswrapper[4799]: I1010 16:50:44.643661 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-credential-keys\") pod \"fd273e06-ed83-42c6-aa3e-3ed6eda94c1d\" (UID: \"fd273e06-ed83-42c6-aa3e-3ed6eda94c1d\") "
Oct 10 16:50:44 crc kubenswrapper[4799]: I1010 16:50:44.643750 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-config-data\") pod \"fd273e06-ed83-42c6-aa3e-3ed6eda94c1d\" (UID: \"fd273e06-ed83-42c6-aa3e-3ed6eda94c1d\") "
Oct 10 16:50:44 crc kubenswrapper[4799]: I1010 16:50:44.643850 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-combined-ca-bundle\") pod \"fd273e06-ed83-42c6-aa3e-3ed6eda94c1d\" (UID: \"fd273e06-ed83-42c6-aa3e-3ed6eda94c1d\") "
Oct 10 16:50:44 crc kubenswrapper[4799]: I1010 16:50:44.643879 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n4mz5\" (UniqueName: \"kubernetes.io/projected/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-kube-api-access-n4mz5\") pod \"fd273e06-ed83-42c6-aa3e-3ed6eda94c1d\" (UID: \"fd273e06-ed83-42c6-aa3e-3ed6eda94c1d\") "
Oct 10 16:50:44 crc kubenswrapper[4799]: I1010 16:50:44.643953 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-scripts\") pod \"fd273e06-ed83-42c6-aa3e-3ed6eda94c1d\" (UID: \"fd273e06-ed83-42c6-aa3e-3ed6eda94c1d\") "
Oct 10 16:50:44 crc kubenswrapper[4799]: I1010 16:50:44.643976 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-fernet-keys\") pod \"fd273e06-ed83-42c6-aa3e-3ed6eda94c1d\" (UID: \"fd273e06-ed83-42c6-aa3e-3ed6eda94c1d\") "
Oct 10 16:50:44 crc kubenswrapper[4799]: I1010 16:50:44.650156 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "fd273e06-ed83-42c6-aa3e-3ed6eda94c1d" (UID: "fd273e06-ed83-42c6-aa3e-3ed6eda94c1d"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 16:50:44 crc kubenswrapper[4799]: I1010 16:50:44.650191 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-kube-api-access-n4mz5" (OuterVolumeSpecName: "kube-api-access-n4mz5") pod "fd273e06-ed83-42c6-aa3e-3ed6eda94c1d" (UID: "fd273e06-ed83-42c6-aa3e-3ed6eda94c1d"). InnerVolumeSpecName "kube-api-access-n4mz5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 16:50:44 crc kubenswrapper[4799]: I1010 16:50:44.650686 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "fd273e06-ed83-42c6-aa3e-3ed6eda94c1d" (UID: "fd273e06-ed83-42c6-aa3e-3ed6eda94c1d"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 16:50:44 crc kubenswrapper[4799]: I1010 16:50:44.651401 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-scripts" (OuterVolumeSpecName: "scripts") pod "fd273e06-ed83-42c6-aa3e-3ed6eda94c1d" (UID: "fd273e06-ed83-42c6-aa3e-3ed6eda94c1d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 16:50:44 crc kubenswrapper[4799]: I1010 16:50:44.674065 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-config-data" (OuterVolumeSpecName: "config-data") pod "fd273e06-ed83-42c6-aa3e-3ed6eda94c1d" (UID: "fd273e06-ed83-42c6-aa3e-3ed6eda94c1d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 16:50:44 crc kubenswrapper[4799]: I1010 16:50:44.675908 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fd273e06-ed83-42c6-aa3e-3ed6eda94c1d" (UID: "fd273e06-ed83-42c6-aa3e-3ed6eda94c1d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 16:50:44 crc kubenswrapper[4799]: I1010 16:50:44.745658 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-scripts\") on node \"crc\" DevicePath \"\""
Oct 10 16:50:44 crc kubenswrapper[4799]: I1010 16:50:44.745700 4799 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-fernet-keys\") on node \"crc\" DevicePath \"\""
Oct 10 16:50:44 crc kubenswrapper[4799]: I1010 16:50:44.745716 4799 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-credential-keys\") on node \"crc\" DevicePath \"\""
Oct 10 16:50:44 crc kubenswrapper[4799]: I1010 16:50:44.745727 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-config-data\") on node \"crc\" DevicePath \"\""
Oct 10 16:50:44 crc kubenswrapper[4799]: I1010 16:50:44.745738 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 10 16:50:44 crc kubenswrapper[4799]: I1010 16:50:44.745749 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n4mz5\" (UniqueName: \"kubernetes.io/projected/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d-kube-api-access-n4mz5\") on node \"crc\" DevicePath \"\""
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.181387 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-jrrx4"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.182537 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-jrrx4" event={"ID":"fd273e06-ed83-42c6-aa3e-3ed6eda94c1d","Type":"ContainerDied","Data":"6f90e352fb6a282054a8d3e031ce5dbdb95e500e7057110d4d05ea35e91ab9ab"}
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.182570 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f90e352fb6a282054a8d3e031ce5dbdb95e500e7057110d4d05ea35e91ab9ab"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.249020 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.249081 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.249126 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.249937 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7660328ebc7154335d94320ea1d630296da5d0b7a601ee21c41b533b20ba0a49"} pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.250007 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" containerID="cri-o://7660328ebc7154335d94320ea1d630296da5d0b7a601ee21c41b533b20ba0a49" gracePeriod=600
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.577448 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-657bb59659-swzhl"]
Oct 10 16:50:45 crc kubenswrapper[4799]: E1010 16:50:45.577836 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd273e06-ed83-42c6-aa3e-3ed6eda94c1d" containerName="keystone-bootstrap"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.577851 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd273e06-ed83-42c6-aa3e-3ed6eda94c1d" containerName="keystone-bootstrap"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.578014 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd273e06-ed83-42c6-aa3e-3ed6eda94c1d" containerName="keystone-bootstrap"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.578747 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-657bb59659-swzhl"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.585025 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.585611 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.585716 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-jwbbp"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.585953 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.586065 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.595705 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.596398 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-657bb59659-swzhl"]
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.674659 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-scripts\") pod \"keystone-657bb59659-swzhl\" (UID: \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\") " pod="openstack/keystone-657bb59659-swzhl"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.674719 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52fg8\" (UniqueName: \"kubernetes.io/projected/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-kube-api-access-52fg8\") pod \"keystone-657bb59659-swzhl\" (UID: \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\") " pod="openstack/keystone-657bb59659-swzhl"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.674772 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-public-tls-certs\") pod \"keystone-657bb59659-swzhl\" (UID: \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\") " pod="openstack/keystone-657bb59659-swzhl"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.674809 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-credential-keys\") pod \"keystone-657bb59659-swzhl\" (UID: \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\") " pod="openstack/keystone-657bb59659-swzhl"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.674844 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-fernet-keys\") pod \"keystone-657bb59659-swzhl\" (UID: \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\") " pod="openstack/keystone-657bb59659-swzhl"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.674867 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-internal-tls-certs\") pod \"keystone-657bb59659-swzhl\" (UID: \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\") " pod="openstack/keystone-657bb59659-swzhl"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.674912 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-combined-ca-bundle\") pod \"keystone-657bb59659-swzhl\" (UID: \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\") " pod="openstack/keystone-657bb59659-swzhl"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.674944 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-config-data\") pod \"keystone-657bb59659-swzhl\" (UID: \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\") " pod="openstack/keystone-657bb59659-swzhl"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.779805 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-scripts\") pod \"keystone-657bb59659-swzhl\" (UID: \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\") " pod="openstack/keystone-657bb59659-swzhl"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.779862 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52fg8\" (UniqueName: \"kubernetes.io/projected/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-kube-api-access-52fg8\") pod \"keystone-657bb59659-swzhl\" (UID: \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\") " pod="openstack/keystone-657bb59659-swzhl"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.779897 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-public-tls-certs\") pod \"keystone-657bb59659-swzhl\" (UID: \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\") " pod="openstack/keystone-657bb59659-swzhl"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.779939 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-credential-keys\") pod \"keystone-657bb59659-swzhl\" (UID: \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\") " pod="openstack/keystone-657bb59659-swzhl"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.779979 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-fernet-keys\") pod \"keystone-657bb59659-swzhl\" (UID: \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\") " pod="openstack/keystone-657bb59659-swzhl"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.780001 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-internal-tls-certs\") pod \"keystone-657bb59659-swzhl\" (UID: \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\") " pod="openstack/keystone-657bb59659-swzhl"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.780049 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-combined-ca-bundle\") pod \"keystone-657bb59659-swzhl\" (UID: \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\") " pod="openstack/keystone-657bb59659-swzhl"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.780080 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-config-data\") pod \"keystone-657bb59659-swzhl\" (UID: \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\") " pod="openstack/keystone-657bb59659-swzhl"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.794461 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-fernet-keys\") pod \"keystone-657bb59659-swzhl\" (UID: \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\") " pod="openstack/keystone-657bb59659-swzhl"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.799304 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-scripts\") pod \"keystone-657bb59659-swzhl\" (UID: \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\") " pod="openstack/keystone-657bb59659-swzhl"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.799346 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-internal-tls-certs\") pod \"keystone-657bb59659-swzhl\" (UID: \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\") " pod="openstack/keystone-657bb59659-swzhl"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.806336 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-credential-keys\") pod \"keystone-657bb59659-swzhl\" (UID: \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\") " pod="openstack/keystone-657bb59659-swzhl"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.806710 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-public-tls-certs\") pod \"keystone-657bb59659-swzhl\" (UID: \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\") " pod="openstack/keystone-657bb59659-swzhl"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.807211 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-combined-ca-bundle\") pod \"keystone-657bb59659-swzhl\" (UID: \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\") " pod="openstack/keystone-657bb59659-swzhl"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.807276 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-config-data\") pod \"keystone-657bb59659-swzhl\" (UID: \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\") " pod="openstack/keystone-657bb59659-swzhl"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.814440 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52fg8\" (UniqueName: \"kubernetes.io/projected/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-kube-api-access-52fg8\") pod \"keystone-657bb59659-swzhl\" (UID: \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\") " pod="openstack/keystone-657bb59659-swzhl"
Oct 10 16:50:45 crc kubenswrapper[4799]: I1010 16:50:45.961079 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-657bb59659-swzhl"
Oct 10 16:50:46 crc kubenswrapper[4799]: I1010 16:50:46.195944 4799 generic.go:334] "Generic (PLEG): container finished" podID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerID="7660328ebc7154335d94320ea1d630296da5d0b7a601ee21c41b533b20ba0a49" exitCode=0
Oct 10 16:50:46 crc kubenswrapper[4799]: I1010 16:50:46.196018 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerDied","Data":"7660328ebc7154335d94320ea1d630296da5d0b7a601ee21c41b533b20ba0a49"}
Oct 10 16:50:46 crc kubenswrapper[4799]: I1010 16:50:46.196058 4799 scope.go:117] "RemoveContainer" containerID="5d0c20be696163127fb1361e7edc7eadb541b7fccbd83cd240ae6b5f02af5dd5"
Oct 10 16:50:46 crc kubenswrapper[4799]: I1010 16:50:46.199190 4799 generic.go:334] "Generic (PLEG): container finished" podID="a8dccd24-a3ca-4f98-90b4-e2943cd228d3" containerID="4a7f26c0a29102620ae44684ad6b914b3b0a0afa490ea3c581dc7f606034046b" exitCode=0
Oct 10 16:50:46 crc kubenswrapper[4799]: I1010 16:50:46.199210 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-mrh2w" event={"ID":"a8dccd24-a3ca-4f98-90b4-e2943cd228d3","Type":"ContainerDied","Data":"4a7f26c0a29102620ae44684ad6b914b3b0a0afa490ea3c581dc7f606034046b"}
Oct 10 16:50:46 crc kubenswrapper[4799]: I1010 16:50:46.597885 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-5fpmb"
Oct 10 16:50:46 crc kubenswrapper[4799]: I1010 16:50:46.695241 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z89c9\" (UniqueName: \"kubernetes.io/projected/6d2833c7-9e1e-4063-93a1-54aded9b6daf-kube-api-access-z89c9\") pod \"6d2833c7-9e1e-4063-93a1-54aded9b6daf\" (UID: \"6d2833c7-9e1e-4063-93a1-54aded9b6daf\") "
Oct 10 16:50:46 crc kubenswrapper[4799]: I1010 16:50:46.695347 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6d2833c7-9e1e-4063-93a1-54aded9b6daf-logs\") pod \"6d2833c7-9e1e-4063-93a1-54aded9b6daf\" (UID: \"6d2833c7-9e1e-4063-93a1-54aded9b6daf\") "
Oct 10 16:50:46 crc kubenswrapper[4799]: I1010 16:50:46.695384 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d2833c7-9e1e-4063-93a1-54aded9b6daf-config-data\") pod \"6d2833c7-9e1e-4063-93a1-54aded9b6daf\" (UID: \"6d2833c7-9e1e-4063-93a1-54aded9b6daf\") "
Oct 10 16:50:46 crc kubenswrapper[4799]: I1010 16:50:46.695445 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d2833c7-9e1e-4063-93a1-54aded9b6daf-combined-ca-bundle\") pod \"6d2833c7-9e1e-4063-93a1-54aded9b6daf\" (UID: \"6d2833c7-9e1e-4063-93a1-54aded9b6daf\") "
Oct 10 16:50:46 crc kubenswrapper[4799]: I1010 16:50:46.695486 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d2833c7-9e1e-4063-93a1-54aded9b6daf-scripts\") pod \"6d2833c7-9e1e-4063-93a1-54aded9b6daf\" (UID: \"6d2833c7-9e1e-4063-93a1-54aded9b6daf\") "
Oct 10 16:50:46 crc kubenswrapper[4799]: I1010 16:50:46.695904 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6d2833c7-9e1e-4063-93a1-54aded9b6daf-logs" (OuterVolumeSpecName: "logs") pod "6d2833c7-9e1e-4063-93a1-54aded9b6daf" (UID: "6d2833c7-9e1e-4063-93a1-54aded9b6daf"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 16:50:46 crc kubenswrapper[4799]: I1010 16:50:46.701906 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d2833c7-9e1e-4063-93a1-54aded9b6daf-scripts" (OuterVolumeSpecName: "scripts") pod "6d2833c7-9e1e-4063-93a1-54aded9b6daf" (UID: "6d2833c7-9e1e-4063-93a1-54aded9b6daf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 16:50:46 crc kubenswrapper[4799]: I1010 16:50:46.718824 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d2833c7-9e1e-4063-93a1-54aded9b6daf-kube-api-access-z89c9" (OuterVolumeSpecName: "kube-api-access-z89c9") pod "6d2833c7-9e1e-4063-93a1-54aded9b6daf" (UID: "6d2833c7-9e1e-4063-93a1-54aded9b6daf"). InnerVolumeSpecName "kube-api-access-z89c9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 16:50:46 crc kubenswrapper[4799]: I1010 16:50:46.731711 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d2833c7-9e1e-4063-93a1-54aded9b6daf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6d2833c7-9e1e-4063-93a1-54aded9b6daf" (UID: "6d2833c7-9e1e-4063-93a1-54aded9b6daf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 16:50:46 crc kubenswrapper[4799]: I1010 16:50:46.739677 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d2833c7-9e1e-4063-93a1-54aded9b6daf-config-data" (OuterVolumeSpecName: "config-data") pod "6d2833c7-9e1e-4063-93a1-54aded9b6daf" (UID: "6d2833c7-9e1e-4063-93a1-54aded9b6daf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 16:50:46 crc kubenswrapper[4799]: I1010 16:50:46.797418 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z89c9\" (UniqueName: \"kubernetes.io/projected/6d2833c7-9e1e-4063-93a1-54aded9b6daf-kube-api-access-z89c9\") on node \"crc\" DevicePath \"\""
Oct 10 16:50:46 crc kubenswrapper[4799]: I1010 16:50:46.797455 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6d2833c7-9e1e-4063-93a1-54aded9b6daf-logs\") on node \"crc\" DevicePath \"\""
Oct 10 16:50:46 crc kubenswrapper[4799]: I1010 16:50:46.797468 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d2833c7-9e1e-4063-93a1-54aded9b6daf-config-data\") on node \"crc\" DevicePath \"\""
Oct 10 16:50:46 crc kubenswrapper[4799]: I1010 16:50:46.797479 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d2833c7-9e1e-4063-93a1-54aded9b6daf-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 10 16:50:46 crc kubenswrapper[4799]: I1010 16:50:46.797489 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d2833c7-9e1e-4063-93a1-54aded9b6daf-scripts\") on node \"crc\" DevicePath \"\""
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.214184 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-5fpmb"
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.214174 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-5fpmb" event={"ID":"6d2833c7-9e1e-4063-93a1-54aded9b6daf","Type":"ContainerDied","Data":"19d7dac27f91892a97c8d46b6058de7c6bc0cfb0ca66247b95d8c2cf6b014697"}
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.214812 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="19d7dac27f91892a97c8d46b6058de7c6bc0cfb0ca66247b95d8c2cf6b014697"
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.217544 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"145ba828d4b654e155342b2053228303da0bf7c989b77f4342b3cbafaea6b6c8"}
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.225727 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"766235b0-a5b9-4448-8dac-1afd1ca60e50","Type":"ContainerStarted","Data":"2835675d74c0d34b7cae5f37da026d20cdbf84cd064c0374ed6191f0417ceb02"}
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.358433 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-657bb59659-swzhl"]
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.468152 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-mrh2w"
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.621355 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8dccd24-a3ca-4f98-90b4-e2943cd228d3-combined-ca-bundle\") pod \"a8dccd24-a3ca-4f98-90b4-e2943cd228d3\" (UID: \"a8dccd24-a3ca-4f98-90b4-e2943cd228d3\") "
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.621553 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x72db\" (UniqueName: \"kubernetes.io/projected/a8dccd24-a3ca-4f98-90b4-e2943cd228d3-kube-api-access-x72db\") pod \"a8dccd24-a3ca-4f98-90b4-e2943cd228d3\" (UID: \"a8dccd24-a3ca-4f98-90b4-e2943cd228d3\") "
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.621626 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a8dccd24-a3ca-4f98-90b4-e2943cd228d3-db-sync-config-data\") pod \"a8dccd24-a3ca-4f98-90b4-e2943cd228d3\" (UID: \"a8dccd24-a3ca-4f98-90b4-e2943cd228d3\") "
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.632672 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8dccd24-a3ca-4f98-90b4-e2943cd228d3-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "a8dccd24-a3ca-4f98-90b4-e2943cd228d3" (UID: "a8dccd24-a3ca-4f98-90b4-e2943cd228d3"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.634600 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8dccd24-a3ca-4f98-90b4-e2943cd228d3-kube-api-access-x72db" (OuterVolumeSpecName: "kube-api-access-x72db") pod "a8dccd24-a3ca-4f98-90b4-e2943cd228d3" (UID: "a8dccd24-a3ca-4f98-90b4-e2943cd228d3"). InnerVolumeSpecName "kube-api-access-x72db". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.690688 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-6565b9cf48-rl77d"]
Oct 10 16:50:47 crc kubenswrapper[4799]: E1010 16:50:47.691433 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8dccd24-a3ca-4f98-90b4-e2943cd228d3" containerName="barbican-db-sync"
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.691456 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8dccd24-a3ca-4f98-90b4-e2943cd228d3" containerName="barbican-db-sync"
Oct 10 16:50:47 crc kubenswrapper[4799]: E1010 16:50:47.691472 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d2833c7-9e1e-4063-93a1-54aded9b6daf" containerName="placement-db-sync"
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.691481 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d2833c7-9e1e-4063-93a1-54aded9b6daf" containerName="placement-db-sync"
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.691684 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8dccd24-a3ca-4f98-90b4-e2943cd228d3" containerName="barbican-db-sync"
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.691719 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d2833c7-9e1e-4063-93a1-54aded9b6daf" containerName="placement-db-sync"
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.692867 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-6565b9cf48-rl77d"
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.697179 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc"
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.697364 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.697508 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.697641 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-gv9md"
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.699210 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc"
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.701681 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8dccd24-a3ca-4f98-90b4-e2943cd228d3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a8dccd24-a3ca-4f98-90b4-e2943cd228d3" (UID: "a8dccd24-a3ca-4f98-90b4-e2943cd228d3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.723029 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-6565b9cf48-rl77d"]
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.724208 4799 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a8dccd24-a3ca-4f98-90b4-e2943cd228d3-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.724241 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8dccd24-a3ca-4f98-90b4-e2943cd228d3-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.724251 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x72db\" (UniqueName: \"kubernetes.io/projected/a8dccd24-a3ca-4f98-90b4-e2943cd228d3-kube-api-access-x72db\") on node \"crc\" DevicePath \"\""
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.825964 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-combined-ca-bundle\") pod \"placement-6565b9cf48-rl77d\" (UID: \"7ab7b7c1-e89f-4562-882b-4f517f90f8c8\") " pod="openstack/placement-6565b9cf48-rl77d"
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.826047 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-logs\") pod \"placement-6565b9cf48-rl77d\" (UID: \"7ab7b7c1-e89f-4562-882b-4f517f90f8c8\") " pod="openstack/placement-6565b9cf48-rl77d"
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.826097 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vxgvv\" (UniqueName: \"kubernetes.io/projected/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-kube-api-access-vxgvv\") pod \"placement-6565b9cf48-rl77d\" (UID: \"7ab7b7c1-e89f-4562-882b-4f517f90f8c8\") " pod="openstack/placement-6565b9cf48-rl77d"
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.826119 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-scripts\") pod \"placement-6565b9cf48-rl77d\" (UID: \"7ab7b7c1-e89f-4562-882b-4f517f90f8c8\") " pod="openstack/placement-6565b9cf48-rl77d"
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.826136 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-internal-tls-certs\") pod \"placement-6565b9cf48-rl77d\" (UID: \"7ab7b7c1-e89f-4562-882b-4f517f90f8c8\") " pod="openstack/placement-6565b9cf48-rl77d"
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.826162 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-public-tls-certs\") pod \"placement-6565b9cf48-rl77d\" (UID: \"7ab7b7c1-e89f-4562-882b-4f517f90f8c8\") " pod="openstack/placement-6565b9cf48-rl77d"
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.826194 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-config-data\") pod \"placement-6565b9cf48-rl77d\" (UID: \"7ab7b7c1-e89f-4562-882b-4f517f90f8c8\") " pod="openstack/placement-6565b9cf48-rl77d"
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.927474 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-config-data\") pod \"placement-6565b9cf48-rl77d\" (UID: \"7ab7b7c1-e89f-4562-882b-4f517f90f8c8\") " pod="openstack/placement-6565b9cf48-rl77d"
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.927537 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-combined-ca-bundle\") pod \"placement-6565b9cf48-rl77d\" (UID: \"7ab7b7c1-e89f-4562-882b-4f517f90f8c8\") " pod="openstack/placement-6565b9cf48-rl77d"
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.927590 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-logs\") pod \"placement-6565b9cf48-rl77d\" (UID: \"7ab7b7c1-e89f-4562-882b-4f517f90f8c8\") " pod="openstack/placement-6565b9cf48-rl77d"
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.927639 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vxgvv\" (UniqueName: \"kubernetes.io/projected/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-kube-api-access-vxgvv\") pod \"placement-6565b9cf48-rl77d\" (UID: \"7ab7b7c1-e89f-4562-882b-4f517f90f8c8\") " pod="openstack/placement-6565b9cf48-rl77d"
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.927661 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-scripts\") pod \"placement-6565b9cf48-rl77d\" (UID: \"7ab7b7c1-e89f-4562-882b-4f517f90f8c8\") " pod="openstack/placement-6565b9cf48-rl77d"
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.927680 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-internal-tls-certs\") pod \"placement-6565b9cf48-rl77d\" (UID: \"7ab7b7c1-e89f-4562-882b-4f517f90f8c8\") " pod="openstack/placement-6565b9cf48-rl77d"
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.927706 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-public-tls-certs\") pod \"placement-6565b9cf48-rl77d\" (UID: \"7ab7b7c1-e89f-4562-882b-4f517f90f8c8\") " pod="openstack/placement-6565b9cf48-rl77d"
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.928134 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-logs\") pod \"placement-6565b9cf48-rl77d\" (UID: \"7ab7b7c1-e89f-4562-882b-4f517f90f8c8\") " pod="openstack/placement-6565b9cf48-rl77d"
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.931482 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-public-tls-certs\") pod \"placement-6565b9cf48-rl77d\" (UID: \"7ab7b7c1-e89f-4562-882b-4f517f90f8c8\") " pod="openstack/placement-6565b9cf48-rl77d"
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.932390 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-scripts\") pod \"placement-6565b9cf48-rl77d\" (UID: \"7ab7b7c1-e89f-4562-882b-4f517f90f8c8\") " pod="openstack/placement-6565b9cf48-rl77d"
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.932971 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-internal-tls-certs\") pod \"placement-6565b9cf48-rl77d\" (UID: \"7ab7b7c1-e89f-4562-882b-4f517f90f8c8\") " pod="openstack/placement-6565b9cf48-rl77d"
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.933128 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-combined-ca-bundle\") pod \"placement-6565b9cf48-rl77d\" (UID: \"7ab7b7c1-e89f-4562-882b-4f517f90f8c8\") " pod="openstack/placement-6565b9cf48-rl77d"
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.946121 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-config-data\") pod \"placement-6565b9cf48-rl77d\" (UID: \"7ab7b7c1-e89f-4562-882b-4f517f90f8c8\") " pod="openstack/placement-6565b9cf48-rl77d"
Oct 10 16:50:47 crc kubenswrapper[4799]: I1010 16:50:47.947052 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vxgvv\" (UniqueName: \"kubernetes.io/projected/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-kube-api-access-vxgvv\") pod \"placement-6565b9cf48-rl77d\" (UID: \"7ab7b7c1-e89f-4562-882b-4f517f90f8c8\") " pod="openstack/placement-6565b9cf48-rl77d"
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.014314 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-6565b9cf48-rl77d"
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.248830 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0","Type":"ContainerStarted","Data":"d09f1c41ff83e3f94d5800a1f1a3455a847a7fa8e5a288a1102d81b7fccd07bf"}
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.250635 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-657bb59659-swzhl" event={"ID":"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1","Type":"ContainerStarted","Data":"791ae33161eebdb140cb4872e47266b15abe32970fcb198663d953365fc9278c"}
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.250664 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-657bb59659-swzhl" event={"ID":"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1","Type":"ContainerStarted","Data":"f8a7e55488073c557402150956988c39ba3b5d6889e82d7f6be3ccf70be386d7"}
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.250700 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-657bb59659-swzhl"
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.252168 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-mrh2w"
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.252371 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-mrh2w" event={"ID":"a8dccd24-a3ca-4f98-90b4-e2943cd228d3","Type":"ContainerDied","Data":"e3313f07f4776368beb200ef3f9748dc26824ad436bc705b8083926f0e14d489"}
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.252412 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e3313f07f4776368beb200ef3f9748dc26824ad436bc705b8083926f0e14d489"
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.294559 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=7.294543175 podStartE2EDuration="7.294543175s" podCreationTimestamp="2025-10-10 16:50:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:50:48.268967394 +0000 UTC m=+1141.777291519" watchObservedRunningTime="2025-10-10 16:50:48.294543175 +0000 UTC m=+1141.802867290"
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.295877 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-657bb59659-swzhl" podStartSLOduration=3.295872398 podStartE2EDuration="3.295872398s" podCreationTimestamp="2025-10-10 16:50:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:50:48.293733085 +0000 UTC m=+1141.802057210" watchObservedRunningTime="2025-10-10 16:50:48.295872398 +0000 UTC m=+1141.804196513"
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.495599 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-754947f5d7-z2rks"]
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.513119 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-5b85b5697b-9wc6c"]
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.514389 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-5b85b5697b-9wc6c"
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.514964 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-754947f5d7-z2rks"
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.522981 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data"
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.523120 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-kspsv"
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.523233 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data"
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.523285 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data"
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.523330 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-5b85b5697b-9wc6c"]
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.535639 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-754947f5d7-z2rks"]
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.594737 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67cc85ff7-csrr5"]
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.596532 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67cc85ff7-csrr5"
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.616871 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67cc85ff7-csrr5"]
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.642017 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a05167f-cd58-4f9f-806b-8d71271320d2-combined-ca-bundle\") pod \"barbican-worker-754947f5d7-z2rks\" (UID: \"3a05167f-cd58-4f9f-806b-8d71271320d2\") " pod="openstack/barbican-worker-754947f5d7-z2rks"
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.643884 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1bd0e459-efb9-463c-a8fc-d08a3194f3d9-combined-ca-bundle\") pod \"barbican-keystone-listener-5b85b5697b-9wc6c\" (UID: \"1bd0e459-efb9-463c-a8fc-d08a3194f3d9\") " pod="openstack/barbican-keystone-listener-5b85b5697b-9wc6c"
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.643933 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-887gk\" (UniqueName: \"kubernetes.io/projected/1bd0e459-efb9-463c-a8fc-d08a3194f3d9-kube-api-access-887gk\") pod \"barbican-keystone-listener-5b85b5697b-9wc6c\" (UID: \"1bd0e459-efb9-463c-a8fc-d08a3194f3d9\") " pod="openstack/barbican-keystone-listener-5b85b5697b-9wc6c"
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.643964 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfbmz\" (UniqueName: \"kubernetes.io/projected/3a05167f-cd58-4f9f-806b-8d71271320d2-kube-api-access-dfbmz\") pod \"barbican-worker-754947f5d7-z2rks\" (UID: \"3a05167f-cd58-4f9f-806b-8d71271320d2\") " pod="openstack/barbican-worker-754947f5d7-z2rks"
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.644088 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1bd0e459-efb9-463c-a8fc-d08a3194f3d9-config-data\") pod \"barbican-keystone-listener-5b85b5697b-9wc6c\" (UID: \"1bd0e459-efb9-463c-a8fc-d08a3194f3d9\") " pod="openstack/barbican-keystone-listener-5b85b5697b-9wc6c"
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.644130 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a05167f-cd58-4f9f-806b-8d71271320d2-config-data\") pod \"barbican-worker-754947f5d7-z2rks\" (UID: \"3a05167f-cd58-4f9f-806b-8d71271320d2\") " pod="openstack/barbican-worker-754947f5d7-z2rks"
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.644486 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3a05167f-cd58-4f9f-806b-8d71271320d2-config-data-custom\") pod \"barbican-worker-754947f5d7-z2rks\" (UID: \"3a05167f-cd58-4f9f-806b-8d71271320d2\") " pod="openstack/barbican-worker-754947f5d7-z2rks"
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.644641 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1bd0e459-efb9-463c-a8fc-d08a3194f3d9-logs\") pod \"barbican-keystone-listener-5b85b5697b-9wc6c\" (UID: \"1bd0e459-efb9-463c-a8fc-d08a3194f3d9\") " pod="openstack/barbican-keystone-listener-5b85b5697b-9wc6c"
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.644931 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3a05167f-cd58-4f9f-806b-8d71271320d2-logs\") pod \"barbican-worker-754947f5d7-z2rks\" (UID: \"3a05167f-cd58-4f9f-806b-8d71271320d2\") " pod="openstack/barbican-worker-754947f5d7-z2rks"
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.645025 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1bd0e459-efb9-463c-a8fc-d08a3194f3d9-config-data-custom\") pod \"barbican-keystone-listener-5b85b5697b-9wc6c\" (UID: \"1bd0e459-efb9-463c-a8fc-d08a3194f3d9\") " pod="openstack/barbican-keystone-listener-5b85b5697b-9wc6c"
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.702978 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-7b6d7656b4-cxw2s"]
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.704265 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7b6d7656b4-cxw2s"
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.707473 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data"
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.730063 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7b6d7656b4-cxw2s"]
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.746036 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1bd0e459-efb9-463c-a8fc-d08a3194f3d9-config-data\") pod \"barbican-keystone-listener-5b85b5697b-9wc6c\" (UID: \"1bd0e459-efb9-463c-a8fc-d08a3194f3d9\") " pod="openstack/barbican-keystone-listener-5b85b5697b-9wc6c"
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.746255 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a05167f-cd58-4f9f-806b-8d71271320d2-config-data\") pod \"barbican-worker-754947f5d7-z2rks\" (UID: \"3a05167f-cd58-4f9f-806b-8d71271320d2\") " pod="openstack/barbican-worker-754947f5d7-z2rks"
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.746285 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bd142c20-258d-4917-86f1-f60cbaf56268-dns-swift-storage-0\") pod \"dnsmasq-dns-67cc85ff7-csrr5\" (UID: \"bd142c20-258d-4917-86f1-f60cbaf56268\") " pod="openstack/dnsmasq-dns-67cc85ff7-csrr5"
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.746338 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3a05167f-cd58-4f9f-806b-8d71271320d2-config-data-custom\") pod \"barbican-worker-754947f5d7-z2rks\" (UID: \"3a05167f-cd58-4f9f-806b-8d71271320d2\") " pod="openstack/barbican-worker-754947f5d7-z2rks"
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.746368 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kq6wk\" (UniqueName: \"kubernetes.io/projected/bd142c20-258d-4917-86f1-f60cbaf56268-kube-api-access-kq6wk\") pod \"dnsmasq-dns-67cc85ff7-csrr5\" (UID: \"bd142c20-258d-4917-86f1-f60cbaf56268\") " pod="openstack/dnsmasq-dns-67cc85ff7-csrr5"
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.746402 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd142c20-258d-4917-86f1-f60cbaf56268-config\") pod \"dnsmasq-dns-67cc85ff7-csrr5\" (UID: \"bd142c20-258d-4917-86f1-f60cbaf56268\") " pod="openstack/dnsmasq-dns-67cc85ff7-csrr5"
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.746582 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1bd0e459-efb9-463c-a8fc-d08a3194f3d9-logs\") pod \"barbican-keystone-listener-5b85b5697b-9wc6c\" (UID: \"1bd0e459-efb9-463c-a8fc-d08a3194f3d9\") " pod="openstack/barbican-keystone-listener-5b85b5697b-9wc6c"
Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.746617 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bd142c20-258d-4917-86f1-f60cbaf56268-dns-svc\") pod \"dnsmasq-dns-67cc85ff7-csrr5\" (UID:
\"bd142c20-258d-4917-86f1-f60cbaf56268\") " pod="openstack/dnsmasq-dns-67cc85ff7-csrr5" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.746647 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3a05167f-cd58-4f9f-806b-8d71271320d2-logs\") pod \"barbican-worker-754947f5d7-z2rks\" (UID: \"3a05167f-cd58-4f9f-806b-8d71271320d2\") " pod="openstack/barbican-worker-754947f5d7-z2rks" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.746681 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bd142c20-258d-4917-86f1-f60cbaf56268-ovsdbserver-nb\") pod \"dnsmasq-dns-67cc85ff7-csrr5\" (UID: \"bd142c20-258d-4917-86f1-f60cbaf56268\") " pod="openstack/dnsmasq-dns-67cc85ff7-csrr5" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.746730 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bd142c20-258d-4917-86f1-f60cbaf56268-ovsdbserver-sb\") pod \"dnsmasq-dns-67cc85ff7-csrr5\" (UID: \"bd142c20-258d-4917-86f1-f60cbaf56268\") " pod="openstack/dnsmasq-dns-67cc85ff7-csrr5" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.746768 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1bd0e459-efb9-463c-a8fc-d08a3194f3d9-config-data-custom\") pod \"barbican-keystone-listener-5b85b5697b-9wc6c\" (UID: \"1bd0e459-efb9-463c-a8fc-d08a3194f3d9\") " pod="openstack/barbican-keystone-listener-5b85b5697b-9wc6c" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.746807 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a05167f-cd58-4f9f-806b-8d71271320d2-combined-ca-bundle\") pod \"barbican-worker-754947f5d7-z2rks\" (UID: \"3a05167f-cd58-4f9f-806b-8d71271320d2\") " pod="openstack/barbican-worker-754947f5d7-z2rks" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.746843 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1bd0e459-efb9-463c-a8fc-d08a3194f3d9-combined-ca-bundle\") pod \"barbican-keystone-listener-5b85b5697b-9wc6c\" (UID: \"1bd0e459-efb9-463c-a8fc-d08a3194f3d9\") " pod="openstack/barbican-keystone-listener-5b85b5697b-9wc6c" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.746876 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-887gk\" (UniqueName: \"kubernetes.io/projected/1bd0e459-efb9-463c-a8fc-d08a3194f3d9-kube-api-access-887gk\") pod \"barbican-keystone-listener-5b85b5697b-9wc6c\" (UID: \"1bd0e459-efb9-463c-a8fc-d08a3194f3d9\") " pod="openstack/barbican-keystone-listener-5b85b5697b-9wc6c" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.746901 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfbmz\" (UniqueName: \"kubernetes.io/projected/3a05167f-cd58-4f9f-806b-8d71271320d2-kube-api-access-dfbmz\") pod \"barbican-worker-754947f5d7-z2rks\" (UID: \"3a05167f-cd58-4f9f-806b-8d71271320d2\") " pod="openstack/barbican-worker-754947f5d7-z2rks" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.750553 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/3a05167f-cd58-4f9f-806b-8d71271320d2-logs\") pod \"barbican-worker-754947f5d7-z2rks\" (UID: \"3a05167f-cd58-4f9f-806b-8d71271320d2\") " pod="openstack/barbican-worker-754947f5d7-z2rks" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.752560 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1bd0e459-efb9-463c-a8fc-d08a3194f3d9-combined-ca-bundle\") pod \"barbican-keystone-listener-5b85b5697b-9wc6c\" (UID: \"1bd0e459-efb9-463c-a8fc-d08a3194f3d9\") " pod="openstack/barbican-keystone-listener-5b85b5697b-9wc6c" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.753106 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1bd0e459-efb9-463c-a8fc-d08a3194f3d9-config-data\") pod \"barbican-keystone-listener-5b85b5697b-9wc6c\" (UID: \"1bd0e459-efb9-463c-a8fc-d08a3194f3d9\") " pod="openstack/barbican-keystone-listener-5b85b5697b-9wc6c" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.753616 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1bd0e459-efb9-463c-a8fc-d08a3194f3d9-logs\") pod \"barbican-keystone-listener-5b85b5697b-9wc6c\" (UID: \"1bd0e459-efb9-463c-a8fc-d08a3194f3d9\") " pod="openstack/barbican-keystone-listener-5b85b5697b-9wc6c" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.763365 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a05167f-cd58-4f9f-806b-8d71271320d2-combined-ca-bundle\") pod \"barbican-worker-754947f5d7-z2rks\" (UID: \"3a05167f-cd58-4f9f-806b-8d71271320d2\") " pod="openstack/barbican-worker-754947f5d7-z2rks" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.765323 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3a05167f-cd58-4f9f-806b-8d71271320d2-config-data-custom\") pod \"barbican-worker-754947f5d7-z2rks\" (UID: \"3a05167f-cd58-4f9f-806b-8d71271320d2\") " pod="openstack/barbican-worker-754947f5d7-z2rks" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.766062 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a05167f-cd58-4f9f-806b-8d71271320d2-config-data\") pod \"barbican-worker-754947f5d7-z2rks\" (UID: \"3a05167f-cd58-4f9f-806b-8d71271320d2\") " pod="openstack/barbican-worker-754947f5d7-z2rks" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.766349 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1bd0e459-efb9-463c-a8fc-d08a3194f3d9-config-data-custom\") pod \"barbican-keystone-listener-5b85b5697b-9wc6c\" (UID: \"1bd0e459-efb9-463c-a8fc-d08a3194f3d9\") " pod="openstack/barbican-keystone-listener-5b85b5697b-9wc6c" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.770372 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-887gk\" (UniqueName: \"kubernetes.io/projected/1bd0e459-efb9-463c-a8fc-d08a3194f3d9-kube-api-access-887gk\") pod \"barbican-keystone-listener-5b85b5697b-9wc6c\" (UID: \"1bd0e459-efb9-463c-a8fc-d08a3194f3d9\") " pod="openstack/barbican-keystone-listener-5b85b5697b-9wc6c" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.773394 4799 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access-dfbmz\" (UniqueName: \"kubernetes.io/projected/3a05167f-cd58-4f9f-806b-8d71271320d2-kube-api-access-dfbmz\") pod \"barbican-worker-754947f5d7-z2rks\" (UID: \"3a05167f-cd58-4f9f-806b-8d71271320d2\") " pod="openstack/barbican-worker-754947f5d7-z2rks" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.846662 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-5b85b5697b-9wc6c" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.847339 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-754947f5d7-z2rks" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.848106 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w2mdk\" (UniqueName: \"kubernetes.io/projected/bebadb41-8336-4b6c-b20d-6b8130b165ce-kube-api-access-w2mdk\") pod \"barbican-api-7b6d7656b4-cxw2s\" (UID: \"bebadb41-8336-4b6c-b20d-6b8130b165ce\") " pod="openstack/barbican-api-7b6d7656b4-cxw2s" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.848176 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bd142c20-258d-4917-86f1-f60cbaf56268-ovsdbserver-nb\") pod \"dnsmasq-dns-67cc85ff7-csrr5\" (UID: \"bd142c20-258d-4917-86f1-f60cbaf56268\") " pod="openstack/dnsmasq-dns-67cc85ff7-csrr5" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.848235 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bebadb41-8336-4b6c-b20d-6b8130b165ce-config-data\") pod \"barbican-api-7b6d7656b4-cxw2s\" (UID: \"bebadb41-8336-4b6c-b20d-6b8130b165ce\") " pod="openstack/barbican-api-7b6d7656b4-cxw2s" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.848258 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bd142c20-258d-4917-86f1-f60cbaf56268-ovsdbserver-sb\") pod \"dnsmasq-dns-67cc85ff7-csrr5\" (UID: \"bd142c20-258d-4917-86f1-f60cbaf56268\") " pod="openstack/dnsmasq-dns-67cc85ff7-csrr5" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.848298 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bebadb41-8336-4b6c-b20d-6b8130b165ce-config-data-custom\") pod \"barbican-api-7b6d7656b4-cxw2s\" (UID: \"bebadb41-8336-4b6c-b20d-6b8130b165ce\") " pod="openstack/barbican-api-7b6d7656b4-cxw2s" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.848383 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bd142c20-258d-4917-86f1-f60cbaf56268-dns-swift-storage-0\") pod \"dnsmasq-dns-67cc85ff7-csrr5\" (UID: \"bd142c20-258d-4917-86f1-f60cbaf56268\") " pod="openstack/dnsmasq-dns-67cc85ff7-csrr5" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.848431 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kq6wk\" (UniqueName: \"kubernetes.io/projected/bd142c20-258d-4917-86f1-f60cbaf56268-kube-api-access-kq6wk\") pod \"dnsmasq-dns-67cc85ff7-csrr5\" (UID: \"bd142c20-258d-4917-86f1-f60cbaf56268\") " pod="openstack/dnsmasq-dns-67cc85ff7-csrr5" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.848454 4799 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd142c20-258d-4917-86f1-f60cbaf56268-config\") pod \"dnsmasq-dns-67cc85ff7-csrr5\" (UID: \"bd142c20-258d-4917-86f1-f60cbaf56268\") " pod="openstack/dnsmasq-dns-67cc85ff7-csrr5" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.848475 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bebadb41-8336-4b6c-b20d-6b8130b165ce-combined-ca-bundle\") pod \"barbican-api-7b6d7656b4-cxw2s\" (UID: \"bebadb41-8336-4b6c-b20d-6b8130b165ce\") " pod="openstack/barbican-api-7b6d7656b4-cxw2s" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.848495 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bd142c20-258d-4917-86f1-f60cbaf56268-dns-svc\") pod \"dnsmasq-dns-67cc85ff7-csrr5\" (UID: \"bd142c20-258d-4917-86f1-f60cbaf56268\") " pod="openstack/dnsmasq-dns-67cc85ff7-csrr5" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.848516 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bebadb41-8336-4b6c-b20d-6b8130b165ce-logs\") pod \"barbican-api-7b6d7656b4-cxw2s\" (UID: \"bebadb41-8336-4b6c-b20d-6b8130b165ce\") " pod="openstack/barbican-api-7b6d7656b4-cxw2s" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.851497 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bd142c20-258d-4917-86f1-f60cbaf56268-ovsdbserver-sb\") pod \"dnsmasq-dns-67cc85ff7-csrr5\" (UID: \"bd142c20-258d-4917-86f1-f60cbaf56268\") " pod="openstack/dnsmasq-dns-67cc85ff7-csrr5" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.851590 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bd142c20-258d-4917-86f1-f60cbaf56268-ovsdbserver-nb\") pod \"dnsmasq-dns-67cc85ff7-csrr5\" (UID: \"bd142c20-258d-4917-86f1-f60cbaf56268\") " pod="openstack/dnsmasq-dns-67cc85ff7-csrr5" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.852257 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bd142c20-258d-4917-86f1-f60cbaf56268-dns-swift-storage-0\") pod \"dnsmasq-dns-67cc85ff7-csrr5\" (UID: \"bd142c20-258d-4917-86f1-f60cbaf56268\") " pod="openstack/dnsmasq-dns-67cc85ff7-csrr5" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.853208 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd142c20-258d-4917-86f1-f60cbaf56268-config\") pod \"dnsmasq-dns-67cc85ff7-csrr5\" (UID: \"bd142c20-258d-4917-86f1-f60cbaf56268\") " pod="openstack/dnsmasq-dns-67cc85ff7-csrr5" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.854560 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bd142c20-258d-4917-86f1-f60cbaf56268-dns-svc\") pod \"dnsmasq-dns-67cc85ff7-csrr5\" (UID: \"bd142c20-258d-4917-86f1-f60cbaf56268\") " pod="openstack/dnsmasq-dns-67cc85ff7-csrr5" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.866526 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kq6wk\" (UniqueName: 
\"kubernetes.io/projected/bd142c20-258d-4917-86f1-f60cbaf56268-kube-api-access-kq6wk\") pod \"dnsmasq-dns-67cc85ff7-csrr5\" (UID: \"bd142c20-258d-4917-86f1-f60cbaf56268\") " pod="openstack/dnsmasq-dns-67cc85ff7-csrr5" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.932182 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67cc85ff7-csrr5" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.950523 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bebadb41-8336-4b6c-b20d-6b8130b165ce-logs\") pod \"barbican-api-7b6d7656b4-cxw2s\" (UID: \"bebadb41-8336-4b6c-b20d-6b8130b165ce\") " pod="openstack/barbican-api-7b6d7656b4-cxw2s" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.950598 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w2mdk\" (UniqueName: \"kubernetes.io/projected/bebadb41-8336-4b6c-b20d-6b8130b165ce-kube-api-access-w2mdk\") pod \"barbican-api-7b6d7656b4-cxw2s\" (UID: \"bebadb41-8336-4b6c-b20d-6b8130b165ce\") " pod="openstack/barbican-api-7b6d7656b4-cxw2s" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.950671 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bebadb41-8336-4b6c-b20d-6b8130b165ce-config-data\") pod \"barbican-api-7b6d7656b4-cxw2s\" (UID: \"bebadb41-8336-4b6c-b20d-6b8130b165ce\") " pod="openstack/barbican-api-7b6d7656b4-cxw2s" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.950728 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bebadb41-8336-4b6c-b20d-6b8130b165ce-config-data-custom\") pod \"barbican-api-7b6d7656b4-cxw2s\" (UID: \"bebadb41-8336-4b6c-b20d-6b8130b165ce\") " pod="openstack/barbican-api-7b6d7656b4-cxw2s" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.950918 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bebadb41-8336-4b6c-b20d-6b8130b165ce-combined-ca-bundle\") pod \"barbican-api-7b6d7656b4-cxw2s\" (UID: \"bebadb41-8336-4b6c-b20d-6b8130b165ce\") " pod="openstack/barbican-api-7b6d7656b4-cxw2s" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.950957 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bebadb41-8336-4b6c-b20d-6b8130b165ce-logs\") pod \"barbican-api-7b6d7656b4-cxw2s\" (UID: \"bebadb41-8336-4b6c-b20d-6b8130b165ce\") " pod="openstack/barbican-api-7b6d7656b4-cxw2s" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.955001 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bebadb41-8336-4b6c-b20d-6b8130b165ce-combined-ca-bundle\") pod \"barbican-api-7b6d7656b4-cxw2s\" (UID: \"bebadb41-8336-4b6c-b20d-6b8130b165ce\") " pod="openstack/barbican-api-7b6d7656b4-cxw2s" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.956069 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bebadb41-8336-4b6c-b20d-6b8130b165ce-config-data\") pod \"barbican-api-7b6d7656b4-cxw2s\" (UID: \"bebadb41-8336-4b6c-b20d-6b8130b165ce\") " pod="openstack/barbican-api-7b6d7656b4-cxw2s" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.959279 4799 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bebadb41-8336-4b6c-b20d-6b8130b165ce-config-data-custom\") pod \"barbican-api-7b6d7656b4-cxw2s\" (UID: \"bebadb41-8336-4b6c-b20d-6b8130b165ce\") " pod="openstack/barbican-api-7b6d7656b4-cxw2s" Oct 10 16:50:48 crc kubenswrapper[4799]: I1010 16:50:48.975897 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w2mdk\" (UniqueName: \"kubernetes.io/projected/bebadb41-8336-4b6c-b20d-6b8130b165ce-kube-api-access-w2mdk\") pod \"barbican-api-7b6d7656b4-cxw2s\" (UID: \"bebadb41-8336-4b6c-b20d-6b8130b165ce\") " pod="openstack/barbican-api-7b6d7656b4-cxw2s" Oct 10 16:50:49 crc kubenswrapper[4799]: I1010 16:50:49.024044 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7b6d7656b4-cxw2s" Oct 10 16:50:51 crc kubenswrapper[4799]: I1010 16:50:51.577829 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 10 16:50:51 crc kubenswrapper[4799]: I1010 16:50:51.583118 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 10 16:50:51 crc kubenswrapper[4799]: I1010 16:50:51.597068 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 10 16:50:51 crc kubenswrapper[4799]: I1010 16:50:51.597112 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 10 16:50:51 crc kubenswrapper[4799]: I1010 16:50:51.602851 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-56d84d574d-x5sbm"] Oct 10 16:50:51 crc kubenswrapper[4799]: I1010 16:50:51.604795 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-56d84d574d-x5sbm" Oct 10 16:50:51 crc kubenswrapper[4799]: I1010 16:50:51.607616 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Oct 10 16:50:51 crc kubenswrapper[4799]: I1010 16:50:51.607924 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Oct 10 16:50:51 crc kubenswrapper[4799]: I1010 16:50:51.609916 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-56d84d574d-x5sbm"] Oct 10 16:50:51 crc kubenswrapper[4799]: I1010 16:50:51.666914 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 10 16:50:51 crc kubenswrapper[4799]: I1010 16:50:51.682035 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 10 16:50:51 crc kubenswrapper[4799]: I1010 16:50:51.686133 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 10 16:50:51 crc kubenswrapper[4799]: I1010 16:50:51.693096 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 10 16:50:51 crc kubenswrapper[4799]: I1010 16:50:51.705881 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac766919-d788-40da-879a-627919926594-config-data\") pod \"barbican-api-56d84d574d-x5sbm\" (UID: \"ac766919-d788-40da-879a-627919926594\") " pod="openstack/barbican-api-56d84d574d-x5sbm" Oct 10 16:50:51 crc kubenswrapper[4799]: I1010 16:50:51.705965 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac766919-d788-40da-879a-627919926594-public-tls-certs\") pod \"barbican-api-56d84d574d-x5sbm\" (UID: \"ac766919-d788-40da-879a-627919926594\") " pod="openstack/barbican-api-56d84d574d-x5sbm" Oct 10 16:50:51 crc kubenswrapper[4799]: I1010 16:50:51.706028 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpmn4\" (UniqueName: \"kubernetes.io/projected/ac766919-d788-40da-879a-627919926594-kube-api-access-kpmn4\") pod \"barbican-api-56d84d574d-x5sbm\" (UID: \"ac766919-d788-40da-879a-627919926594\") " pod="openstack/barbican-api-56d84d574d-x5sbm" Oct 10 16:50:51 crc kubenswrapper[4799]: I1010 16:50:51.706131 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac766919-d788-40da-879a-627919926594-combined-ca-bundle\") pod \"barbican-api-56d84d574d-x5sbm\" (UID: \"ac766919-d788-40da-879a-627919926594\") " pod="openstack/barbican-api-56d84d574d-x5sbm" Oct 10 16:50:51 crc kubenswrapper[4799]: I1010 16:50:51.706156 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ac766919-d788-40da-879a-627919926594-logs\") pod \"barbican-api-56d84d574d-x5sbm\" (UID: \"ac766919-d788-40da-879a-627919926594\") " pod="openstack/barbican-api-56d84d574d-x5sbm" Oct 10 16:50:51 crc kubenswrapper[4799]: I1010 16:50:51.706225 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/ac766919-d788-40da-879a-627919926594-internal-tls-certs\") pod \"barbican-api-56d84d574d-x5sbm\" (UID: \"ac766919-d788-40da-879a-627919926594\") " pod="openstack/barbican-api-56d84d574d-x5sbm" Oct 10 16:50:51 crc kubenswrapper[4799]: I1010 16:50:51.706266 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ac766919-d788-40da-879a-627919926594-config-data-custom\") pod \"barbican-api-56d84d574d-x5sbm\" (UID: \"ac766919-d788-40da-879a-627919926594\") " pod="openstack/barbican-api-56d84d574d-x5sbm" Oct 10 16:50:51 crc kubenswrapper[4799]: I1010 16:50:51.810666 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac766919-d788-40da-879a-627919926594-config-data\") pod \"barbican-api-56d84d574d-x5sbm\" (UID: \"ac766919-d788-40da-879a-627919926594\") " pod="openstack/barbican-api-56d84d574d-x5sbm" Oct 10 16:50:51 crc kubenswrapper[4799]: I1010 16:50:51.810741 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac766919-d788-40da-879a-627919926594-public-tls-certs\") pod \"barbican-api-56d84d574d-x5sbm\" (UID: \"ac766919-d788-40da-879a-627919926594\") " pod="openstack/barbican-api-56d84d574d-x5sbm" Oct 10 16:50:51 crc kubenswrapper[4799]: I1010 16:50:51.810787 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpmn4\" (UniqueName: \"kubernetes.io/projected/ac766919-d788-40da-879a-627919926594-kube-api-access-kpmn4\") pod \"barbican-api-56d84d574d-x5sbm\" (UID: \"ac766919-d788-40da-879a-627919926594\") " pod="openstack/barbican-api-56d84d574d-x5sbm" Oct 10 16:50:51 crc kubenswrapper[4799]: I1010 16:50:51.811194 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac766919-d788-40da-879a-627919926594-combined-ca-bundle\") pod \"barbican-api-56d84d574d-x5sbm\" (UID: \"ac766919-d788-40da-879a-627919926594\") " pod="openstack/barbican-api-56d84d574d-x5sbm" Oct 10 16:50:51 crc kubenswrapper[4799]: I1010 16:50:51.811244 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ac766919-d788-40da-879a-627919926594-logs\") pod \"barbican-api-56d84d574d-x5sbm\" (UID: \"ac766919-d788-40da-879a-627919926594\") " pod="openstack/barbican-api-56d84d574d-x5sbm" Oct 10 16:50:51 crc kubenswrapper[4799]: I1010 16:50:51.811355 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac766919-d788-40da-879a-627919926594-internal-tls-certs\") pod \"barbican-api-56d84d574d-x5sbm\" (UID: \"ac766919-d788-40da-879a-627919926594\") " pod="openstack/barbican-api-56d84d574d-x5sbm" Oct 10 16:50:51 crc kubenswrapper[4799]: I1010 16:50:51.811453 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ac766919-d788-40da-879a-627919926594-config-data-custom\") pod \"barbican-api-56d84d574d-x5sbm\" (UID: \"ac766919-d788-40da-879a-627919926594\") " pod="openstack/barbican-api-56d84d574d-x5sbm" Oct 10 16:50:51 crc kubenswrapper[4799]: I1010 16:50:51.812420 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/ac766919-d788-40da-879a-627919926594-logs\") pod \"barbican-api-56d84d574d-x5sbm\" (UID: \"ac766919-d788-40da-879a-627919926594\") " pod="openstack/barbican-api-56d84d574d-x5sbm" Oct 10 16:50:51 crc kubenswrapper[4799]: I1010 16:50:51.820476 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac766919-d788-40da-879a-627919926594-internal-tls-certs\") pod \"barbican-api-56d84d574d-x5sbm\" (UID: \"ac766919-d788-40da-879a-627919926594\") " pod="openstack/barbican-api-56d84d574d-x5sbm" Oct 10 16:50:51 crc kubenswrapper[4799]: I1010 16:50:51.821845 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac766919-d788-40da-879a-627919926594-config-data\") pod \"barbican-api-56d84d574d-x5sbm\" (UID: \"ac766919-d788-40da-879a-627919926594\") " pod="openstack/barbican-api-56d84d574d-x5sbm" Oct 10 16:50:51 crc kubenswrapper[4799]: I1010 16:50:51.826571 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac766919-d788-40da-879a-627919926594-combined-ca-bundle\") pod \"barbican-api-56d84d574d-x5sbm\" (UID: \"ac766919-d788-40da-879a-627919926594\") " pod="openstack/barbican-api-56d84d574d-x5sbm" Oct 10 16:50:51 crc kubenswrapper[4799]: I1010 16:50:51.835296 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ac766919-d788-40da-879a-627919926594-config-data-custom\") pod \"barbican-api-56d84d574d-x5sbm\" (UID: \"ac766919-d788-40da-879a-627919926594\") " pod="openstack/barbican-api-56d84d574d-x5sbm" Oct 10 16:50:51 crc kubenswrapper[4799]: I1010 16:50:51.841451 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac766919-d788-40da-879a-627919926594-public-tls-certs\") pod \"barbican-api-56d84d574d-x5sbm\" (UID: \"ac766919-d788-40da-879a-627919926594\") " pod="openstack/barbican-api-56d84d574d-x5sbm" Oct 10 16:50:51 crc kubenswrapper[4799]: I1010 16:50:51.842634 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpmn4\" (UniqueName: \"kubernetes.io/projected/ac766919-d788-40da-879a-627919926594-kube-api-access-kpmn4\") pod \"barbican-api-56d84d574d-x5sbm\" (UID: \"ac766919-d788-40da-879a-627919926594\") " pod="openstack/barbican-api-56d84d574d-x5sbm" Oct 10 16:50:51 crc kubenswrapper[4799]: I1010 16:50:51.977729 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-56d84d574d-x5sbm" Oct 10 16:50:52 crc kubenswrapper[4799]: I1010 16:50:52.285346 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 10 16:50:52 crc kubenswrapper[4799]: I1010 16:50:52.285393 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 10 16:50:52 crc kubenswrapper[4799]: I1010 16:50:52.285405 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 10 16:50:52 crc kubenswrapper[4799]: I1010 16:50:52.285418 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 10 16:50:54 crc kubenswrapper[4799]: I1010 16:50:54.228025 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 10 16:50:54 crc kubenswrapper[4799]: I1010 16:50:54.230981 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 10 16:50:54 crc kubenswrapper[4799]: I1010 16:50:54.314829 4799 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 10 16:50:54 crc kubenswrapper[4799]: I1010 16:50:54.314872 4799 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 10 16:50:54 crc kubenswrapper[4799]: I1010 16:50:54.510732 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 10 16:50:54 crc kubenswrapper[4799]: I1010 16:50:54.510803 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 10 16:51:00 crc kubenswrapper[4799]: E1010 16:51:00.779925 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:85c75d60e1bd2f8a9ea0a2bb21a8df64c0a6f7b504cc1a05a355981d4b90e92f" Oct 10 16:51:00 crc kubenswrapper[4799]: E1010 16:51:00.780720 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:85c75d60e1bd2f8a9ea0a2bb21a8df64c0a6f7b504cc1a05a355981d4b90e92f,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-drvnl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-dt6zw_openstack(875e600d-b55a-48a9-a181-3ad09c24cc41): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 10 16:51:00 crc kubenswrapper[4799]: E1010 16:51:00.781904 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-dt6zw" podUID="875e600d-b55a-48a9-a181-3ad09c24cc41" Oct 10 16:51:01 crc kubenswrapper[4799]: I1010 16:51:01.313262 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67cc85ff7-csrr5"] Oct 10 16:51:01 crc kubenswrapper[4799]: W1010 16:51:01.317376 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbd142c20_258d_4917_86f1_f60cbaf56268.slice/crio-c95b6cea0c60fbad3ec5c6d2467cedd1ffdaa8a20953d587ca9d70f9ee3db3a3 WatchSource:0}: Error finding container c95b6cea0c60fbad3ec5c6d2467cedd1ffdaa8a20953d587ca9d70f9ee3db3a3: Status 404 returned error can't find the container with id c95b6cea0c60fbad3ec5c6d2467cedd1ffdaa8a20953d587ca9d70f9ee3db3a3 Oct 10 16:51:01 crc kubenswrapper[4799]: E1010 16:51:01.404624 4799 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:85c75d60e1bd2f8a9ea0a2bb21a8df64c0a6f7b504cc1a05a355981d4b90e92f\\\"\"" pod="openstack/cinder-db-sync-dt6zw" podUID="875e600d-b55a-48a9-a181-3ad09c24cc41" Oct 10 16:51:01 crc kubenswrapper[4799]: I1010 16:51:01.417483 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67cc85ff7-csrr5" event={"ID":"bd142c20-258d-4917-86f1-f60cbaf56268","Type":"ContainerStarted","Data":"c95b6cea0c60fbad3ec5c6d2467cedd1ffdaa8a20953d587ca9d70f9ee3db3a3"} Oct 10 16:51:01 crc kubenswrapper[4799]: I1010 16:51:01.503939 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7b6d7656b4-cxw2s"] Oct 10 16:51:01 crc kubenswrapper[4799]: I1010 16:51:01.511566 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-754947f5d7-z2rks"] Oct 10 16:51:01 crc kubenswrapper[4799]: W1010 16:51:01.517800 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3a05167f_cd58_4f9f_806b_8d71271320d2.slice/crio-6ab739e3c938fbee42c14c3d3c3f28963dac6979d90fade8897e3b358371438b WatchSource:0}: Error finding container 6ab739e3c938fbee42c14c3d3c3f28963dac6979d90fade8897e3b358371438b: Status 404 returned error can't find the container with id 6ab739e3c938fbee42c14c3d3c3f28963dac6979d90fade8897e3b358371438b Oct 10 16:51:01 crc kubenswrapper[4799]: W1010 16:51:01.520435 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbebadb41_8336_4b6c_b20d_6b8130b165ce.slice/crio-789ad5d265e05ce0f577efe0bad124696a0e07f67ffdf611a4740084158a055a WatchSource:0}: Error finding container 789ad5d265e05ce0f577efe0bad124696a0e07f67ffdf611a4740084158a055a: Status 404 returned error can't find the container with id 789ad5d265e05ce0f577efe0bad124696a0e07f67ffdf611a4740084158a055a Oct 10 16:51:01 crc kubenswrapper[4799]: I1010 16:51:01.521718 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-6565b9cf48-rl77d"] Oct 10 16:51:01 crc kubenswrapper[4799]: W1010 16:51:01.526185 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podac766919_d788_40da_879a_627919926594.slice/crio-c532cde8cbfd5bdb70765bccab4d581443eb1828222a4e880f8d0cab6492ff21 WatchSource:0}: Error finding container c532cde8cbfd5bdb70765bccab4d581443eb1828222a4e880f8d0cab6492ff21: Status 404 returned error can't find the container with id c532cde8cbfd5bdb70765bccab4d581443eb1828222a4e880f8d0cab6492ff21 Oct 10 16:51:01 crc kubenswrapper[4799]: W1010 16:51:01.530996 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ab7b7c1_e89f_4562_882b_4f517f90f8c8.slice/crio-ef70f52ae58b93b1c0d97afd58eaabfed4177c0eb894fd9aa8da2869f2bbae4d WatchSource:0}: Error finding container ef70f52ae58b93b1c0d97afd58eaabfed4177c0eb894fd9aa8da2869f2bbae4d: Status 404 returned error can't find the container with id ef70f52ae58b93b1c0d97afd58eaabfed4177c0eb894fd9aa8da2869f2bbae4d Oct 10 16:51:01 crc kubenswrapper[4799]: I1010 16:51:01.531060 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-56d84d574d-x5sbm"] Oct 10 16:51:01 crc kubenswrapper[4799]: I1010 16:51:01.649694 4799 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-5b85b5697b-9wc6c"] Oct 10 16:51:01 crc kubenswrapper[4799]: W1010 16:51:01.660560 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1bd0e459_efb9_463c_a8fc_d08a3194f3d9.slice/crio-efe237b55e5242af0278a73975e4ae635c5aefa7fcda42dc748aad1699ce6abb WatchSource:0}: Error finding container efe237b55e5242af0278a73975e4ae635c5aefa7fcda42dc748aad1699ce6abb: Status 404 returned error can't find the container with id efe237b55e5242af0278a73975e4ae635c5aefa7fcda42dc748aad1699ce6abb Oct 10 16:51:02 crc kubenswrapper[4799]: I1010 16:51:02.411827 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-56d84d574d-x5sbm" event={"ID":"ac766919-d788-40da-879a-627919926594","Type":"ContainerStarted","Data":"c532cde8cbfd5bdb70765bccab4d581443eb1828222a4e880f8d0cab6492ff21"} Oct 10 16:51:02 crc kubenswrapper[4799]: I1010 16:51:02.414584 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6565b9cf48-rl77d" event={"ID":"7ab7b7c1-e89f-4562-882b-4f517f90f8c8","Type":"ContainerStarted","Data":"ef70f52ae58b93b1c0d97afd58eaabfed4177c0eb894fd9aa8da2869f2bbae4d"} Oct 10 16:51:02 crc kubenswrapper[4799]: I1010 16:51:02.415479 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5b85b5697b-9wc6c" event={"ID":"1bd0e459-efb9-463c-a8fc-d08a3194f3d9","Type":"ContainerStarted","Data":"efe237b55e5242af0278a73975e4ae635c5aefa7fcda42dc748aad1699ce6abb"} Oct 10 16:51:02 crc kubenswrapper[4799]: I1010 16:51:02.416302 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-754947f5d7-z2rks" event={"ID":"3a05167f-cd58-4f9f-806b-8d71271320d2","Type":"ContainerStarted","Data":"6ab739e3c938fbee42c14c3d3c3f28963dac6979d90fade8897e3b358371438b"} Oct 10 16:51:02 crc kubenswrapper[4799]: I1010 16:51:02.417462 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7b6d7656b4-cxw2s" event={"ID":"bebadb41-8336-4b6c-b20d-6b8130b165ce","Type":"ContainerStarted","Data":"789ad5d265e05ce0f577efe0bad124696a0e07f67ffdf611a4740084158a055a"} Oct 10 16:51:03 crc kubenswrapper[4799]: I1010 16:51:03.429559 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7b6d7656b4-cxw2s" event={"ID":"bebadb41-8336-4b6c-b20d-6b8130b165ce","Type":"ContainerStarted","Data":"9b71ee61665b3132d531889454e29722422a6a0b470d17895e43a1cf227396dc"} Oct 10 16:51:03 crc kubenswrapper[4799]: I1010 16:51:03.430741 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-56d84d574d-x5sbm" event={"ID":"ac766919-d788-40da-879a-627919926594","Type":"ContainerStarted","Data":"5af6a65cf06c9580d2979a8455e9bbfb9189ae51ad9185bd52c4a3fc972febbc"} Oct 10 16:51:03 crc kubenswrapper[4799]: I1010 16:51:03.431684 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6565b9cf48-rl77d" event={"ID":"7ab7b7c1-e89f-4562-882b-4f517f90f8c8","Type":"ContainerStarted","Data":"7ff035a8a6498fce1542054aa6ef55bd158eb92c46cb410ac3528b2a07a6250d"} Oct 10 16:51:03 crc kubenswrapper[4799]: I1010 16:51:03.443127 4799 generic.go:334] "Generic (PLEG): container finished" podID="bd142c20-258d-4917-86f1-f60cbaf56268" containerID="129466a22981637b7792b11ef135e282d4dc863e9e958d191972a7c5d9007c0b" exitCode=0 Oct 10 16:51:03 crc kubenswrapper[4799]: I1010 16:51:03.443374 4799 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/dnsmasq-dns-67cc85ff7-csrr5" event={"ID":"bd142c20-258d-4917-86f1-f60cbaf56268","Type":"ContainerDied","Data":"129466a22981637b7792b11ef135e282d4dc863e9e958d191972a7c5d9007c0b"} Oct 10 16:51:07 crc kubenswrapper[4799]: I1010 16:51:07.557156 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7b6d7656b4-cxw2s" event={"ID":"bebadb41-8336-4b6c-b20d-6b8130b165ce","Type":"ContainerStarted","Data":"08df7005def57a0c28c64f0558472e4f8c70ae20c7c6a71a76edf6d9339eecd8"} Oct 10 16:51:07 crc kubenswrapper[4799]: I1010 16:51:07.558459 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7b6d7656b4-cxw2s" Oct 10 16:51:07 crc kubenswrapper[4799]: I1010 16:51:07.558476 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7b6d7656b4-cxw2s" Oct 10 16:51:07 crc kubenswrapper[4799]: I1010 16:51:07.702735 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-7b6d7656b4-cxw2s" podStartSLOduration=19.702714065 podStartE2EDuration="19.702714065s" podCreationTimestamp="2025-10-10 16:50:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:51:07.646145939 +0000 UTC m=+1161.154470074" watchObservedRunningTime="2025-10-10 16:51:07.702714065 +0000 UTC m=+1161.211038180" Oct 10 16:51:08 crc kubenswrapper[4799]: I1010 16:51:08.574441 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"766235b0-a5b9-4448-8dac-1afd1ca60e50","Type":"ContainerStarted","Data":"002aced825b2ba369f5e8c313b9b6167bd2bc03a7c89699707e99bfff740991b"} Oct 10 16:51:08 crc kubenswrapper[4799]: I1010 16:51:08.575191 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="766235b0-a5b9-4448-8dac-1afd1ca60e50" containerName="ceilometer-central-agent" containerID="cri-o://3c3489eae1a3f26fc1d5543050426a7e85875420285e1be95fa2f92f06fadd75" gracePeriod=30 Oct 10 16:51:08 crc kubenswrapper[4799]: I1010 16:51:08.575580 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 10 16:51:08 crc kubenswrapper[4799]: I1010 16:51:08.575961 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="766235b0-a5b9-4448-8dac-1afd1ca60e50" containerName="proxy-httpd" containerID="cri-o://002aced825b2ba369f5e8c313b9b6167bd2bc03a7c89699707e99bfff740991b" gracePeriod=30 Oct 10 16:51:08 crc kubenswrapper[4799]: I1010 16:51:08.576051 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="766235b0-a5b9-4448-8dac-1afd1ca60e50" containerName="sg-core" containerID="cri-o://2835675d74c0d34b7cae5f37da026d20cdbf84cd064c0374ed6191f0417ceb02" gracePeriod=30 Oct 10 16:51:08 crc kubenswrapper[4799]: I1010 16:51:08.576124 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="766235b0-a5b9-4448-8dac-1afd1ca60e50" containerName="ceilometer-notification-agent" containerID="cri-o://16f7a682fd238e46067b36808612cbce27d75cdbcd6d153529e759fa91f9d41c" gracePeriod=30 Oct 10 16:51:08 crc kubenswrapper[4799]: I1010 16:51:08.583821 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6565b9cf48-rl77d" 
event={"ID":"7ab7b7c1-e89f-4562-882b-4f517f90f8c8","Type":"ContainerStarted","Data":"a8fdb5ed39a199e1efb2bc1f77b2de74dd205c6dcb6fea5e9f71d53f89199fcd"} Oct 10 16:51:08 crc kubenswrapper[4799]: I1010 16:51:08.583890 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-6565b9cf48-rl77d" Oct 10 16:51:08 crc kubenswrapper[4799]: I1010 16:51:08.584058 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-6565b9cf48-rl77d" Oct 10 16:51:08 crc kubenswrapper[4799]: I1010 16:51:08.593510 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5b85b5697b-9wc6c" event={"ID":"1bd0e459-efb9-463c-a8fc-d08a3194f3d9","Type":"ContainerStarted","Data":"92ceb7a81421c011199b208f71ac06ee53a9dc28e8d3ab70e4e869c6e631df9b"} Oct 10 16:51:08 crc kubenswrapper[4799]: I1010 16:51:08.593902 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5b85b5697b-9wc6c" event={"ID":"1bd0e459-efb9-463c-a8fc-d08a3194f3d9","Type":"ContainerStarted","Data":"6a4a38abddd264e6f436a27a2422c3d7dee920ced5f32af3275fc86edc0ea42c"} Oct 10 16:51:08 crc kubenswrapper[4799]: I1010 16:51:08.597284 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-754947f5d7-z2rks" event={"ID":"3a05167f-cd58-4f9f-806b-8d71271320d2","Type":"ContainerStarted","Data":"44db3563c5cb7bf8e2e66a9895632efdaaa14dc9f4496418fa8fa16fcb55a2cc"} Oct 10 16:51:08 crc kubenswrapper[4799]: I1010 16:51:08.597342 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-754947f5d7-z2rks" event={"ID":"3a05167f-cd58-4f9f-806b-8d71271320d2","Type":"ContainerStarted","Data":"ffeaf50de5582926c54ffdb7618e55611dd1a565aacf1bd0b7bbf41db579bdee"} Oct 10 16:51:08 crc kubenswrapper[4799]: I1010 16:51:08.600731 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67cc85ff7-csrr5" event={"ID":"bd142c20-258d-4917-86f1-f60cbaf56268","Type":"ContainerStarted","Data":"2587e65baf79f3aae1540600e756c8097ab481b86bca2cbaed0b95d59d90d46b"} Oct 10 16:51:08 crc kubenswrapper[4799]: I1010 16:51:08.600893 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-67cc85ff7-csrr5" Oct 10 16:51:08 crc kubenswrapper[4799]: I1010 16:51:08.603734 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-56d84d574d-x5sbm" event={"ID":"ac766919-d788-40da-879a-627919926594","Type":"ContainerStarted","Data":"3395c2ca55d83e7b3885a78bfc0d6276c0ac7455bc1ef7cc5df2ccda1fecad2d"} Oct 10 16:51:08 crc kubenswrapper[4799]: I1010 16:51:08.604086 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-56d84d574d-x5sbm" Oct 10 16:51:08 crc kubenswrapper[4799]: I1010 16:51:08.604110 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-56d84d574d-x5sbm" Oct 10 16:51:08 crc kubenswrapper[4799]: I1010 16:51:08.614579 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.4845842879999998 podStartE2EDuration="45.614553259s" podCreationTimestamp="2025-10-10 16:50:23 +0000 UTC" firstStartedPulling="2025-10-10 16:50:24.38173531 +0000 UTC m=+1117.890059425" lastFinishedPulling="2025-10-10 16:51:07.511704281 +0000 UTC m=+1161.020028396" observedRunningTime="2025-10-10 16:51:08.609117395 +0000 UTC m=+1162.117441580" watchObservedRunningTime="2025-10-10 16:51:08.614553259 
+0000 UTC m=+1162.122877394" Oct 10 16:51:08 crc kubenswrapper[4799]: I1010 16:51:08.641824 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-6565b9cf48-rl77d" podStartSLOduration=21.641805582 podStartE2EDuration="21.641805582s" podCreationTimestamp="2025-10-10 16:50:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:51:08.639376632 +0000 UTC m=+1162.147700757" watchObservedRunningTime="2025-10-10 16:51:08.641805582 +0000 UTC m=+1162.150129707" Oct 10 16:51:08 crc kubenswrapper[4799]: I1010 16:51:08.712834 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-5b85b5697b-9wc6c" podStartSLOduration=14.762118463 podStartE2EDuration="20.712817425s" podCreationTimestamp="2025-10-10 16:50:48 +0000 UTC" firstStartedPulling="2025-10-10 16:51:01.66355823 +0000 UTC m=+1155.171882345" lastFinishedPulling="2025-10-10 16:51:07.614257192 +0000 UTC m=+1161.122581307" observedRunningTime="2025-10-10 16:51:08.707863893 +0000 UTC m=+1162.216188018" watchObservedRunningTime="2025-10-10 16:51:08.712817425 +0000 UTC m=+1162.221141540" Oct 10 16:51:08 crc kubenswrapper[4799]: I1010 16:51:08.730064 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-56d84d574d-x5sbm" podStartSLOduration=17.730044499999998 podStartE2EDuration="17.7300445s" podCreationTimestamp="2025-10-10 16:50:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:51:08.727342603 +0000 UTC m=+1162.235666728" watchObservedRunningTime="2025-10-10 16:51:08.7300445 +0000 UTC m=+1162.238368625" Oct 10 16:51:08 crc kubenswrapper[4799]: I1010 16:51:08.754000 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-754947f5d7-z2rks" podStartSLOduration=14.649336179 podStartE2EDuration="20.753984061s" podCreationTimestamp="2025-10-10 16:50:48 +0000 UTC" firstStartedPulling="2025-10-10 16:51:01.520834307 +0000 UTC m=+1155.029158422" lastFinishedPulling="2025-10-10 16:51:07.625482179 +0000 UTC m=+1161.133806304" observedRunningTime="2025-10-10 16:51:08.74908969 +0000 UTC m=+1162.257413795" watchObservedRunningTime="2025-10-10 16:51:08.753984061 +0000 UTC m=+1162.262308176" Oct 10 16:51:08 crc kubenswrapper[4799]: I1010 16:51:08.783663 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-67cc85ff7-csrr5" podStartSLOduration=20.783643873 podStartE2EDuration="20.783643873s" podCreationTimestamp="2025-10-10 16:50:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:51:08.778041704 +0000 UTC m=+1162.286365819" watchObservedRunningTime="2025-10-10 16:51:08.783643873 +0000 UTC m=+1162.291967988" Oct 10 16:51:09 crc kubenswrapper[4799]: I1010 16:51:09.471321 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7b6d7656b4-cxw2s" Oct 10 16:51:09 crc kubenswrapper[4799]: I1010 16:51:09.629015 4799 generic.go:334] "Generic (PLEG): container finished" podID="766235b0-a5b9-4448-8dac-1afd1ca60e50" containerID="002aced825b2ba369f5e8c313b9b6167bd2bc03a7c89699707e99bfff740991b" exitCode=0 Oct 10 16:51:09 crc kubenswrapper[4799]: I1010 16:51:09.629350 4799 generic.go:334] "Generic 
(PLEG): container finished" podID="766235b0-a5b9-4448-8dac-1afd1ca60e50" containerID="2835675d74c0d34b7cae5f37da026d20cdbf84cd064c0374ed6191f0417ceb02" exitCode=2 Oct 10 16:51:09 crc kubenswrapper[4799]: I1010 16:51:09.629359 4799 generic.go:334] "Generic (PLEG): container finished" podID="766235b0-a5b9-4448-8dac-1afd1ca60e50" containerID="16f7a682fd238e46067b36808612cbce27d75cdbcd6d153529e759fa91f9d41c" exitCode=0 Oct 10 16:51:09 crc kubenswrapper[4799]: I1010 16:51:09.629366 4799 generic.go:334] "Generic (PLEG): container finished" podID="766235b0-a5b9-4448-8dac-1afd1ca60e50" containerID="3c3489eae1a3f26fc1d5543050426a7e85875420285e1be95fa2f92f06fadd75" exitCode=0 Oct 10 16:51:09 crc kubenswrapper[4799]: I1010 16:51:09.629453 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"766235b0-a5b9-4448-8dac-1afd1ca60e50","Type":"ContainerDied","Data":"002aced825b2ba369f5e8c313b9b6167bd2bc03a7c89699707e99bfff740991b"} Oct 10 16:51:09 crc kubenswrapper[4799]: I1010 16:51:09.629480 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"766235b0-a5b9-4448-8dac-1afd1ca60e50","Type":"ContainerDied","Data":"2835675d74c0d34b7cae5f37da026d20cdbf84cd064c0374ed6191f0417ceb02"} Oct 10 16:51:09 crc kubenswrapper[4799]: I1010 16:51:09.629489 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"766235b0-a5b9-4448-8dac-1afd1ca60e50","Type":"ContainerDied","Data":"16f7a682fd238e46067b36808612cbce27d75cdbcd6d153529e759fa91f9d41c"} Oct 10 16:51:09 crc kubenswrapper[4799]: I1010 16:51:09.629498 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"766235b0-a5b9-4448-8dac-1afd1ca60e50","Type":"ContainerDied","Data":"3c3489eae1a3f26fc1d5543050426a7e85875420285e1be95fa2f92f06fadd75"} Oct 10 16:51:09 crc kubenswrapper[4799]: I1010 16:51:09.636978 4799 generic.go:334] "Generic (PLEG): container finished" podID="adc813f3-66fb-467b-9033-c78cba4a2d36" containerID="7521cb801e0a8accc208d2be60734cccb2b3fe5514d504e26c34084683264aee" exitCode=0 Oct 10 16:51:09 crc kubenswrapper[4799]: I1010 16:51:09.637974 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-6chg9" event={"ID":"adc813f3-66fb-467b-9033-c78cba4a2d36","Type":"ContainerDied","Data":"7521cb801e0a8accc208d2be60734cccb2b3fe5514d504e26c34084683264aee"} Oct 10 16:51:09 crc kubenswrapper[4799]: I1010 16:51:09.767150 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 10 16:51:09 crc kubenswrapper[4799]: I1010 16:51:09.852653 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-6565b9cf48-rl77d" Oct 10 16:51:09 crc kubenswrapper[4799]: I1010 16:51:09.917638 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/766235b0-a5b9-4448-8dac-1afd1ca60e50-combined-ca-bundle\") pod \"766235b0-a5b9-4448-8dac-1afd1ca60e50\" (UID: \"766235b0-a5b9-4448-8dac-1afd1ca60e50\") " Oct 10 16:51:09 crc kubenswrapper[4799]: I1010 16:51:09.917780 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6nkg9\" (UniqueName: \"kubernetes.io/projected/766235b0-a5b9-4448-8dac-1afd1ca60e50-kube-api-access-6nkg9\") pod \"766235b0-a5b9-4448-8dac-1afd1ca60e50\" (UID: \"766235b0-a5b9-4448-8dac-1afd1ca60e50\") " Oct 10 16:51:09 crc kubenswrapper[4799]: I1010 16:51:09.917917 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/766235b0-a5b9-4448-8dac-1afd1ca60e50-log-httpd\") pod \"766235b0-a5b9-4448-8dac-1afd1ca60e50\" (UID: \"766235b0-a5b9-4448-8dac-1afd1ca60e50\") " Oct 10 16:51:09 crc kubenswrapper[4799]: I1010 16:51:09.917952 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/766235b0-a5b9-4448-8dac-1afd1ca60e50-scripts\") pod \"766235b0-a5b9-4448-8dac-1afd1ca60e50\" (UID: \"766235b0-a5b9-4448-8dac-1afd1ca60e50\") " Oct 10 16:51:09 crc kubenswrapper[4799]: I1010 16:51:09.917987 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/766235b0-a5b9-4448-8dac-1afd1ca60e50-run-httpd\") pod \"766235b0-a5b9-4448-8dac-1afd1ca60e50\" (UID: \"766235b0-a5b9-4448-8dac-1afd1ca60e50\") " Oct 10 16:51:09 crc kubenswrapper[4799]: I1010 16:51:09.918036 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/766235b0-a5b9-4448-8dac-1afd1ca60e50-config-data\") pod \"766235b0-a5b9-4448-8dac-1afd1ca60e50\" (UID: \"766235b0-a5b9-4448-8dac-1afd1ca60e50\") " Oct 10 16:51:09 crc kubenswrapper[4799]: I1010 16:51:09.918080 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/766235b0-a5b9-4448-8dac-1afd1ca60e50-sg-core-conf-yaml\") pod \"766235b0-a5b9-4448-8dac-1afd1ca60e50\" (UID: \"766235b0-a5b9-4448-8dac-1afd1ca60e50\") " Oct 10 16:51:09 crc kubenswrapper[4799]: I1010 16:51:09.918694 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/766235b0-a5b9-4448-8dac-1afd1ca60e50-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "766235b0-a5b9-4448-8dac-1afd1ca60e50" (UID: "766235b0-a5b9-4448-8dac-1afd1ca60e50"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:51:09 crc kubenswrapper[4799]: I1010 16:51:09.919047 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/766235b0-a5b9-4448-8dac-1afd1ca60e50-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "766235b0-a5b9-4448-8dac-1afd1ca60e50" (UID: "766235b0-a5b9-4448-8dac-1afd1ca60e50"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:51:09 crc kubenswrapper[4799]: I1010 16:51:09.924871 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/766235b0-a5b9-4448-8dac-1afd1ca60e50-scripts" (OuterVolumeSpecName: "scripts") pod "766235b0-a5b9-4448-8dac-1afd1ca60e50" (UID: "766235b0-a5b9-4448-8dac-1afd1ca60e50"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:09 crc kubenswrapper[4799]: I1010 16:51:09.925477 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/766235b0-a5b9-4448-8dac-1afd1ca60e50-kube-api-access-6nkg9" (OuterVolumeSpecName: "kube-api-access-6nkg9") pod "766235b0-a5b9-4448-8dac-1afd1ca60e50" (UID: "766235b0-a5b9-4448-8dac-1afd1ca60e50"). InnerVolumeSpecName "kube-api-access-6nkg9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:51:09 crc kubenswrapper[4799]: I1010 16:51:09.954985 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/766235b0-a5b9-4448-8dac-1afd1ca60e50-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "766235b0-a5b9-4448-8dac-1afd1ca60e50" (UID: "766235b0-a5b9-4448-8dac-1afd1ca60e50"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:09 crc kubenswrapper[4799]: I1010 16:51:09.997203 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/766235b0-a5b9-4448-8dac-1afd1ca60e50-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "766235b0-a5b9-4448-8dac-1afd1ca60e50" (UID: "766235b0-a5b9-4448-8dac-1afd1ca60e50"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.019940 4799 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/766235b0-a5b9-4448-8dac-1afd1ca60e50-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.020901 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/766235b0-a5b9-4448-8dac-1afd1ca60e50-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.021010 4799 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/766235b0-a5b9-4448-8dac-1afd1ca60e50-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.021088 4799 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/766235b0-a5b9-4448-8dac-1afd1ca60e50-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.021163 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/766235b0-a5b9-4448-8dac-1afd1ca60e50-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.021235 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6nkg9\" (UniqueName: \"kubernetes.io/projected/766235b0-a5b9-4448-8dac-1afd1ca60e50-kube-api-access-6nkg9\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.060782 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/secret/766235b0-a5b9-4448-8dac-1afd1ca60e50-config-data" (OuterVolumeSpecName: "config-data") pod "766235b0-a5b9-4448-8dac-1afd1ca60e50" (UID: "766235b0-a5b9-4448-8dac-1afd1ca60e50"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.123275 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/766235b0-a5b9-4448-8dac-1afd1ca60e50-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.261364 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-56d84d574d-x5sbm" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.645403 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.645449 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"766235b0-a5b9-4448-8dac-1afd1ca60e50","Type":"ContainerDied","Data":"e599caec4b8b12a22e3ad54782bf7285344c71fac82fa3661c389ca3959918f8"} Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.645483 4799 scope.go:117] "RemoveContainer" containerID="002aced825b2ba369f5e8c313b9b6167bd2bc03a7c89699707e99bfff740991b" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.675245 4799 scope.go:117] "RemoveContainer" containerID="2835675d74c0d34b7cae5f37da026d20cdbf84cd064c0374ed6191f0417ceb02" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.689507 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.709827 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.721709 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:51:10 crc kubenswrapper[4799]: E1010 16:51:10.722149 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="766235b0-a5b9-4448-8dac-1afd1ca60e50" containerName="ceilometer-notification-agent" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.722172 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="766235b0-a5b9-4448-8dac-1afd1ca60e50" containerName="ceilometer-notification-agent" Oct 10 16:51:10 crc kubenswrapper[4799]: E1010 16:51:10.722195 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="766235b0-a5b9-4448-8dac-1afd1ca60e50" containerName="ceilometer-central-agent" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.722201 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="766235b0-a5b9-4448-8dac-1afd1ca60e50" containerName="ceilometer-central-agent" Oct 10 16:51:10 crc kubenswrapper[4799]: E1010 16:51:10.722211 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="766235b0-a5b9-4448-8dac-1afd1ca60e50" containerName="proxy-httpd" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.722219 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="766235b0-a5b9-4448-8dac-1afd1ca60e50" containerName="proxy-httpd" Oct 10 16:51:10 crc kubenswrapper[4799]: E1010 16:51:10.722238 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="766235b0-a5b9-4448-8dac-1afd1ca60e50" containerName="sg-core" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.722243 4799 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="766235b0-a5b9-4448-8dac-1afd1ca60e50" containerName="sg-core" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.722419 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="766235b0-a5b9-4448-8dac-1afd1ca60e50" containerName="proxy-httpd" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.722440 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="766235b0-a5b9-4448-8dac-1afd1ca60e50" containerName="ceilometer-notification-agent" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.722449 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="766235b0-a5b9-4448-8dac-1afd1ca60e50" containerName="ceilometer-central-agent" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.722461 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="766235b0-a5b9-4448-8dac-1afd1ca60e50" containerName="sg-core" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.722916 4799 scope.go:117] "RemoveContainer" containerID="16f7a682fd238e46067b36808612cbce27d75cdbcd6d153529e759fa91f9d41c" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.724166 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.726392 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.726608 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.739437 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.761905 4799 scope.go:117] "RemoveContainer" containerID="3c3489eae1a3f26fc1d5543050426a7e85875420285e1be95fa2f92f06fadd75" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.841570 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5ead453-da07-4656-ae87-9d4bc94daee7-scripts\") pod \"ceilometer-0\" (UID: \"a5ead453-da07-4656-ae87-9d4bc94daee7\") " pod="openstack/ceilometer-0" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.841903 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5ead453-da07-4656-ae87-9d4bc94daee7-config-data\") pod \"ceilometer-0\" (UID: \"a5ead453-da07-4656-ae87-9d4bc94daee7\") " pod="openstack/ceilometer-0" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.841944 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5ead453-da07-4656-ae87-9d4bc94daee7-log-httpd\") pod \"ceilometer-0\" (UID: \"a5ead453-da07-4656-ae87-9d4bc94daee7\") " pod="openstack/ceilometer-0" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.841962 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bvb4\" (UniqueName: \"kubernetes.io/projected/a5ead453-da07-4656-ae87-9d4bc94daee7-kube-api-access-4bvb4\") pod \"ceilometer-0\" (UID: \"a5ead453-da07-4656-ae87-9d4bc94daee7\") " pod="openstack/ceilometer-0" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.841999 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5ead453-da07-4656-ae87-9d4bc94daee7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a5ead453-da07-4656-ae87-9d4bc94daee7\") " pod="openstack/ceilometer-0" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.842029 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a5ead453-da07-4656-ae87-9d4bc94daee7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a5ead453-da07-4656-ae87-9d4bc94daee7\") " pod="openstack/ceilometer-0" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.842043 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5ead453-da07-4656-ae87-9d4bc94daee7-run-httpd\") pod \"ceilometer-0\" (UID: \"a5ead453-da07-4656-ae87-9d4bc94daee7\") " pod="openstack/ceilometer-0" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.943223 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5ead453-da07-4656-ae87-9d4bc94daee7-log-httpd\") pod \"ceilometer-0\" (UID: \"a5ead453-da07-4656-ae87-9d4bc94daee7\") " pod="openstack/ceilometer-0" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.943254 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bvb4\" (UniqueName: \"kubernetes.io/projected/a5ead453-da07-4656-ae87-9d4bc94daee7-kube-api-access-4bvb4\") pod \"ceilometer-0\" (UID: \"a5ead453-da07-4656-ae87-9d4bc94daee7\") " pod="openstack/ceilometer-0" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.943297 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5ead453-da07-4656-ae87-9d4bc94daee7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a5ead453-da07-4656-ae87-9d4bc94daee7\") " pod="openstack/ceilometer-0" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.943335 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a5ead453-da07-4656-ae87-9d4bc94daee7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a5ead453-da07-4656-ae87-9d4bc94daee7\") " pod="openstack/ceilometer-0" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.943349 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5ead453-da07-4656-ae87-9d4bc94daee7-run-httpd\") pod \"ceilometer-0\" (UID: \"a5ead453-da07-4656-ae87-9d4bc94daee7\") " pod="openstack/ceilometer-0" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.943422 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5ead453-da07-4656-ae87-9d4bc94daee7-scripts\") pod \"ceilometer-0\" (UID: \"a5ead453-da07-4656-ae87-9d4bc94daee7\") " pod="openstack/ceilometer-0" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.943440 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5ead453-da07-4656-ae87-9d4bc94daee7-config-data\") pod \"ceilometer-0\" (UID: \"a5ead453-da07-4656-ae87-9d4bc94daee7\") " pod="openstack/ceilometer-0" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.943848 4799 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5ead453-da07-4656-ae87-9d4bc94daee7-log-httpd\") pod \"ceilometer-0\" (UID: \"a5ead453-da07-4656-ae87-9d4bc94daee7\") " pod="openstack/ceilometer-0" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.944051 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5ead453-da07-4656-ae87-9d4bc94daee7-run-httpd\") pod \"ceilometer-0\" (UID: \"a5ead453-da07-4656-ae87-9d4bc94daee7\") " pod="openstack/ceilometer-0" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.949567 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5ead453-da07-4656-ae87-9d4bc94daee7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a5ead453-da07-4656-ae87-9d4bc94daee7\") " pod="openstack/ceilometer-0" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.963317 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a5ead453-da07-4656-ae87-9d4bc94daee7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a5ead453-da07-4656-ae87-9d4bc94daee7\") " pod="openstack/ceilometer-0" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.963396 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5ead453-da07-4656-ae87-9d4bc94daee7-scripts\") pod \"ceilometer-0\" (UID: \"a5ead453-da07-4656-ae87-9d4bc94daee7\") " pod="openstack/ceilometer-0" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.963542 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5ead453-da07-4656-ae87-9d4bc94daee7-config-data\") pod \"ceilometer-0\" (UID: \"a5ead453-da07-4656-ae87-9d4bc94daee7\") " pod="openstack/ceilometer-0" Oct 10 16:51:10 crc kubenswrapper[4799]: I1010 16:51:10.967414 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bvb4\" (UniqueName: \"kubernetes.io/projected/a5ead453-da07-4656-ae87-9d4bc94daee7-kube-api-access-4bvb4\") pod \"ceilometer-0\" (UID: \"a5ead453-da07-4656-ae87-9d4bc94daee7\") " pod="openstack/ceilometer-0" Oct 10 16:51:11 crc kubenswrapper[4799]: I1010 16:51:11.049126 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 10 16:51:11 crc kubenswrapper[4799]: I1010 16:51:11.270339 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-6chg9" Oct 10 16:51:11 crc kubenswrapper[4799]: I1010 16:51:11.361432 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/adc813f3-66fb-467b-9033-c78cba4a2d36-config\") pod \"adc813f3-66fb-467b-9033-c78cba4a2d36\" (UID: \"adc813f3-66fb-467b-9033-c78cba4a2d36\") " Oct 10 16:51:11 crc kubenswrapper[4799]: I1010 16:51:11.361586 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-blc9f\" (UniqueName: \"kubernetes.io/projected/adc813f3-66fb-467b-9033-c78cba4a2d36-kube-api-access-blc9f\") pod \"adc813f3-66fb-467b-9033-c78cba4a2d36\" (UID: \"adc813f3-66fb-467b-9033-c78cba4a2d36\") " Oct 10 16:51:11 crc kubenswrapper[4799]: I1010 16:51:11.361655 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/adc813f3-66fb-467b-9033-c78cba4a2d36-combined-ca-bundle\") pod \"adc813f3-66fb-467b-9033-c78cba4a2d36\" (UID: \"adc813f3-66fb-467b-9033-c78cba4a2d36\") " Oct 10 16:51:11 crc kubenswrapper[4799]: I1010 16:51:11.371943 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/adc813f3-66fb-467b-9033-c78cba4a2d36-kube-api-access-blc9f" (OuterVolumeSpecName: "kube-api-access-blc9f") pod "adc813f3-66fb-467b-9033-c78cba4a2d36" (UID: "adc813f3-66fb-467b-9033-c78cba4a2d36"). InnerVolumeSpecName "kube-api-access-blc9f". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:51:11 crc kubenswrapper[4799]: I1010 16:51:11.392909 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/adc813f3-66fb-467b-9033-c78cba4a2d36-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "adc813f3-66fb-467b-9033-c78cba4a2d36" (UID: "adc813f3-66fb-467b-9033-c78cba4a2d36"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:11 crc kubenswrapper[4799]: I1010 16:51:11.406241 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/adc813f3-66fb-467b-9033-c78cba4a2d36-config" (OuterVolumeSpecName: "config") pod "adc813f3-66fb-467b-9033-c78cba4a2d36" (UID: "adc813f3-66fb-467b-9033-c78cba4a2d36"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:11 crc kubenswrapper[4799]: I1010 16:51:11.412743 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="766235b0-a5b9-4448-8dac-1afd1ca60e50" path="/var/lib/kubelet/pods/766235b0-a5b9-4448-8dac-1afd1ca60e50/volumes" Oct 10 16:51:11 crc kubenswrapper[4799]: I1010 16:51:11.463310 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-blc9f\" (UniqueName: \"kubernetes.io/projected/adc813f3-66fb-467b-9033-c78cba4a2d36-kube-api-access-blc9f\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:11 crc kubenswrapper[4799]: I1010 16:51:11.463357 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/adc813f3-66fb-467b-9033-c78cba4a2d36-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:11 crc kubenswrapper[4799]: I1010 16:51:11.463370 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/adc813f3-66fb-467b-9033-c78cba4a2d36-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:11 crc kubenswrapper[4799]: I1010 16:51:11.577456 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:51:11 crc kubenswrapper[4799]: W1010 16:51:11.587372 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda5ead453_da07_4656_ae87_9d4bc94daee7.slice/crio-d01bb1daf6208a0437d536f7ba1fcf22ad1f76c623682072e8584000476505e8 WatchSource:0}: Error finding container d01bb1daf6208a0437d536f7ba1fcf22ad1f76c623682072e8584000476505e8: Status 404 returned error can't find the container with id d01bb1daf6208a0437d536f7ba1fcf22ad1f76c623682072e8584000476505e8 Oct 10 16:51:11 crc kubenswrapper[4799]: I1010 16:51:11.668616 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-6chg9" Oct 10 16:51:11 crc kubenswrapper[4799]: I1010 16:51:11.668619 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-6chg9" event={"ID":"adc813f3-66fb-467b-9033-c78cba4a2d36","Type":"ContainerDied","Data":"61b9629f2498104607d9924e6c29662b22336561e08e649a96e9586c3b559e3f"} Oct 10 16:51:11 crc kubenswrapper[4799]: I1010 16:51:11.668728 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="61b9629f2498104607d9924e6c29662b22336561e08e649a96e9586c3b559e3f" Oct 10 16:51:11 crc kubenswrapper[4799]: I1010 16:51:11.672405 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5ead453-da07-4656-ae87-9d4bc94daee7","Type":"ContainerStarted","Data":"d01bb1daf6208a0437d536f7ba1fcf22ad1f76c623682072e8584000476505e8"} Oct 10 16:51:11 crc kubenswrapper[4799]: I1010 16:51:11.939063 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67cc85ff7-csrr5"] Oct 10 16:51:11 crc kubenswrapper[4799]: I1010 16:51:11.939675 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-67cc85ff7-csrr5" podUID="bd142c20-258d-4917-86f1-f60cbaf56268" containerName="dnsmasq-dns" containerID="cri-o://2587e65baf79f3aae1540600e756c8097ab481b86bca2cbaed0b95d59d90d46b" gracePeriod=10 Oct 10 16:51:11 crc kubenswrapper[4799]: I1010 16:51:11.987151 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-744fd954cc-ngzjd"] Oct 10 16:51:11 crc kubenswrapper[4799]: E1010 16:51:11.987695 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="adc813f3-66fb-467b-9033-c78cba4a2d36" containerName="neutron-db-sync" Oct 10 16:51:11 crc kubenswrapper[4799]: I1010 16:51:11.987712 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="adc813f3-66fb-467b-9033-c78cba4a2d36" containerName="neutron-db-sync" Oct 10 16:51:11 crc kubenswrapper[4799]: I1010 16:51:11.988359 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="adc813f3-66fb-467b-9033-c78cba4a2d36" containerName="neutron-db-sync" Oct 10 16:51:11 crc kubenswrapper[4799]: I1010 16:51:11.989461 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-744fd954cc-ngzjd" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.010895 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-744fd954cc-ngzjd"] Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.076835 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3bab4b53-c147-4875-aaea-df06dae44b04-config\") pod \"dnsmasq-dns-744fd954cc-ngzjd\" (UID: \"3bab4b53-c147-4875-aaea-df06dae44b04\") " pod="openstack/dnsmasq-dns-744fd954cc-ngzjd" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.076893 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3bab4b53-c147-4875-aaea-df06dae44b04-ovsdbserver-nb\") pod \"dnsmasq-dns-744fd954cc-ngzjd\" (UID: \"3bab4b53-c147-4875-aaea-df06dae44b04\") " pod="openstack/dnsmasq-dns-744fd954cc-ngzjd" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.076937 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3bab4b53-c147-4875-aaea-df06dae44b04-dns-svc\") pod \"dnsmasq-dns-744fd954cc-ngzjd\" (UID: \"3bab4b53-c147-4875-aaea-df06dae44b04\") " pod="openstack/dnsmasq-dns-744fd954cc-ngzjd" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.076970 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3bab4b53-c147-4875-aaea-df06dae44b04-ovsdbserver-sb\") pod \"dnsmasq-dns-744fd954cc-ngzjd\" (UID: \"3bab4b53-c147-4875-aaea-df06dae44b04\") " pod="openstack/dnsmasq-dns-744fd954cc-ngzjd" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.077030 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3bab4b53-c147-4875-aaea-df06dae44b04-dns-swift-storage-0\") pod \"dnsmasq-dns-744fd954cc-ngzjd\" (UID: \"3bab4b53-c147-4875-aaea-df06dae44b04\") " pod="openstack/dnsmasq-dns-744fd954cc-ngzjd" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.077054 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zww56\" (UniqueName: \"kubernetes.io/projected/3bab4b53-c147-4875-aaea-df06dae44b04-kube-api-access-zww56\") pod \"dnsmasq-dns-744fd954cc-ngzjd\" (UID: \"3bab4b53-c147-4875-aaea-df06dae44b04\") " pod="openstack/dnsmasq-dns-744fd954cc-ngzjd" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.091310 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-694f88c746-tbgjz"] Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.094918 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-694f88c746-tbgjz" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.098519 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.098723 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.098835 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.098970 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-rws6f" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.116707 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-694f88c746-tbgjz"] Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.180509 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b7debcd-ccac-4b9d-9b6e-011a0f8072d9-ovndb-tls-certs\") pod \"neutron-694f88c746-tbgjz\" (UID: \"2b7debcd-ccac-4b9d-9b6e-011a0f8072d9\") " pod="openstack/neutron-694f88c746-tbgjz" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.180582 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fz28v\" (UniqueName: \"kubernetes.io/projected/2b7debcd-ccac-4b9d-9b6e-011a0f8072d9-kube-api-access-fz28v\") pod \"neutron-694f88c746-tbgjz\" (UID: \"2b7debcd-ccac-4b9d-9b6e-011a0f8072d9\") " pod="openstack/neutron-694f88c746-tbgjz" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.180620 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3bab4b53-c147-4875-aaea-df06dae44b04-config\") pod \"dnsmasq-dns-744fd954cc-ngzjd\" (UID: \"3bab4b53-c147-4875-aaea-df06dae44b04\") " pod="openstack/dnsmasq-dns-744fd954cc-ngzjd" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.180649 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2b7debcd-ccac-4b9d-9b6e-011a0f8072d9-config\") pod \"neutron-694f88c746-tbgjz\" (UID: \"2b7debcd-ccac-4b9d-9b6e-011a0f8072d9\") " pod="openstack/neutron-694f88c746-tbgjz" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.180677 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3bab4b53-c147-4875-aaea-df06dae44b04-ovsdbserver-nb\") pod \"dnsmasq-dns-744fd954cc-ngzjd\" (UID: \"3bab4b53-c147-4875-aaea-df06dae44b04\") " pod="openstack/dnsmasq-dns-744fd954cc-ngzjd" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.180713 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3bab4b53-c147-4875-aaea-df06dae44b04-dns-svc\") pod \"dnsmasq-dns-744fd954cc-ngzjd\" (UID: \"3bab4b53-c147-4875-aaea-df06dae44b04\") " pod="openstack/dnsmasq-dns-744fd954cc-ngzjd" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.180743 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3bab4b53-c147-4875-aaea-df06dae44b04-ovsdbserver-sb\") pod \"dnsmasq-dns-744fd954cc-ngzjd\" (UID: 
\"3bab4b53-c147-4875-aaea-df06dae44b04\") " pod="openstack/dnsmasq-dns-744fd954cc-ngzjd" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.180910 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3bab4b53-c147-4875-aaea-df06dae44b04-dns-swift-storage-0\") pod \"dnsmasq-dns-744fd954cc-ngzjd\" (UID: \"3bab4b53-c147-4875-aaea-df06dae44b04\") " pod="openstack/dnsmasq-dns-744fd954cc-ngzjd" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.181010 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zww56\" (UniqueName: \"kubernetes.io/projected/3bab4b53-c147-4875-aaea-df06dae44b04-kube-api-access-zww56\") pod \"dnsmasq-dns-744fd954cc-ngzjd\" (UID: \"3bab4b53-c147-4875-aaea-df06dae44b04\") " pod="openstack/dnsmasq-dns-744fd954cc-ngzjd" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.181034 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2b7debcd-ccac-4b9d-9b6e-011a0f8072d9-httpd-config\") pod \"neutron-694f88c746-tbgjz\" (UID: \"2b7debcd-ccac-4b9d-9b6e-011a0f8072d9\") " pod="openstack/neutron-694f88c746-tbgjz" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.181083 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b7debcd-ccac-4b9d-9b6e-011a0f8072d9-combined-ca-bundle\") pod \"neutron-694f88c746-tbgjz\" (UID: \"2b7debcd-ccac-4b9d-9b6e-011a0f8072d9\") " pod="openstack/neutron-694f88c746-tbgjz" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.182350 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3bab4b53-c147-4875-aaea-df06dae44b04-ovsdbserver-sb\") pod \"dnsmasq-dns-744fd954cc-ngzjd\" (UID: \"3bab4b53-c147-4875-aaea-df06dae44b04\") " pod="openstack/dnsmasq-dns-744fd954cc-ngzjd" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.182384 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3bab4b53-c147-4875-aaea-df06dae44b04-config\") pod \"dnsmasq-dns-744fd954cc-ngzjd\" (UID: \"3bab4b53-c147-4875-aaea-df06dae44b04\") " pod="openstack/dnsmasq-dns-744fd954cc-ngzjd" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.182895 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3bab4b53-c147-4875-aaea-df06dae44b04-ovsdbserver-nb\") pod \"dnsmasq-dns-744fd954cc-ngzjd\" (UID: \"3bab4b53-c147-4875-aaea-df06dae44b04\") " pod="openstack/dnsmasq-dns-744fd954cc-ngzjd" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.185054 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3bab4b53-c147-4875-aaea-df06dae44b04-dns-swift-storage-0\") pod \"dnsmasq-dns-744fd954cc-ngzjd\" (UID: \"3bab4b53-c147-4875-aaea-df06dae44b04\") " pod="openstack/dnsmasq-dns-744fd954cc-ngzjd" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.185582 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3bab4b53-c147-4875-aaea-df06dae44b04-dns-svc\") pod \"dnsmasq-dns-744fd954cc-ngzjd\" (UID: \"3bab4b53-c147-4875-aaea-df06dae44b04\") " 
pod="openstack/dnsmasq-dns-744fd954cc-ngzjd" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.209111 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zww56\" (UniqueName: \"kubernetes.io/projected/3bab4b53-c147-4875-aaea-df06dae44b04-kube-api-access-zww56\") pod \"dnsmasq-dns-744fd954cc-ngzjd\" (UID: \"3bab4b53-c147-4875-aaea-df06dae44b04\") " pod="openstack/dnsmasq-dns-744fd954cc-ngzjd" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.282803 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2b7debcd-ccac-4b9d-9b6e-011a0f8072d9-httpd-config\") pod \"neutron-694f88c746-tbgjz\" (UID: \"2b7debcd-ccac-4b9d-9b6e-011a0f8072d9\") " pod="openstack/neutron-694f88c746-tbgjz" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.282899 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b7debcd-ccac-4b9d-9b6e-011a0f8072d9-combined-ca-bundle\") pod \"neutron-694f88c746-tbgjz\" (UID: \"2b7debcd-ccac-4b9d-9b6e-011a0f8072d9\") " pod="openstack/neutron-694f88c746-tbgjz" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.283067 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b7debcd-ccac-4b9d-9b6e-011a0f8072d9-ovndb-tls-certs\") pod \"neutron-694f88c746-tbgjz\" (UID: \"2b7debcd-ccac-4b9d-9b6e-011a0f8072d9\") " pod="openstack/neutron-694f88c746-tbgjz" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.283175 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fz28v\" (UniqueName: \"kubernetes.io/projected/2b7debcd-ccac-4b9d-9b6e-011a0f8072d9-kube-api-access-fz28v\") pod \"neutron-694f88c746-tbgjz\" (UID: \"2b7debcd-ccac-4b9d-9b6e-011a0f8072d9\") " pod="openstack/neutron-694f88c746-tbgjz" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.283253 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2b7debcd-ccac-4b9d-9b6e-011a0f8072d9-config\") pod \"neutron-694f88c746-tbgjz\" (UID: \"2b7debcd-ccac-4b9d-9b6e-011a0f8072d9\") " pod="openstack/neutron-694f88c746-tbgjz" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.292220 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b7debcd-ccac-4b9d-9b6e-011a0f8072d9-combined-ca-bundle\") pod \"neutron-694f88c746-tbgjz\" (UID: \"2b7debcd-ccac-4b9d-9b6e-011a0f8072d9\") " pod="openstack/neutron-694f88c746-tbgjz" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.296369 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2b7debcd-ccac-4b9d-9b6e-011a0f8072d9-httpd-config\") pod \"neutron-694f88c746-tbgjz\" (UID: \"2b7debcd-ccac-4b9d-9b6e-011a0f8072d9\") " pod="openstack/neutron-694f88c746-tbgjz" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.297530 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b7debcd-ccac-4b9d-9b6e-011a0f8072d9-ovndb-tls-certs\") pod \"neutron-694f88c746-tbgjz\" (UID: \"2b7debcd-ccac-4b9d-9b6e-011a0f8072d9\") " pod="openstack/neutron-694f88c746-tbgjz" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.310305 4799 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/2b7debcd-ccac-4b9d-9b6e-011a0f8072d9-config\") pod \"neutron-694f88c746-tbgjz\" (UID: \"2b7debcd-ccac-4b9d-9b6e-011a0f8072d9\") " pod="openstack/neutron-694f88c746-tbgjz" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.311045 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fz28v\" (UniqueName: \"kubernetes.io/projected/2b7debcd-ccac-4b9d-9b6e-011a0f8072d9-kube-api-access-fz28v\") pod \"neutron-694f88c746-tbgjz\" (UID: \"2b7debcd-ccac-4b9d-9b6e-011a0f8072d9\") " pod="openstack/neutron-694f88c746-tbgjz" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.412726 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67cc85ff7-csrr5" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.432748 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-744fd954cc-ngzjd" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.445231 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-694f88c746-tbgjz" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.490296 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd142c20-258d-4917-86f1-f60cbaf56268-config\") pod \"bd142c20-258d-4917-86f1-f60cbaf56268\" (UID: \"bd142c20-258d-4917-86f1-f60cbaf56268\") " Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.490381 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bd142c20-258d-4917-86f1-f60cbaf56268-ovsdbserver-nb\") pod \"bd142c20-258d-4917-86f1-f60cbaf56268\" (UID: \"bd142c20-258d-4917-86f1-f60cbaf56268\") " Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.490401 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bd142c20-258d-4917-86f1-f60cbaf56268-ovsdbserver-sb\") pod \"bd142c20-258d-4917-86f1-f60cbaf56268\" (UID: \"bd142c20-258d-4917-86f1-f60cbaf56268\") " Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.490455 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bd142c20-258d-4917-86f1-f60cbaf56268-dns-swift-storage-0\") pod \"bd142c20-258d-4917-86f1-f60cbaf56268\" (UID: \"bd142c20-258d-4917-86f1-f60cbaf56268\") " Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.490502 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kq6wk\" (UniqueName: \"kubernetes.io/projected/bd142c20-258d-4917-86f1-f60cbaf56268-kube-api-access-kq6wk\") pod \"bd142c20-258d-4917-86f1-f60cbaf56268\" (UID: \"bd142c20-258d-4917-86f1-f60cbaf56268\") " Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.490595 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bd142c20-258d-4917-86f1-f60cbaf56268-dns-svc\") pod \"bd142c20-258d-4917-86f1-f60cbaf56268\" (UID: \"bd142c20-258d-4917-86f1-f60cbaf56268\") " Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.496530 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd142c20-258d-4917-86f1-f60cbaf56268-kube-api-access-kq6wk" 
(OuterVolumeSpecName: "kube-api-access-kq6wk") pod "bd142c20-258d-4917-86f1-f60cbaf56268" (UID: "bd142c20-258d-4917-86f1-f60cbaf56268"). InnerVolumeSpecName "kube-api-access-kq6wk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.579264 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd142c20-258d-4917-86f1-f60cbaf56268-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "bd142c20-258d-4917-86f1-f60cbaf56268" (UID: "bd142c20-258d-4917-86f1-f60cbaf56268"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.582526 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd142c20-258d-4917-86f1-f60cbaf56268-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "bd142c20-258d-4917-86f1-f60cbaf56268" (UID: "bd142c20-258d-4917-86f1-f60cbaf56268"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.592656 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bd142c20-258d-4917-86f1-f60cbaf56268-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.592879 4799 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bd142c20-258d-4917-86f1-f60cbaf56268-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.592939 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kq6wk\" (UniqueName: \"kubernetes.io/projected/bd142c20-258d-4917-86f1-f60cbaf56268-kube-api-access-kq6wk\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.652315 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd142c20-258d-4917-86f1-f60cbaf56268-config" (OuterVolumeSpecName: "config") pod "bd142c20-258d-4917-86f1-f60cbaf56268" (UID: "bd142c20-258d-4917-86f1-f60cbaf56268"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.661555 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd142c20-258d-4917-86f1-f60cbaf56268-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "bd142c20-258d-4917-86f1-f60cbaf56268" (UID: "bd142c20-258d-4917-86f1-f60cbaf56268"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.687273 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd142c20-258d-4917-86f1-f60cbaf56268-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bd142c20-258d-4917-86f1-f60cbaf56268" (UID: "bd142c20-258d-4917-86f1-f60cbaf56268"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.694684 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd142c20-258d-4917-86f1-f60cbaf56268-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.694728 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bd142c20-258d-4917-86f1-f60cbaf56268-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.694741 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bd142c20-258d-4917-86f1-f60cbaf56268-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.710068 4799 generic.go:334] "Generic (PLEG): container finished" podID="bd142c20-258d-4917-86f1-f60cbaf56268" containerID="2587e65baf79f3aae1540600e756c8097ab481b86bca2cbaed0b95d59d90d46b" exitCode=0 Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.710122 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67cc85ff7-csrr5" event={"ID":"bd142c20-258d-4917-86f1-f60cbaf56268","Type":"ContainerDied","Data":"2587e65baf79f3aae1540600e756c8097ab481b86bca2cbaed0b95d59d90d46b"} Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.710147 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67cc85ff7-csrr5" event={"ID":"bd142c20-258d-4917-86f1-f60cbaf56268","Type":"ContainerDied","Data":"c95b6cea0c60fbad3ec5c6d2467cedd1ffdaa8a20953d587ca9d70f9ee3db3a3"} Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.710162 4799 scope.go:117] "RemoveContainer" containerID="2587e65baf79f3aae1540600e756c8097ab481b86bca2cbaed0b95d59d90d46b" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.710306 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67cc85ff7-csrr5" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.751332 4799 scope.go:117] "RemoveContainer" containerID="129466a22981637b7792b11ef135e282d4dc863e9e958d191972a7c5d9007c0b" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.754713 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67cc85ff7-csrr5"] Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.764402 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-67cc85ff7-csrr5"] Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.776332 4799 scope.go:117] "RemoveContainer" containerID="2587e65baf79f3aae1540600e756c8097ab481b86bca2cbaed0b95d59d90d46b" Oct 10 16:51:12 crc kubenswrapper[4799]: E1010 16:51:12.777082 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2587e65baf79f3aae1540600e756c8097ab481b86bca2cbaed0b95d59d90d46b\": container with ID starting with 2587e65baf79f3aae1540600e756c8097ab481b86bca2cbaed0b95d59d90d46b not found: ID does not exist" containerID="2587e65baf79f3aae1540600e756c8097ab481b86bca2cbaed0b95d59d90d46b" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.777123 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2587e65baf79f3aae1540600e756c8097ab481b86bca2cbaed0b95d59d90d46b"} err="failed to get container status \"2587e65baf79f3aae1540600e756c8097ab481b86bca2cbaed0b95d59d90d46b\": rpc error: code = NotFound desc = could not find container \"2587e65baf79f3aae1540600e756c8097ab481b86bca2cbaed0b95d59d90d46b\": container with ID starting with 2587e65baf79f3aae1540600e756c8097ab481b86bca2cbaed0b95d59d90d46b not found: ID does not exist" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.777151 4799 scope.go:117] "RemoveContainer" containerID="129466a22981637b7792b11ef135e282d4dc863e9e958d191972a7c5d9007c0b" Oct 10 16:51:12 crc kubenswrapper[4799]: E1010 16:51:12.777437 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"129466a22981637b7792b11ef135e282d4dc863e9e958d191972a7c5d9007c0b\": container with ID starting with 129466a22981637b7792b11ef135e282d4dc863e9e958d191972a7c5d9007c0b not found: ID does not exist" containerID="129466a22981637b7792b11ef135e282d4dc863e9e958d191972a7c5d9007c0b" Oct 10 16:51:12 crc kubenswrapper[4799]: I1010 16:51:12.777455 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"129466a22981637b7792b11ef135e282d4dc863e9e958d191972a7c5d9007c0b"} err="failed to get container status \"129466a22981637b7792b11ef135e282d4dc863e9e958d191972a7c5d9007c0b\": rpc error: code = NotFound desc = could not find container \"129466a22981637b7792b11ef135e282d4dc863e9e958d191972a7c5d9007c0b\": container with ID starting with 129466a22981637b7792b11ef135e282d4dc863e9e958d191972a7c5d9007c0b not found: ID does not exist" Oct 10 16:51:13 crc kubenswrapper[4799]: I1010 16:51:13.044028 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-694f88c746-tbgjz"] Oct 10 16:51:13 crc kubenswrapper[4799]: I1010 16:51:13.052699 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-744fd954cc-ngzjd"] Oct 10 16:51:13 crc kubenswrapper[4799]: I1010 16:51:13.419867 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd142c20-258d-4917-86f1-f60cbaf56268" 
path="/var/lib/kubelet/pods/bd142c20-258d-4917-86f1-f60cbaf56268/volumes" Oct 10 16:51:13 crc kubenswrapper[4799]: I1010 16:51:13.719672 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5ead453-da07-4656-ae87-9d4bc94daee7","Type":"ContainerStarted","Data":"29ee5983567edd8575a8a57b7430adae0c52ae039baecf8e3ab04899cc03fc20"} Oct 10 16:51:13 crc kubenswrapper[4799]: I1010 16:51:13.719721 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5ead453-da07-4656-ae87-9d4bc94daee7","Type":"ContainerStarted","Data":"6426570f9dfe449a016759848e9fb9bec9428063398ab822ea960095cba4927b"} Oct 10 16:51:13 crc kubenswrapper[4799]: I1010 16:51:13.722543 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-694f88c746-tbgjz" event={"ID":"2b7debcd-ccac-4b9d-9b6e-011a0f8072d9","Type":"ContainerStarted","Data":"a62494b9ff2ced5a032b285546d5814b83b5cac1ccbb043f0f7db208a692bf89"} Oct 10 16:51:13 crc kubenswrapper[4799]: I1010 16:51:13.722570 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-694f88c746-tbgjz" event={"ID":"2b7debcd-ccac-4b9d-9b6e-011a0f8072d9","Type":"ContainerStarted","Data":"51b411970b3fc3556f70eba6af79b3eba9f4d0cd9b2656eaf38bc05e92a8d335"} Oct 10 16:51:13 crc kubenswrapper[4799]: I1010 16:51:13.722580 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-694f88c746-tbgjz" event={"ID":"2b7debcd-ccac-4b9d-9b6e-011a0f8072d9","Type":"ContainerStarted","Data":"df26d26bb66a91d34c2a3c15847b842400cebfe33e4969c7daa42221c86fb6c7"} Oct 10 16:51:13 crc kubenswrapper[4799]: I1010 16:51:13.722623 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-694f88c746-tbgjz" Oct 10 16:51:13 crc kubenswrapper[4799]: I1010 16:51:13.723942 4799 generic.go:334] "Generic (PLEG): container finished" podID="3bab4b53-c147-4875-aaea-df06dae44b04" containerID="0a350afee44f9a4994b7e57622835f2d7e5b5441064e342822475aa18c0daadc" exitCode=0 Oct 10 16:51:13 crc kubenswrapper[4799]: I1010 16:51:13.723987 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-744fd954cc-ngzjd" event={"ID":"3bab4b53-c147-4875-aaea-df06dae44b04","Type":"ContainerDied","Data":"0a350afee44f9a4994b7e57622835f2d7e5b5441064e342822475aa18c0daadc"} Oct 10 16:51:13 crc kubenswrapper[4799]: I1010 16:51:13.724003 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-744fd954cc-ngzjd" event={"ID":"3bab4b53-c147-4875-aaea-df06dae44b04","Type":"ContainerStarted","Data":"dd45c3f8f254de7e9eb99c6ef5c57d50a19af24f9104d8755e3b636403d1a54b"} Oct 10 16:51:13 crc kubenswrapper[4799]: I1010 16:51:13.764144 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-694f88c746-tbgjz" podStartSLOduration=1.764127041 podStartE2EDuration="1.764127041s" podCreationTimestamp="2025-10-10 16:51:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:51:13.760373298 +0000 UTC m=+1167.268697413" watchObservedRunningTime="2025-10-10 16:51:13.764127041 +0000 UTC m=+1167.272451146" Oct 10 16:51:14 crc kubenswrapper[4799]: I1010 16:51:14.532155 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-69f7ddf877-mclzd"] Oct 10 16:51:14 crc kubenswrapper[4799]: E1010 16:51:14.532996 4799 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="bd142c20-258d-4917-86f1-f60cbaf56268" containerName="init" Oct 10 16:51:14 crc kubenswrapper[4799]: I1010 16:51:14.533011 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd142c20-258d-4917-86f1-f60cbaf56268" containerName="init" Oct 10 16:51:14 crc kubenswrapper[4799]: E1010 16:51:14.533039 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd142c20-258d-4917-86f1-f60cbaf56268" containerName="dnsmasq-dns" Oct 10 16:51:14 crc kubenswrapper[4799]: I1010 16:51:14.533046 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd142c20-258d-4917-86f1-f60cbaf56268" containerName="dnsmasq-dns" Oct 10 16:51:14 crc kubenswrapper[4799]: I1010 16:51:14.533214 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd142c20-258d-4917-86f1-f60cbaf56268" containerName="dnsmasq-dns" Oct 10 16:51:14 crc kubenswrapper[4799]: I1010 16:51:14.534108 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-69f7ddf877-mclzd" Oct 10 16:51:14 crc kubenswrapper[4799]: I1010 16:51:14.537547 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Oct 10 16:51:14 crc kubenswrapper[4799]: I1010 16:51:14.537583 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Oct 10 16:51:14 crc kubenswrapper[4799]: I1010 16:51:14.547282 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-69f7ddf877-mclzd"] Oct 10 16:51:14 crc kubenswrapper[4799]: I1010 16:51:14.636168 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pczq6\" (UniqueName: \"kubernetes.io/projected/78820835-eb2d-40d8-a497-e9a351a9cef9-kube-api-access-pczq6\") pod \"neutron-69f7ddf877-mclzd\" (UID: \"78820835-eb2d-40d8-a497-e9a351a9cef9\") " pod="openstack/neutron-69f7ddf877-mclzd" Oct 10 16:51:14 crc kubenswrapper[4799]: I1010 16:51:14.636262 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-combined-ca-bundle\") pod \"neutron-69f7ddf877-mclzd\" (UID: \"78820835-eb2d-40d8-a497-e9a351a9cef9\") " pod="openstack/neutron-69f7ddf877-mclzd" Oct 10 16:51:14 crc kubenswrapper[4799]: I1010 16:51:14.636606 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-internal-tls-certs\") pod \"neutron-69f7ddf877-mclzd\" (UID: \"78820835-eb2d-40d8-a497-e9a351a9cef9\") " pod="openstack/neutron-69f7ddf877-mclzd" Oct 10 16:51:14 crc kubenswrapper[4799]: I1010 16:51:14.636716 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-ovndb-tls-certs\") pod \"neutron-69f7ddf877-mclzd\" (UID: \"78820835-eb2d-40d8-a497-e9a351a9cef9\") " pod="openstack/neutron-69f7ddf877-mclzd" Oct 10 16:51:14 crc kubenswrapper[4799]: I1010 16:51:14.636774 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-public-tls-certs\") pod \"neutron-69f7ddf877-mclzd\" (UID: \"78820835-eb2d-40d8-a497-e9a351a9cef9\") " pod="openstack/neutron-69f7ddf877-mclzd" Oct 10 16:51:14 
crc kubenswrapper[4799]: I1010 16:51:14.636914 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-config\") pod \"neutron-69f7ddf877-mclzd\" (UID: \"78820835-eb2d-40d8-a497-e9a351a9cef9\") " pod="openstack/neutron-69f7ddf877-mclzd" Oct 10 16:51:14 crc kubenswrapper[4799]: I1010 16:51:14.636959 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-httpd-config\") pod \"neutron-69f7ddf877-mclzd\" (UID: \"78820835-eb2d-40d8-a497-e9a351a9cef9\") " pod="openstack/neutron-69f7ddf877-mclzd" Oct 10 16:51:14 crc kubenswrapper[4799]: I1010 16:51:14.738504 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-internal-tls-certs\") pod \"neutron-69f7ddf877-mclzd\" (UID: \"78820835-eb2d-40d8-a497-e9a351a9cef9\") " pod="openstack/neutron-69f7ddf877-mclzd" Oct 10 16:51:14 crc kubenswrapper[4799]: I1010 16:51:14.738538 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-ovndb-tls-certs\") pod \"neutron-69f7ddf877-mclzd\" (UID: \"78820835-eb2d-40d8-a497-e9a351a9cef9\") " pod="openstack/neutron-69f7ddf877-mclzd" Oct 10 16:51:14 crc kubenswrapper[4799]: I1010 16:51:14.738554 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-public-tls-certs\") pod \"neutron-69f7ddf877-mclzd\" (UID: \"78820835-eb2d-40d8-a497-e9a351a9cef9\") " pod="openstack/neutron-69f7ddf877-mclzd" Oct 10 16:51:14 crc kubenswrapper[4799]: I1010 16:51:14.738589 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-config\") pod \"neutron-69f7ddf877-mclzd\" (UID: \"78820835-eb2d-40d8-a497-e9a351a9cef9\") " pod="openstack/neutron-69f7ddf877-mclzd" Oct 10 16:51:14 crc kubenswrapper[4799]: I1010 16:51:14.738608 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-httpd-config\") pod \"neutron-69f7ddf877-mclzd\" (UID: \"78820835-eb2d-40d8-a497-e9a351a9cef9\") " pod="openstack/neutron-69f7ddf877-mclzd" Oct 10 16:51:14 crc kubenswrapper[4799]: I1010 16:51:14.738679 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pczq6\" (UniqueName: \"kubernetes.io/projected/78820835-eb2d-40d8-a497-e9a351a9cef9-kube-api-access-pczq6\") pod \"neutron-69f7ddf877-mclzd\" (UID: \"78820835-eb2d-40d8-a497-e9a351a9cef9\") " pod="openstack/neutron-69f7ddf877-mclzd" Oct 10 16:51:14 crc kubenswrapper[4799]: I1010 16:51:14.738714 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-combined-ca-bundle\") pod \"neutron-69f7ddf877-mclzd\" (UID: \"78820835-eb2d-40d8-a497-e9a351a9cef9\") " pod="openstack/neutron-69f7ddf877-mclzd" Oct 10 16:51:14 crc kubenswrapper[4799]: I1010 16:51:14.742494 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-744fd954cc-ngzjd" event={"ID":"3bab4b53-c147-4875-aaea-df06dae44b04","Type":"ContainerStarted","Data":"ae3095796bd451a14cd4ec37a6aac8ee71abb63b13b255c1a69cc358efc9d139"} Oct 10 16:51:14 crc kubenswrapper[4799]: I1010 16:51:14.754089 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-combined-ca-bundle\") pod \"neutron-69f7ddf877-mclzd\" (UID: \"78820835-eb2d-40d8-a497-e9a351a9cef9\") " pod="openstack/neutron-69f7ddf877-mclzd" Oct 10 16:51:14 crc kubenswrapper[4799]: I1010 16:51:14.764628 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-public-tls-certs\") pod \"neutron-69f7ddf877-mclzd\" (UID: \"78820835-eb2d-40d8-a497-e9a351a9cef9\") " pod="openstack/neutron-69f7ddf877-mclzd" Oct 10 16:51:14 crc kubenswrapper[4799]: I1010 16:51:14.767979 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-internal-tls-certs\") pod \"neutron-69f7ddf877-mclzd\" (UID: \"78820835-eb2d-40d8-a497-e9a351a9cef9\") " pod="openstack/neutron-69f7ddf877-mclzd" Oct 10 16:51:14 crc kubenswrapper[4799]: I1010 16:51:14.772003 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pczq6\" (UniqueName: \"kubernetes.io/projected/78820835-eb2d-40d8-a497-e9a351a9cef9-kube-api-access-pczq6\") pod \"neutron-69f7ddf877-mclzd\" (UID: \"78820835-eb2d-40d8-a497-e9a351a9cef9\") " pod="openstack/neutron-69f7ddf877-mclzd" Oct 10 16:51:14 crc kubenswrapper[4799]: I1010 16:51:14.779483 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-744fd954cc-ngzjd" podStartSLOduration=3.779460358 podStartE2EDuration="3.779460358s" podCreationTimestamp="2025-10-10 16:51:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:51:14.772162888 +0000 UTC m=+1168.280487013" watchObservedRunningTime="2025-10-10 16:51:14.779460358 +0000 UTC m=+1168.287784473" Oct 10 16:51:14 crc kubenswrapper[4799]: I1010 16:51:14.787503 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-httpd-config\") pod \"neutron-69f7ddf877-mclzd\" (UID: \"78820835-eb2d-40d8-a497-e9a351a9cef9\") " pod="openstack/neutron-69f7ddf877-mclzd" Oct 10 16:51:14 crc kubenswrapper[4799]: I1010 16:51:14.801555 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-config\") pod \"neutron-69f7ddf877-mclzd\" (UID: \"78820835-eb2d-40d8-a497-e9a351a9cef9\") " pod="openstack/neutron-69f7ddf877-mclzd" Oct 10 16:51:14 crc kubenswrapper[4799]: I1010 16:51:14.829415 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-ovndb-tls-certs\") pod \"neutron-69f7ddf877-mclzd\" (UID: \"78820835-eb2d-40d8-a497-e9a351a9cef9\") " pod="openstack/neutron-69f7ddf877-mclzd" Oct 10 16:51:14 crc kubenswrapper[4799]: I1010 16:51:14.867125 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-69f7ddf877-mclzd" Oct 10 16:51:15 crc kubenswrapper[4799]: I1010 16:51:15.755573 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5ead453-da07-4656-ae87-9d4bc94daee7","Type":"ContainerStarted","Data":"30d5477ecc3dcc08c02cacb0310066bcb7edc48b8b0dde7ac2c44b6f6066ee5e"} Oct 10 16:51:15 crc kubenswrapper[4799]: I1010 16:51:15.756238 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-744fd954cc-ngzjd" Oct 10 16:51:15 crc kubenswrapper[4799]: I1010 16:51:15.783211 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7b6d7656b4-cxw2s" Oct 10 16:51:16 crc kubenswrapper[4799]: I1010 16:51:16.033978 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-69f7ddf877-mclzd"] Oct 10 16:51:16 crc kubenswrapper[4799]: W1010 16:51:16.045715 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod78820835_eb2d_40d8_a497_e9a351a9cef9.slice/crio-d9ca68973d27f952ebfee715da6c21a44b21d8c5cf3ff3dc873bc028ecb31da7 WatchSource:0}: Error finding container d9ca68973d27f952ebfee715da6c21a44b21d8c5cf3ff3dc873bc028ecb31da7: Status 404 returned error can't find the container with id d9ca68973d27f952ebfee715da6c21a44b21d8c5cf3ff3dc873bc028ecb31da7 Oct 10 16:51:16 crc kubenswrapper[4799]: I1010 16:51:16.769417 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-69f7ddf877-mclzd" event={"ID":"78820835-eb2d-40d8-a497-e9a351a9cef9","Type":"ContainerStarted","Data":"304a677749d95012ce2795e133dc4470b68b20824bdcbe3901e9d128f4e5ec4b"} Oct 10 16:51:16 crc kubenswrapper[4799]: I1010 16:51:16.769834 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-69f7ddf877-mclzd" event={"ID":"78820835-eb2d-40d8-a497-e9a351a9cef9","Type":"ContainerStarted","Data":"89ea0df023f8cd2efabe4a60d20707c43edc3f235ad2613e512cece899859399"} Oct 10 16:51:16 crc kubenswrapper[4799]: I1010 16:51:16.769855 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-69f7ddf877-mclzd" event={"ID":"78820835-eb2d-40d8-a497-e9a351a9cef9","Type":"ContainerStarted","Data":"d9ca68973d27f952ebfee715da6c21a44b21d8c5cf3ff3dc873bc028ecb31da7"} Oct 10 16:51:16 crc kubenswrapper[4799]: I1010 16:51:16.769906 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-69f7ddf877-mclzd" Oct 10 16:51:16 crc kubenswrapper[4799]: I1010 16:51:16.771578 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-dt6zw" event={"ID":"875e600d-b55a-48a9-a181-3ad09c24cc41","Type":"ContainerStarted","Data":"d62e15674081e38d400533f852a94a64631beca6244fed0891de2bc949a8005b"} Oct 10 16:51:16 crc kubenswrapper[4799]: I1010 16:51:16.793410 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-69f7ddf877-mclzd" podStartSLOduration=2.793396012 podStartE2EDuration="2.793396012s" podCreationTimestamp="2025-10-10 16:51:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:51:16.78722093 +0000 UTC m=+1170.295545045" watchObservedRunningTime="2025-10-10 16:51:16.793396012 +0000 UTC m=+1170.301720127" Oct 10 16:51:16 crc kubenswrapper[4799]: I1010 16:51:16.816452 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/cinder-db-sync-dt6zw" podStartSLOduration=2.612973866 podStartE2EDuration="40.816435911s" podCreationTimestamp="2025-10-10 16:50:36 +0000 UTC" firstStartedPulling="2025-10-10 16:50:37.699949332 +0000 UTC m=+1131.208273447" lastFinishedPulling="2025-10-10 16:51:15.903411367 +0000 UTC m=+1169.411735492" observedRunningTime="2025-10-10 16:51:16.803713107 +0000 UTC m=+1170.312037252" watchObservedRunningTime="2025-10-10 16:51:16.816435911 +0000 UTC m=+1170.324760026" Oct 10 16:51:16 crc kubenswrapper[4799]: I1010 16:51:16.984126 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-56d84d574d-x5sbm" Oct 10 16:51:17 crc kubenswrapper[4799]: I1010 16:51:17.055691 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-7b6d7656b4-cxw2s"] Oct 10 16:51:17 crc kubenswrapper[4799]: I1010 16:51:17.056025 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-7b6d7656b4-cxw2s" podUID="bebadb41-8336-4b6c-b20d-6b8130b165ce" containerName="barbican-api" containerID="cri-o://08df7005def57a0c28c64f0558472e4f8c70ae20c7c6a71a76edf6d9339eecd8" gracePeriod=30 Oct 10 16:51:17 crc kubenswrapper[4799]: I1010 16:51:17.056179 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-7b6d7656b4-cxw2s" podUID="bebadb41-8336-4b6c-b20d-6b8130b165ce" containerName="barbican-api-log" containerID="cri-o://9b71ee61665b3132d531889454e29722422a6a0b470d17895e43a1cf227396dc" gracePeriod=30 Oct 10 16:51:17 crc kubenswrapper[4799]: I1010 16:51:17.784318 4799 generic.go:334] "Generic (PLEG): container finished" podID="bebadb41-8336-4b6c-b20d-6b8130b165ce" containerID="9b71ee61665b3132d531889454e29722422a6a0b470d17895e43a1cf227396dc" exitCode=143 Oct 10 16:51:17 crc kubenswrapper[4799]: I1010 16:51:17.784404 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7b6d7656b4-cxw2s" event={"ID":"bebadb41-8336-4b6c-b20d-6b8130b165ce","Type":"ContainerDied","Data":"9b71ee61665b3132d531889454e29722422a6a0b470d17895e43a1cf227396dc"} Oct 10 16:51:17 crc kubenswrapper[4799]: I1010 16:51:17.787541 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5ead453-da07-4656-ae87-9d4bc94daee7","Type":"ContainerStarted","Data":"95b26ab8841c2b18e7c7fb3068e2fa261a28ce7e0fa8d8a115d33d584aa880fb"} Oct 10 16:51:17 crc kubenswrapper[4799]: I1010 16:51:17.818426 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.305040731 podStartE2EDuration="7.81840867s" podCreationTimestamp="2025-10-10 16:51:10 +0000 UTC" firstStartedPulling="2025-10-10 16:51:11.591614543 +0000 UTC m=+1165.099938658" lastFinishedPulling="2025-10-10 16:51:17.104982482 +0000 UTC m=+1170.613306597" observedRunningTime="2025-10-10 16:51:17.815411216 +0000 UTC m=+1171.323735341" watchObservedRunningTime="2025-10-10 16:51:17.81840867 +0000 UTC m=+1171.326732785" Oct 10 16:51:17 crc kubenswrapper[4799]: I1010 16:51:17.904148 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-657bb59659-swzhl" Oct 10 16:51:18 crc kubenswrapper[4799]: I1010 16:51:18.026308 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-6565b9cf48-rl77d" Oct 10 16:51:18 crc kubenswrapper[4799]: I1010 16:51:18.795401 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" 
Oct 10 16:51:20 crc kubenswrapper[4799]: I1010 16:51:20.221365 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7b6d7656b4-cxw2s" podUID="bebadb41-8336-4b6c-b20d-6b8130b165ce" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.161:9311/healthcheck\": read tcp 10.217.0.2:45014->10.217.0.161:9311: read: connection reset by peer" Oct 10 16:51:20 crc kubenswrapper[4799]: I1010 16:51:20.221381 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7b6d7656b4-cxw2s" podUID="bebadb41-8336-4b6c-b20d-6b8130b165ce" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.161:9311/healthcheck\": read tcp 10.217.0.2:45006->10.217.0.161:9311: read: connection reset by peer" Oct 10 16:51:20 crc kubenswrapper[4799]: I1010 16:51:20.634397 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7b6d7656b4-cxw2s" Oct 10 16:51:20 crc kubenswrapper[4799]: I1010 16:51:20.772735 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bebadb41-8336-4b6c-b20d-6b8130b165ce-config-data\") pod \"bebadb41-8336-4b6c-b20d-6b8130b165ce\" (UID: \"bebadb41-8336-4b6c-b20d-6b8130b165ce\") " Oct 10 16:51:20 crc kubenswrapper[4799]: I1010 16:51:20.772800 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bebadb41-8336-4b6c-b20d-6b8130b165ce-combined-ca-bundle\") pod \"bebadb41-8336-4b6c-b20d-6b8130b165ce\" (UID: \"bebadb41-8336-4b6c-b20d-6b8130b165ce\") " Oct 10 16:51:20 crc kubenswrapper[4799]: I1010 16:51:20.772828 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bebadb41-8336-4b6c-b20d-6b8130b165ce-logs\") pod \"bebadb41-8336-4b6c-b20d-6b8130b165ce\" (UID: \"bebadb41-8336-4b6c-b20d-6b8130b165ce\") " Oct 10 16:51:20 crc kubenswrapper[4799]: I1010 16:51:20.772863 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bebadb41-8336-4b6c-b20d-6b8130b165ce-config-data-custom\") pod \"bebadb41-8336-4b6c-b20d-6b8130b165ce\" (UID: \"bebadb41-8336-4b6c-b20d-6b8130b165ce\") " Oct 10 16:51:20 crc kubenswrapper[4799]: I1010 16:51:20.772887 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w2mdk\" (UniqueName: \"kubernetes.io/projected/bebadb41-8336-4b6c-b20d-6b8130b165ce-kube-api-access-w2mdk\") pod \"bebadb41-8336-4b6c-b20d-6b8130b165ce\" (UID: \"bebadb41-8336-4b6c-b20d-6b8130b165ce\") " Oct 10 16:51:20 crc kubenswrapper[4799]: I1010 16:51:20.773573 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bebadb41-8336-4b6c-b20d-6b8130b165ce-logs" (OuterVolumeSpecName: "logs") pod "bebadb41-8336-4b6c-b20d-6b8130b165ce" (UID: "bebadb41-8336-4b6c-b20d-6b8130b165ce"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:51:20 crc kubenswrapper[4799]: I1010 16:51:20.780917 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bebadb41-8336-4b6c-b20d-6b8130b165ce-kube-api-access-w2mdk" (OuterVolumeSpecName: "kube-api-access-w2mdk") pod "bebadb41-8336-4b6c-b20d-6b8130b165ce" (UID: "bebadb41-8336-4b6c-b20d-6b8130b165ce"). 
InnerVolumeSpecName "kube-api-access-w2mdk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:51:20 crc kubenswrapper[4799]: I1010 16:51:20.787789 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bebadb41-8336-4b6c-b20d-6b8130b165ce-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "bebadb41-8336-4b6c-b20d-6b8130b165ce" (UID: "bebadb41-8336-4b6c-b20d-6b8130b165ce"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:20 crc kubenswrapper[4799]: I1010 16:51:20.809359 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bebadb41-8336-4b6c-b20d-6b8130b165ce-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bebadb41-8336-4b6c-b20d-6b8130b165ce" (UID: "bebadb41-8336-4b6c-b20d-6b8130b165ce"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:20 crc kubenswrapper[4799]: I1010 16:51:20.814445 4799 generic.go:334] "Generic (PLEG): container finished" podID="bebadb41-8336-4b6c-b20d-6b8130b165ce" containerID="08df7005def57a0c28c64f0558472e4f8c70ae20c7c6a71a76edf6d9339eecd8" exitCode=0 Oct 10 16:51:20 crc kubenswrapper[4799]: I1010 16:51:20.814485 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7b6d7656b4-cxw2s" event={"ID":"bebadb41-8336-4b6c-b20d-6b8130b165ce","Type":"ContainerDied","Data":"08df7005def57a0c28c64f0558472e4f8c70ae20c7c6a71a76edf6d9339eecd8"} Oct 10 16:51:20 crc kubenswrapper[4799]: I1010 16:51:20.814509 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7b6d7656b4-cxw2s" event={"ID":"bebadb41-8336-4b6c-b20d-6b8130b165ce","Type":"ContainerDied","Data":"789ad5d265e05ce0f577efe0bad124696a0e07f67ffdf611a4740084158a055a"} Oct 10 16:51:20 crc kubenswrapper[4799]: I1010 16:51:20.814525 4799 scope.go:117] "RemoveContainer" containerID="08df7005def57a0c28c64f0558472e4f8c70ae20c7c6a71a76edf6d9339eecd8" Oct 10 16:51:20 crc kubenswrapper[4799]: I1010 16:51:20.814602 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-7b6d7656b4-cxw2s" Oct 10 16:51:20 crc kubenswrapper[4799]: I1010 16:51:20.874970 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bebadb41-8336-4b6c-b20d-6b8130b165ce-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:20 crc kubenswrapper[4799]: I1010 16:51:20.875009 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bebadb41-8336-4b6c-b20d-6b8130b165ce-logs\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:20 crc kubenswrapper[4799]: I1010 16:51:20.875020 4799 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bebadb41-8336-4b6c-b20d-6b8130b165ce-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:20 crc kubenswrapper[4799]: I1010 16:51:20.875029 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w2mdk\" (UniqueName: \"kubernetes.io/projected/bebadb41-8336-4b6c-b20d-6b8130b165ce-kube-api-access-w2mdk\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:20 crc kubenswrapper[4799]: I1010 16:51:20.895900 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bebadb41-8336-4b6c-b20d-6b8130b165ce-config-data" (OuterVolumeSpecName: "config-data") pod "bebadb41-8336-4b6c-b20d-6b8130b165ce" (UID: "bebadb41-8336-4b6c-b20d-6b8130b165ce"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:20 crc kubenswrapper[4799]: I1010 16:51:20.912491 4799 scope.go:117] "RemoveContainer" containerID="9b71ee61665b3132d531889454e29722422a6a0b470d17895e43a1cf227396dc" Oct 10 16:51:20 crc kubenswrapper[4799]: I1010 16:51:20.929142 4799 scope.go:117] "RemoveContainer" containerID="08df7005def57a0c28c64f0558472e4f8c70ae20c7c6a71a76edf6d9339eecd8" Oct 10 16:51:20 crc kubenswrapper[4799]: E1010 16:51:20.929534 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"08df7005def57a0c28c64f0558472e4f8c70ae20c7c6a71a76edf6d9339eecd8\": container with ID starting with 08df7005def57a0c28c64f0558472e4f8c70ae20c7c6a71a76edf6d9339eecd8 not found: ID does not exist" containerID="08df7005def57a0c28c64f0558472e4f8c70ae20c7c6a71a76edf6d9339eecd8" Oct 10 16:51:20 crc kubenswrapper[4799]: I1010 16:51:20.929571 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08df7005def57a0c28c64f0558472e4f8c70ae20c7c6a71a76edf6d9339eecd8"} err="failed to get container status \"08df7005def57a0c28c64f0558472e4f8c70ae20c7c6a71a76edf6d9339eecd8\": rpc error: code = NotFound desc = could not find container \"08df7005def57a0c28c64f0558472e4f8c70ae20c7c6a71a76edf6d9339eecd8\": container with ID starting with 08df7005def57a0c28c64f0558472e4f8c70ae20c7c6a71a76edf6d9339eecd8 not found: ID does not exist" Oct 10 16:51:20 crc kubenswrapper[4799]: I1010 16:51:20.929595 4799 scope.go:117] "RemoveContainer" containerID="9b71ee61665b3132d531889454e29722422a6a0b470d17895e43a1cf227396dc" Oct 10 16:51:20 crc kubenswrapper[4799]: E1010 16:51:20.929993 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b71ee61665b3132d531889454e29722422a6a0b470d17895e43a1cf227396dc\": container with ID starting with 9b71ee61665b3132d531889454e29722422a6a0b470d17895e43a1cf227396dc not found: ID does not exist" 
containerID="9b71ee61665b3132d531889454e29722422a6a0b470d17895e43a1cf227396dc" Oct 10 16:51:20 crc kubenswrapper[4799]: I1010 16:51:20.930016 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b71ee61665b3132d531889454e29722422a6a0b470d17895e43a1cf227396dc"} err="failed to get container status \"9b71ee61665b3132d531889454e29722422a6a0b470d17895e43a1cf227396dc\": rpc error: code = NotFound desc = could not find container \"9b71ee61665b3132d531889454e29722422a6a0b470d17895e43a1cf227396dc\": container with ID starting with 9b71ee61665b3132d531889454e29722422a6a0b470d17895e43a1cf227396dc not found: ID does not exist" Oct 10 16:51:20 crc kubenswrapper[4799]: I1010 16:51:20.976684 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bebadb41-8336-4b6c-b20d-6b8130b165ce-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.044331 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Oct 10 16:51:21 crc kubenswrapper[4799]: E1010 16:51:21.044665 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bebadb41-8336-4b6c-b20d-6b8130b165ce" containerName="barbican-api" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.044680 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="bebadb41-8336-4b6c-b20d-6b8130b165ce" containerName="barbican-api" Oct 10 16:51:21 crc kubenswrapper[4799]: E1010 16:51:21.044708 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bebadb41-8336-4b6c-b20d-6b8130b165ce" containerName="barbican-api-log" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.044714 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="bebadb41-8336-4b6c-b20d-6b8130b165ce" containerName="barbican-api-log" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.044938 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="bebadb41-8336-4b6c-b20d-6b8130b165ce" containerName="barbican-api" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.044957 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="bebadb41-8336-4b6c-b20d-6b8130b165ce" containerName="barbican-api-log" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.045502 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.048231 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.048355 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-6br2r" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.048465 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.065963 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.143713 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-7b6d7656b4-cxw2s"] Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.150187 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-7b6d7656b4-cxw2s"] Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.180827 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a7314800-8b3b-4f07-838b-16e8652e2bc8-openstack-config\") pod \"openstackclient\" (UID: \"a7314800-8b3b-4f07-838b-16e8652e2bc8\") " pod="openstack/openstackclient" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.181166 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a7314800-8b3b-4f07-838b-16e8652e2bc8-openstack-config-secret\") pod \"openstackclient\" (UID: \"a7314800-8b3b-4f07-838b-16e8652e2bc8\") " pod="openstack/openstackclient" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.181304 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7314800-8b3b-4f07-838b-16e8652e2bc8-combined-ca-bundle\") pod \"openstackclient\" (UID: \"a7314800-8b3b-4f07-838b-16e8652e2bc8\") " pod="openstack/openstackclient" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.181405 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8cxdl\" (UniqueName: \"kubernetes.io/projected/a7314800-8b3b-4f07-838b-16e8652e2bc8-kube-api-access-8cxdl\") pod \"openstackclient\" (UID: \"a7314800-8b3b-4f07-838b-16e8652e2bc8\") " pod="openstack/openstackclient" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.283039 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a7314800-8b3b-4f07-838b-16e8652e2bc8-openstack-config\") pod \"openstackclient\" (UID: \"a7314800-8b3b-4f07-838b-16e8652e2bc8\") " pod="openstack/openstackclient" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.283147 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a7314800-8b3b-4f07-838b-16e8652e2bc8-openstack-config-secret\") pod \"openstackclient\" (UID: \"a7314800-8b3b-4f07-838b-16e8652e2bc8\") " pod="openstack/openstackclient" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.283190 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/a7314800-8b3b-4f07-838b-16e8652e2bc8-combined-ca-bundle\") pod \"openstackclient\" (UID: \"a7314800-8b3b-4f07-838b-16e8652e2bc8\") " pod="openstack/openstackclient" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.283215 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8cxdl\" (UniqueName: \"kubernetes.io/projected/a7314800-8b3b-4f07-838b-16e8652e2bc8-kube-api-access-8cxdl\") pod \"openstackclient\" (UID: \"a7314800-8b3b-4f07-838b-16e8652e2bc8\") " pod="openstack/openstackclient" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.284170 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a7314800-8b3b-4f07-838b-16e8652e2bc8-openstack-config\") pod \"openstackclient\" (UID: \"a7314800-8b3b-4f07-838b-16e8652e2bc8\") " pod="openstack/openstackclient" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.287603 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7314800-8b3b-4f07-838b-16e8652e2bc8-combined-ca-bundle\") pod \"openstackclient\" (UID: \"a7314800-8b3b-4f07-838b-16e8652e2bc8\") " pod="openstack/openstackclient" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.288259 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a7314800-8b3b-4f07-838b-16e8652e2bc8-openstack-config-secret\") pod \"openstackclient\" (UID: \"a7314800-8b3b-4f07-838b-16e8652e2bc8\") " pod="openstack/openstackclient" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.314349 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8cxdl\" (UniqueName: \"kubernetes.io/projected/a7314800-8b3b-4f07-838b-16e8652e2bc8-kube-api-access-8cxdl\") pod \"openstackclient\" (UID: \"a7314800-8b3b-4f07-838b-16e8652e2bc8\") " pod="openstack/openstackclient" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.344667 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.345386 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.353634 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.392552 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.393961 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.443288 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bebadb41-8336-4b6c-b20d-6b8130b165ce" path="/var/lib/kubelet/pods/bebadb41-8336-4b6c-b20d-6b8130b165ce/volumes" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.443865 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.486544 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/95c3e251-04ea-40ab-94d0-608d6ef0d8f3-openstack-config\") pod \"openstackclient\" (UID: \"95c3e251-04ea-40ab-94d0-608d6ef0d8f3\") " pod="openstack/openstackclient" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.486620 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95c3e251-04ea-40ab-94d0-608d6ef0d8f3-combined-ca-bundle\") pod \"openstackclient\" (UID: \"95c3e251-04ea-40ab-94d0-608d6ef0d8f3\") " pod="openstack/openstackclient" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.486661 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/95c3e251-04ea-40ab-94d0-608d6ef0d8f3-openstack-config-secret\") pod \"openstackclient\" (UID: \"95c3e251-04ea-40ab-94d0-608d6ef0d8f3\") " pod="openstack/openstackclient" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.486817 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qwn5h\" (UniqueName: \"kubernetes.io/projected/95c3e251-04ea-40ab-94d0-608d6ef0d8f3-kube-api-access-qwn5h\") pod \"openstackclient\" (UID: \"95c3e251-04ea-40ab-94d0-608d6ef0d8f3\") " pod="openstack/openstackclient" Oct 10 16:51:21 crc kubenswrapper[4799]: E1010 16:51:21.488916 4799 log.go:32] "RunPodSandbox from runtime service failed" err=< Oct 10 16:51:21 crc kubenswrapper[4799]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_a7314800-8b3b-4f07-838b-16e8652e2bc8_0(164e22d0e017aa0f01c255b3e5b64b7f64fe2b0142ba978354636d21d3e5032d): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"164e22d0e017aa0f01c255b3e5b64b7f64fe2b0142ba978354636d21d3e5032d" Netns:"/var/run/netns/6d88296e-6e12-4217-99d6-320f728e0d4c" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=164e22d0e017aa0f01c255b3e5b64b7f64fe2b0142ba978354636d21d3e5032d;K8S_POD_UID=a7314800-8b3b-4f07-838b-16e8652e2bc8" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/a7314800-8b3b-4f07-838b-16e8652e2bc8]: expected pod UID "a7314800-8b3b-4f07-838b-16e8652e2bc8" but got "95c3e251-04ea-40ab-94d0-608d6ef0d8f3" from Kube API Oct 10 16:51:21 crc kubenswrapper[4799]: ': StdinData: 
{"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Oct 10 16:51:21 crc kubenswrapper[4799]: > Oct 10 16:51:21 crc kubenswrapper[4799]: E1010 16:51:21.488959 4799 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=< Oct 10 16:51:21 crc kubenswrapper[4799]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_a7314800-8b3b-4f07-838b-16e8652e2bc8_0(164e22d0e017aa0f01c255b3e5b64b7f64fe2b0142ba978354636d21d3e5032d): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"164e22d0e017aa0f01c255b3e5b64b7f64fe2b0142ba978354636d21d3e5032d" Netns:"/var/run/netns/6d88296e-6e12-4217-99d6-320f728e0d4c" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=164e22d0e017aa0f01c255b3e5b64b7f64fe2b0142ba978354636d21d3e5032d;K8S_POD_UID=a7314800-8b3b-4f07-838b-16e8652e2bc8" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/a7314800-8b3b-4f07-838b-16e8652e2bc8]: expected pod UID "a7314800-8b3b-4f07-838b-16e8652e2bc8" but got "95c3e251-04ea-40ab-94d0-608d6ef0d8f3" from Kube API Oct 10 16:51:21 crc kubenswrapper[4799]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Oct 10 16:51:21 crc kubenswrapper[4799]: > pod="openstack/openstackclient" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.588673 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95c3e251-04ea-40ab-94d0-608d6ef0d8f3-combined-ca-bundle\") pod \"openstackclient\" (UID: \"95c3e251-04ea-40ab-94d0-608d6ef0d8f3\") " pod="openstack/openstackclient" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.588739 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/95c3e251-04ea-40ab-94d0-608d6ef0d8f3-openstack-config-secret\") pod \"openstackclient\" (UID: \"95c3e251-04ea-40ab-94d0-608d6ef0d8f3\") " pod="openstack/openstackclient" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.588840 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qwn5h\" (UniqueName: \"kubernetes.io/projected/95c3e251-04ea-40ab-94d0-608d6ef0d8f3-kube-api-access-qwn5h\") pod \"openstackclient\" (UID: \"95c3e251-04ea-40ab-94d0-608d6ef0d8f3\") " pod="openstack/openstackclient" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.588871 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/95c3e251-04ea-40ab-94d0-608d6ef0d8f3-openstack-config\") pod \"openstackclient\" (UID: 
\"95c3e251-04ea-40ab-94d0-608d6ef0d8f3\") " pod="openstack/openstackclient" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.589679 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/95c3e251-04ea-40ab-94d0-608d6ef0d8f3-openstack-config\") pod \"openstackclient\" (UID: \"95c3e251-04ea-40ab-94d0-608d6ef0d8f3\") " pod="openstack/openstackclient" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.593601 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/95c3e251-04ea-40ab-94d0-608d6ef0d8f3-openstack-config-secret\") pod \"openstackclient\" (UID: \"95c3e251-04ea-40ab-94d0-608d6ef0d8f3\") " pod="openstack/openstackclient" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.593970 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95c3e251-04ea-40ab-94d0-608d6ef0d8f3-combined-ca-bundle\") pod \"openstackclient\" (UID: \"95c3e251-04ea-40ab-94d0-608d6ef0d8f3\") " pod="openstack/openstackclient" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.605021 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qwn5h\" (UniqueName: \"kubernetes.io/projected/95c3e251-04ea-40ab-94d0-608d6ef0d8f3-kube-api-access-qwn5h\") pod \"openstackclient\" (UID: \"95c3e251-04ea-40ab-94d0-608d6ef0d8f3\") " pod="openstack/openstackclient" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.770609 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.823904 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.833343 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.836267 4799 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="a7314800-8b3b-4f07-838b-16e8652e2bc8" podUID="95c3e251-04ea-40ab-94d0-608d6ef0d8f3" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.995374 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7314800-8b3b-4f07-838b-16e8652e2bc8-combined-ca-bundle\") pod \"a7314800-8b3b-4f07-838b-16e8652e2bc8\" (UID: \"a7314800-8b3b-4f07-838b-16e8652e2bc8\") " Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.995472 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a7314800-8b3b-4f07-838b-16e8652e2bc8-openstack-config\") pod \"a7314800-8b3b-4f07-838b-16e8652e2bc8\" (UID: \"a7314800-8b3b-4f07-838b-16e8652e2bc8\") " Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.995519 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8cxdl\" (UniqueName: \"kubernetes.io/projected/a7314800-8b3b-4f07-838b-16e8652e2bc8-kube-api-access-8cxdl\") pod \"a7314800-8b3b-4f07-838b-16e8652e2bc8\" (UID: \"a7314800-8b3b-4f07-838b-16e8652e2bc8\") " Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.995587 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a7314800-8b3b-4f07-838b-16e8652e2bc8-openstack-config-secret\") pod \"a7314800-8b3b-4f07-838b-16e8652e2bc8\" (UID: \"a7314800-8b3b-4f07-838b-16e8652e2bc8\") " Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.996058 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a7314800-8b3b-4f07-838b-16e8652e2bc8-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "a7314800-8b3b-4f07-838b-16e8652e2bc8" (UID: "a7314800-8b3b-4f07-838b-16e8652e2bc8"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:51:21 crc kubenswrapper[4799]: I1010 16:51:21.996197 4799 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a7314800-8b3b-4f07-838b-16e8652e2bc8-openstack-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:22 crc kubenswrapper[4799]: I1010 16:51:22.003043 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7314800-8b3b-4f07-838b-16e8652e2bc8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a7314800-8b3b-4f07-838b-16e8652e2bc8" (UID: "a7314800-8b3b-4f07-838b-16e8652e2bc8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:22 crc kubenswrapper[4799]: I1010 16:51:22.003935 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7314800-8b3b-4f07-838b-16e8652e2bc8-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "a7314800-8b3b-4f07-838b-16e8652e2bc8" (UID: "a7314800-8b3b-4f07-838b-16e8652e2bc8"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:22 crc kubenswrapper[4799]: I1010 16:51:22.007832 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7314800-8b3b-4f07-838b-16e8652e2bc8-kube-api-access-8cxdl" (OuterVolumeSpecName: "kube-api-access-8cxdl") pod "a7314800-8b3b-4f07-838b-16e8652e2bc8" (UID: "a7314800-8b3b-4f07-838b-16e8652e2bc8"). InnerVolumeSpecName "kube-api-access-8cxdl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:51:22 crc kubenswrapper[4799]: I1010 16:51:22.098466 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7314800-8b3b-4f07-838b-16e8652e2bc8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:22 crc kubenswrapper[4799]: I1010 16:51:22.098503 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8cxdl\" (UniqueName: \"kubernetes.io/projected/a7314800-8b3b-4f07-838b-16e8652e2bc8-kube-api-access-8cxdl\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:22 crc kubenswrapper[4799]: I1010 16:51:22.098514 4799 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a7314800-8b3b-4f07-838b-16e8652e2bc8-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:22 crc kubenswrapper[4799]: I1010 16:51:22.215817 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 10 16:51:22 crc kubenswrapper[4799]: W1010 16:51:22.220959 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod95c3e251_04ea_40ab_94d0_608d6ef0d8f3.slice/crio-5db9132742050481600358fe329def75c6b8f3bae323f1d3e96f951a00ca083c WatchSource:0}: Error finding container 5db9132742050481600358fe329def75c6b8f3bae323f1d3e96f951a00ca083c: Status 404 returned error can't find the container with id 5db9132742050481600358fe329def75c6b8f3bae323f1d3e96f951a00ca083c Oct 10 16:51:22 crc kubenswrapper[4799]: I1010 16:51:22.435012 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-744fd954cc-ngzjd" Oct 10 16:51:22 crc kubenswrapper[4799]: I1010 16:51:22.528570 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74776f5dd7-7jxz4"] Oct 10 16:51:22 crc kubenswrapper[4799]: I1010 16:51:22.528891 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-74776f5dd7-7jxz4" podUID="5a8fddc9-9cab-41e0-90c4-3c797749e5e2" containerName="dnsmasq-dns" containerID="cri-o://6ed3353899f4c0225963fc765a124e1b2d481711fafe23a817601fba30fa1384" gracePeriod=10 Oct 10 16:51:22 crc kubenswrapper[4799]: I1010 16:51:22.845327 4799 generic.go:334] "Generic (PLEG): container finished" podID="5a8fddc9-9cab-41e0-90c4-3c797749e5e2" containerID="6ed3353899f4c0225963fc765a124e1b2d481711fafe23a817601fba30fa1384" exitCode=0 Oct 10 16:51:22 crc kubenswrapper[4799]: I1010 16:51:22.845617 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74776f5dd7-7jxz4" event={"ID":"5a8fddc9-9cab-41e0-90c4-3c797749e5e2","Type":"ContainerDied","Data":"6ed3353899f4c0225963fc765a124e1b2d481711fafe23a817601fba30fa1384"} Oct 10 16:51:22 crc kubenswrapper[4799]: I1010 16:51:22.846933 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 10 16:51:22 crc kubenswrapper[4799]: I1010 16:51:22.847406 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"95c3e251-04ea-40ab-94d0-608d6ef0d8f3","Type":"ContainerStarted","Data":"5db9132742050481600358fe329def75c6b8f3bae323f1d3e96f951a00ca083c"} Oct 10 16:51:22 crc kubenswrapper[4799]: I1010 16:51:22.862276 4799 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="a7314800-8b3b-4f07-838b-16e8652e2bc8" podUID="95c3e251-04ea-40ab-94d0-608d6ef0d8f3" Oct 10 16:51:23 crc kubenswrapper[4799]: I1010 16:51:23.013368 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74776f5dd7-7jxz4" Oct 10 16:51:23 crc kubenswrapper[4799]: I1010 16:51:23.113107 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nrz7m\" (UniqueName: \"kubernetes.io/projected/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-kube-api-access-nrz7m\") pod \"5a8fddc9-9cab-41e0-90c4-3c797749e5e2\" (UID: \"5a8fddc9-9cab-41e0-90c4-3c797749e5e2\") " Oct 10 16:51:23 crc kubenswrapper[4799]: I1010 16:51:23.113238 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-dns-svc\") pod \"5a8fddc9-9cab-41e0-90c4-3c797749e5e2\" (UID: \"5a8fddc9-9cab-41e0-90c4-3c797749e5e2\") " Oct 10 16:51:23 crc kubenswrapper[4799]: I1010 16:51:23.113315 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-dns-swift-storage-0\") pod \"5a8fddc9-9cab-41e0-90c4-3c797749e5e2\" (UID: \"5a8fddc9-9cab-41e0-90c4-3c797749e5e2\") " Oct 10 16:51:23 crc kubenswrapper[4799]: I1010 16:51:23.113450 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-ovsdbserver-sb\") pod \"5a8fddc9-9cab-41e0-90c4-3c797749e5e2\" (UID: \"5a8fddc9-9cab-41e0-90c4-3c797749e5e2\") " Oct 10 16:51:23 crc kubenswrapper[4799]: I1010 16:51:23.113488 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-ovsdbserver-nb\") pod \"5a8fddc9-9cab-41e0-90c4-3c797749e5e2\" (UID: \"5a8fddc9-9cab-41e0-90c4-3c797749e5e2\") " Oct 10 16:51:23 crc kubenswrapper[4799]: I1010 16:51:23.113589 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-config\") pod \"5a8fddc9-9cab-41e0-90c4-3c797749e5e2\" (UID: \"5a8fddc9-9cab-41e0-90c4-3c797749e5e2\") " Oct 10 16:51:23 crc kubenswrapper[4799]: I1010 16:51:23.134046 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-kube-api-access-nrz7m" (OuterVolumeSpecName: "kube-api-access-nrz7m") pod "5a8fddc9-9cab-41e0-90c4-3c797749e5e2" (UID: "5a8fddc9-9cab-41e0-90c4-3c797749e5e2"). InnerVolumeSpecName "kube-api-access-nrz7m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:51:23 crc kubenswrapper[4799]: I1010 16:51:23.178350 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "5a8fddc9-9cab-41e0-90c4-3c797749e5e2" (UID: "5a8fddc9-9cab-41e0-90c4-3c797749e5e2"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:51:23 crc kubenswrapper[4799]: I1010 16:51:23.182559 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5a8fddc9-9cab-41e0-90c4-3c797749e5e2" (UID: "5a8fddc9-9cab-41e0-90c4-3c797749e5e2"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:51:23 crc kubenswrapper[4799]: I1010 16:51:23.209419 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5a8fddc9-9cab-41e0-90c4-3c797749e5e2" (UID: "5a8fddc9-9cab-41e0-90c4-3c797749e5e2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:51:23 crc kubenswrapper[4799]: I1010 16:51:23.209433 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-config" (OuterVolumeSpecName: "config") pod "5a8fddc9-9cab-41e0-90c4-3c797749e5e2" (UID: "5a8fddc9-9cab-41e0-90c4-3c797749e5e2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:51:23 crc kubenswrapper[4799]: I1010 16:51:23.209761 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5a8fddc9-9cab-41e0-90c4-3c797749e5e2" (UID: "5a8fddc9-9cab-41e0-90c4-3c797749e5e2"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:51:23 crc kubenswrapper[4799]: I1010 16:51:23.215445 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nrz7m\" (UniqueName: \"kubernetes.io/projected/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-kube-api-access-nrz7m\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:23 crc kubenswrapper[4799]: I1010 16:51:23.215465 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:23 crc kubenswrapper[4799]: I1010 16:51:23.215475 4799 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:23 crc kubenswrapper[4799]: I1010 16:51:23.215484 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:23 crc kubenswrapper[4799]: I1010 16:51:23.215492 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:23 crc kubenswrapper[4799]: I1010 16:51:23.215500 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a8fddc9-9cab-41e0-90c4-3c797749e5e2-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:23 crc kubenswrapper[4799]: I1010 16:51:23.429415 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7314800-8b3b-4f07-838b-16e8652e2bc8" path="/var/lib/kubelet/pods/a7314800-8b3b-4f07-838b-16e8652e2bc8/volumes" Oct 10 16:51:23 crc kubenswrapper[4799]: I1010 16:51:23.856949 4799 generic.go:334] "Generic (PLEG): container finished" podID="875e600d-b55a-48a9-a181-3ad09c24cc41" containerID="d62e15674081e38d400533f852a94a64631beca6244fed0891de2bc949a8005b" exitCode=0 Oct 10 16:51:23 crc kubenswrapper[4799]: I1010 16:51:23.857014 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-dt6zw" event={"ID":"875e600d-b55a-48a9-a181-3ad09c24cc41","Type":"ContainerDied","Data":"d62e15674081e38d400533f852a94a64631beca6244fed0891de2bc949a8005b"} Oct 10 16:51:23 crc kubenswrapper[4799]: I1010 16:51:23.861675 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74776f5dd7-7jxz4" event={"ID":"5a8fddc9-9cab-41e0-90c4-3c797749e5e2","Type":"ContainerDied","Data":"a8b3d6d44948faf7188152133bb1654533232a3262d8ae716cf232eed375ee46"} Oct 10 16:51:23 crc kubenswrapper[4799]: I1010 16:51:23.861713 4799 scope.go:117] "RemoveContainer" containerID="6ed3353899f4c0225963fc765a124e1b2d481711fafe23a817601fba30fa1384" Oct 10 16:51:23 crc kubenswrapper[4799]: I1010 16:51:23.861862 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74776f5dd7-7jxz4" Oct 10 16:51:23 crc kubenswrapper[4799]: I1010 16:51:23.890308 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74776f5dd7-7jxz4"] Oct 10 16:51:23 crc kubenswrapper[4799]: I1010 16:51:23.894974 4799 scope.go:117] "RemoveContainer" containerID="9439cc6e202ea07c09476fcf8d57e61e2dccdd23fec47bfd562e6adabebb3783" Oct 10 16:51:23 crc kubenswrapper[4799]: I1010 16:51:23.901090 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-74776f5dd7-7jxz4"] Oct 10 16:51:24 crc kubenswrapper[4799]: I1010 16:51:24.571074 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-69dc9744df-smbqh"] Oct 10 16:51:24 crc kubenswrapper[4799]: E1010 16:51:24.571517 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a8fddc9-9cab-41e0-90c4-3c797749e5e2" containerName="dnsmasq-dns" Oct 10 16:51:24 crc kubenswrapper[4799]: I1010 16:51:24.571537 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a8fddc9-9cab-41e0-90c4-3c797749e5e2" containerName="dnsmasq-dns" Oct 10 16:51:24 crc kubenswrapper[4799]: E1010 16:51:24.571579 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a8fddc9-9cab-41e0-90c4-3c797749e5e2" containerName="init" Oct 10 16:51:24 crc kubenswrapper[4799]: I1010 16:51:24.571588 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a8fddc9-9cab-41e0-90c4-3c797749e5e2" containerName="init" Oct 10 16:51:24 crc kubenswrapper[4799]: I1010 16:51:24.571828 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a8fddc9-9cab-41e0-90c4-3c797749e5e2" containerName="dnsmasq-dns" Oct 10 16:51:24 crc kubenswrapper[4799]: I1010 16:51:24.573006 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-69dc9744df-smbqh" Oct 10 16:51:24 crc kubenswrapper[4799]: I1010 16:51:24.574919 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Oct 10 16:51:24 crc kubenswrapper[4799]: I1010 16:51:24.575575 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Oct 10 16:51:24 crc kubenswrapper[4799]: I1010 16:51:24.575915 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Oct 10 16:51:24 crc kubenswrapper[4799]: I1010 16:51:24.587530 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-69dc9744df-smbqh"] Oct 10 16:51:24 crc kubenswrapper[4799]: I1010 16:51:24.757404 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/34f6a30f-81f3-4240-8a4e-d7f1220801ab-internal-tls-certs\") pod \"swift-proxy-69dc9744df-smbqh\" (UID: \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\") " pod="openstack/swift-proxy-69dc9744df-smbqh" Oct 10 16:51:24 crc kubenswrapper[4799]: I1010 16:51:24.757456 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/34f6a30f-81f3-4240-8a4e-d7f1220801ab-log-httpd\") pod \"swift-proxy-69dc9744df-smbqh\" (UID: \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\") " pod="openstack/swift-proxy-69dc9744df-smbqh" Oct 10 16:51:24 crc kubenswrapper[4799]: I1010 16:51:24.757557 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34f6a30f-81f3-4240-8a4e-d7f1220801ab-combined-ca-bundle\") pod \"swift-proxy-69dc9744df-smbqh\" (UID: \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\") " pod="openstack/swift-proxy-69dc9744df-smbqh" Oct 10 16:51:24 crc kubenswrapper[4799]: I1010 16:51:24.757687 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/34f6a30f-81f3-4240-8a4e-d7f1220801ab-public-tls-certs\") pod \"swift-proxy-69dc9744df-smbqh\" (UID: \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\") " pod="openstack/swift-proxy-69dc9744df-smbqh" Oct 10 16:51:24 crc kubenswrapper[4799]: I1010 16:51:24.757750 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/34f6a30f-81f3-4240-8a4e-d7f1220801ab-run-httpd\") pod \"swift-proxy-69dc9744df-smbqh\" (UID: \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\") " pod="openstack/swift-proxy-69dc9744df-smbqh" Oct 10 16:51:24 crc kubenswrapper[4799]: I1010 16:51:24.757821 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f672z\" (UniqueName: \"kubernetes.io/projected/34f6a30f-81f3-4240-8a4e-d7f1220801ab-kube-api-access-f672z\") pod \"swift-proxy-69dc9744df-smbqh\" (UID: \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\") " pod="openstack/swift-proxy-69dc9744df-smbqh" Oct 10 16:51:24 crc kubenswrapper[4799]: I1010 16:51:24.757990 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/34f6a30f-81f3-4240-8a4e-d7f1220801ab-etc-swift\") pod \"swift-proxy-69dc9744df-smbqh\" (UID: \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\") " 
pod="openstack/swift-proxy-69dc9744df-smbqh" Oct 10 16:51:24 crc kubenswrapper[4799]: I1010 16:51:24.758064 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34f6a30f-81f3-4240-8a4e-d7f1220801ab-config-data\") pod \"swift-proxy-69dc9744df-smbqh\" (UID: \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\") " pod="openstack/swift-proxy-69dc9744df-smbqh" Oct 10 16:51:24 crc kubenswrapper[4799]: I1010 16:51:24.859462 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/34f6a30f-81f3-4240-8a4e-d7f1220801ab-log-httpd\") pod \"swift-proxy-69dc9744df-smbqh\" (UID: \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\") " pod="openstack/swift-proxy-69dc9744df-smbqh" Oct 10 16:51:24 crc kubenswrapper[4799]: I1010 16:51:24.859846 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34f6a30f-81f3-4240-8a4e-d7f1220801ab-combined-ca-bundle\") pod \"swift-proxy-69dc9744df-smbqh\" (UID: \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\") " pod="openstack/swift-proxy-69dc9744df-smbqh" Oct 10 16:51:24 crc kubenswrapper[4799]: I1010 16:51:24.859886 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/34f6a30f-81f3-4240-8a4e-d7f1220801ab-public-tls-certs\") pod \"swift-proxy-69dc9744df-smbqh\" (UID: \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\") " pod="openstack/swift-proxy-69dc9744df-smbqh" Oct 10 16:51:24 crc kubenswrapper[4799]: I1010 16:51:24.859909 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/34f6a30f-81f3-4240-8a4e-d7f1220801ab-run-httpd\") pod \"swift-proxy-69dc9744df-smbqh\" (UID: \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\") " pod="openstack/swift-proxy-69dc9744df-smbqh" Oct 10 16:51:24 crc kubenswrapper[4799]: I1010 16:51:24.859939 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f672z\" (UniqueName: \"kubernetes.io/projected/34f6a30f-81f3-4240-8a4e-d7f1220801ab-kube-api-access-f672z\") pod \"swift-proxy-69dc9744df-smbqh\" (UID: \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\") " pod="openstack/swift-proxy-69dc9744df-smbqh" Oct 10 16:51:24 crc kubenswrapper[4799]: I1010 16:51:24.859998 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/34f6a30f-81f3-4240-8a4e-d7f1220801ab-etc-swift\") pod \"swift-proxy-69dc9744df-smbqh\" (UID: \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\") " pod="openstack/swift-proxy-69dc9744df-smbqh" Oct 10 16:51:24 crc kubenswrapper[4799]: I1010 16:51:24.860033 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34f6a30f-81f3-4240-8a4e-d7f1220801ab-config-data\") pod \"swift-proxy-69dc9744df-smbqh\" (UID: \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\") " pod="openstack/swift-proxy-69dc9744df-smbqh" Oct 10 16:51:24 crc kubenswrapper[4799]: I1010 16:51:24.860061 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/34f6a30f-81f3-4240-8a4e-d7f1220801ab-internal-tls-certs\") pod \"swift-proxy-69dc9744df-smbqh\" (UID: \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\") " pod="openstack/swift-proxy-69dc9744df-smbqh" Oct 10 
16:51:24 crc kubenswrapper[4799]: I1010 16:51:24.860212 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/34f6a30f-81f3-4240-8a4e-d7f1220801ab-run-httpd\") pod \"swift-proxy-69dc9744df-smbqh\" (UID: \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\") " pod="openstack/swift-proxy-69dc9744df-smbqh" Oct 10 16:51:24 crc kubenswrapper[4799]: I1010 16:51:24.860907 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/34f6a30f-81f3-4240-8a4e-d7f1220801ab-log-httpd\") pod \"swift-proxy-69dc9744df-smbqh\" (UID: \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\") " pod="openstack/swift-proxy-69dc9744df-smbqh" Oct 10 16:51:24 crc kubenswrapper[4799]: I1010 16:51:24.865548 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34f6a30f-81f3-4240-8a4e-d7f1220801ab-combined-ca-bundle\") pod \"swift-proxy-69dc9744df-smbqh\" (UID: \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\") " pod="openstack/swift-proxy-69dc9744df-smbqh" Oct 10 16:51:24 crc kubenswrapper[4799]: I1010 16:51:24.866481 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/34f6a30f-81f3-4240-8a4e-d7f1220801ab-etc-swift\") pod \"swift-proxy-69dc9744df-smbqh\" (UID: \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\") " pod="openstack/swift-proxy-69dc9744df-smbqh" Oct 10 16:51:24 crc kubenswrapper[4799]: I1010 16:51:24.867148 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34f6a30f-81f3-4240-8a4e-d7f1220801ab-config-data\") pod \"swift-proxy-69dc9744df-smbqh\" (UID: \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\") " pod="openstack/swift-proxy-69dc9744df-smbqh" Oct 10 16:51:24 crc kubenswrapper[4799]: I1010 16:51:24.870447 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/34f6a30f-81f3-4240-8a4e-d7f1220801ab-public-tls-certs\") pod \"swift-proxy-69dc9744df-smbqh\" (UID: \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\") " pod="openstack/swift-proxy-69dc9744df-smbqh" Oct 10 16:51:24 crc kubenswrapper[4799]: I1010 16:51:24.874535 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/34f6a30f-81f3-4240-8a4e-d7f1220801ab-internal-tls-certs\") pod \"swift-proxy-69dc9744df-smbqh\" (UID: \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\") " pod="openstack/swift-proxy-69dc9744df-smbqh" Oct 10 16:51:24 crc kubenswrapper[4799]: I1010 16:51:24.885940 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f672z\" (UniqueName: \"kubernetes.io/projected/34f6a30f-81f3-4240-8a4e-d7f1220801ab-kube-api-access-f672z\") pod \"swift-proxy-69dc9744df-smbqh\" (UID: \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\") " pod="openstack/swift-proxy-69dc9744df-smbqh" Oct 10 16:51:24 crc kubenswrapper[4799]: I1010 16:51:24.910766 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-69dc9744df-smbqh" Oct 10 16:51:25 crc kubenswrapper[4799]: I1010 16:51:25.258679 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-dt6zw" Oct 10 16:51:25 crc kubenswrapper[4799]: I1010 16:51:25.368366 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/875e600d-b55a-48a9-a181-3ad09c24cc41-config-data\") pod \"875e600d-b55a-48a9-a181-3ad09c24cc41\" (UID: \"875e600d-b55a-48a9-a181-3ad09c24cc41\") " Oct 10 16:51:25 crc kubenswrapper[4799]: I1010 16:51:25.368405 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/875e600d-b55a-48a9-a181-3ad09c24cc41-combined-ca-bundle\") pod \"875e600d-b55a-48a9-a181-3ad09c24cc41\" (UID: \"875e600d-b55a-48a9-a181-3ad09c24cc41\") " Oct 10 16:51:25 crc kubenswrapper[4799]: I1010 16:51:25.368465 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-drvnl\" (UniqueName: \"kubernetes.io/projected/875e600d-b55a-48a9-a181-3ad09c24cc41-kube-api-access-drvnl\") pod \"875e600d-b55a-48a9-a181-3ad09c24cc41\" (UID: \"875e600d-b55a-48a9-a181-3ad09c24cc41\") " Oct 10 16:51:25 crc kubenswrapper[4799]: I1010 16:51:25.368533 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/875e600d-b55a-48a9-a181-3ad09c24cc41-db-sync-config-data\") pod \"875e600d-b55a-48a9-a181-3ad09c24cc41\" (UID: \"875e600d-b55a-48a9-a181-3ad09c24cc41\") " Oct 10 16:51:25 crc kubenswrapper[4799]: I1010 16:51:25.368573 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/875e600d-b55a-48a9-a181-3ad09c24cc41-etc-machine-id\") pod \"875e600d-b55a-48a9-a181-3ad09c24cc41\" (UID: \"875e600d-b55a-48a9-a181-3ad09c24cc41\") " Oct 10 16:51:25 crc kubenswrapper[4799]: I1010 16:51:25.368600 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/875e600d-b55a-48a9-a181-3ad09c24cc41-scripts\") pod \"875e600d-b55a-48a9-a181-3ad09c24cc41\" (UID: \"875e600d-b55a-48a9-a181-3ad09c24cc41\") " Oct 10 16:51:25 crc kubenswrapper[4799]: I1010 16:51:25.368661 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/875e600d-b55a-48a9-a181-3ad09c24cc41-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "875e600d-b55a-48a9-a181-3ad09c24cc41" (UID: "875e600d-b55a-48a9-a181-3ad09c24cc41"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 16:51:25 crc kubenswrapper[4799]: I1010 16:51:25.368926 4799 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/875e600d-b55a-48a9-a181-3ad09c24cc41-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:25 crc kubenswrapper[4799]: I1010 16:51:25.373851 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/875e600d-b55a-48a9-a181-3ad09c24cc41-scripts" (OuterVolumeSpecName: "scripts") pod "875e600d-b55a-48a9-a181-3ad09c24cc41" (UID: "875e600d-b55a-48a9-a181-3ad09c24cc41"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:25 crc kubenswrapper[4799]: I1010 16:51:25.374107 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/875e600d-b55a-48a9-a181-3ad09c24cc41-kube-api-access-drvnl" (OuterVolumeSpecName: "kube-api-access-drvnl") pod "875e600d-b55a-48a9-a181-3ad09c24cc41" (UID: "875e600d-b55a-48a9-a181-3ad09c24cc41"). InnerVolumeSpecName "kube-api-access-drvnl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:51:25 crc kubenswrapper[4799]: I1010 16:51:25.376159 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/875e600d-b55a-48a9-a181-3ad09c24cc41-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "875e600d-b55a-48a9-a181-3ad09c24cc41" (UID: "875e600d-b55a-48a9-a181-3ad09c24cc41"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:25 crc kubenswrapper[4799]: I1010 16:51:25.405735 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/875e600d-b55a-48a9-a181-3ad09c24cc41-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "875e600d-b55a-48a9-a181-3ad09c24cc41" (UID: "875e600d-b55a-48a9-a181-3ad09c24cc41"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:25 crc kubenswrapper[4799]: I1010 16:51:25.414214 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a8fddc9-9cab-41e0-90c4-3c797749e5e2" path="/var/lib/kubelet/pods/5a8fddc9-9cab-41e0-90c4-3c797749e5e2/volumes" Oct 10 16:51:25 crc kubenswrapper[4799]: I1010 16:51:25.434562 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/875e600d-b55a-48a9-a181-3ad09c24cc41-config-data" (OuterVolumeSpecName: "config-data") pod "875e600d-b55a-48a9-a181-3ad09c24cc41" (UID: "875e600d-b55a-48a9-a181-3ad09c24cc41"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:25 crc kubenswrapper[4799]: I1010 16:51:25.438558 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-69dc9744df-smbqh"] Oct 10 16:51:25 crc kubenswrapper[4799]: W1010 16:51:25.440139 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod34f6a30f_81f3_4240_8a4e_d7f1220801ab.slice/crio-5c2a32d25f9486e9f562bfeaa8fcfe59225c0df3e4b448f3f274900700e43789 WatchSource:0}: Error finding container 5c2a32d25f9486e9f562bfeaa8fcfe59225c0df3e4b448f3f274900700e43789: Status 404 returned error can't find the container with id 5c2a32d25f9486e9f562bfeaa8fcfe59225c0df3e4b448f3f274900700e43789 Oct 10 16:51:25 crc kubenswrapper[4799]: I1010 16:51:25.470075 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/875e600d-b55a-48a9-a181-3ad09c24cc41-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:25 crc kubenswrapper[4799]: I1010 16:51:25.470106 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/875e600d-b55a-48a9-a181-3ad09c24cc41-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:25 crc kubenswrapper[4799]: I1010 16:51:25.470118 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-drvnl\" (UniqueName: \"kubernetes.io/projected/875e600d-b55a-48a9-a181-3ad09c24cc41-kube-api-access-drvnl\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:25 crc kubenswrapper[4799]: I1010 16:51:25.470127 4799 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/875e600d-b55a-48a9-a181-3ad09c24cc41-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:25 crc kubenswrapper[4799]: I1010 16:51:25.470137 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/875e600d-b55a-48a9-a181-3ad09c24cc41-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:25 crc kubenswrapper[4799]: I1010 16:51:25.902325 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-69dc9744df-smbqh" event={"ID":"34f6a30f-81f3-4240-8a4e-d7f1220801ab","Type":"ContainerStarted","Data":"7d59d6ece2c99c837d44d3e71e1c7dc7a0dd9eaa3806c1e6d93e906cf1b5fcfd"} Oct 10 16:51:25 crc kubenswrapper[4799]: I1010 16:51:25.902376 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-69dc9744df-smbqh" event={"ID":"34f6a30f-81f3-4240-8a4e-d7f1220801ab","Type":"ContainerStarted","Data":"d89cab6d55e7c7a9cd3bd3ae258a2ce92f7ee5e9e44da70f5b9a36b753e9b185"} Oct 10 16:51:25 crc kubenswrapper[4799]: I1010 16:51:25.902390 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-69dc9744df-smbqh" event={"ID":"34f6a30f-81f3-4240-8a4e-d7f1220801ab","Type":"ContainerStarted","Data":"5c2a32d25f9486e9f562bfeaa8fcfe59225c0df3e4b448f3f274900700e43789"} Oct 10 16:51:25 crc kubenswrapper[4799]: I1010 16:51:25.903458 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-69dc9744df-smbqh" Oct 10 16:51:25 crc kubenswrapper[4799]: I1010 16:51:25.903479 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-69dc9744df-smbqh" Oct 10 16:51:25 crc kubenswrapper[4799]: I1010 16:51:25.914146 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-dt6zw" 
event={"ID":"875e600d-b55a-48a9-a181-3ad09c24cc41","Type":"ContainerDied","Data":"987eb13b64ca8dfe18e15a7b6b5998d89824a9af535c6285d2030faa0864e5e4"} Oct 10 16:51:25 crc kubenswrapper[4799]: I1010 16:51:25.914178 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="987eb13b64ca8dfe18e15a7b6b5998d89824a9af535c6285d2030faa0864e5e4" Oct 10 16:51:25 crc kubenswrapper[4799]: I1010 16:51:25.914232 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-dt6zw" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.262982 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-69dc9744df-smbqh" podStartSLOduration=2.262957559 podStartE2EDuration="2.262957559s" podCreationTimestamp="2025-10-10 16:51:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:51:25.935519328 +0000 UTC m=+1179.443843443" watchObservedRunningTime="2025-10-10 16:51:26.262957559 +0000 UTC m=+1179.771281684" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.279229 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 10 16:51:26 crc kubenswrapper[4799]: E1010 16:51:26.279829 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="875e600d-b55a-48a9-a181-3ad09c24cc41" containerName="cinder-db-sync" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.279875 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="875e600d-b55a-48a9-a181-3ad09c24cc41" containerName="cinder-db-sync" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.280225 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="875e600d-b55a-48a9-a181-3ad09c24cc41" containerName="cinder-db-sync" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.281629 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.294928 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.301212 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-tvnq2" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.301524 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.301649 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.301771 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.326538 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5f74b5f5cc-54sss"] Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.327974 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f74b5f5cc-54sss" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.361818 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f74b5f5cc-54sss"] Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.387767 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2240606d-067a-4655-9deb-611ff6e3d5af-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"2240606d-067a-4655-9deb-611ff6e3d5af\") " pod="openstack/cinder-scheduler-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.387840 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2240606d-067a-4655-9deb-611ff6e3d5af-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"2240606d-067a-4655-9deb-611ff6e3d5af\") " pod="openstack/cinder-scheduler-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.387864 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2240606d-067a-4655-9deb-611ff6e3d5af-config-data\") pod \"cinder-scheduler-0\" (UID: \"2240606d-067a-4655-9deb-611ff6e3d5af\") " pod="openstack/cinder-scheduler-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.387880 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2240606d-067a-4655-9deb-611ff6e3d5af-scripts\") pod \"cinder-scheduler-0\" (UID: \"2240606d-067a-4655-9deb-611ff6e3d5af\") " pod="openstack/cinder-scheduler-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.387904 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2240606d-067a-4655-9deb-611ff6e3d5af-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"2240606d-067a-4655-9deb-611ff6e3d5af\") " pod="openstack/cinder-scheduler-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.387962 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4v94s\" (UniqueName: \"kubernetes.io/projected/2240606d-067a-4655-9deb-611ff6e3d5af-kube-api-access-4v94s\") pod \"cinder-scheduler-0\" (UID: \"2240606d-067a-4655-9deb-611ff6e3d5af\") " pod="openstack/cinder-scheduler-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.399504 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.401008 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.403068 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.407357 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.489710 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-config\") pod \"dnsmasq-dns-5f74b5f5cc-54sss\" (UID: \"e6a761e9-d08f-4c77-8441-b7fe50f5ffdd\") " pod="openstack/dnsmasq-dns-5f74b5f5cc-54sss" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.489770 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/74472060-a532-439b-9223-127aa23985f3-config-data-custom\") pod \"cinder-api-0\" (UID: \"74472060-a532-439b-9223-127aa23985f3\") " pod="openstack/cinder-api-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.489801 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4v94s\" (UniqueName: \"kubernetes.io/projected/2240606d-067a-4655-9deb-611ff6e3d5af-kube-api-access-4v94s\") pod \"cinder-scheduler-0\" (UID: \"2240606d-067a-4655-9deb-611ff6e3d5af\") " pod="openstack/cinder-scheduler-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.489840 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/74472060-a532-439b-9223-127aa23985f3-scripts\") pod \"cinder-api-0\" (UID: \"74472060-a532-439b-9223-127aa23985f3\") " pod="openstack/cinder-api-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.489859 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dkmlt\" (UniqueName: \"kubernetes.io/projected/74472060-a532-439b-9223-127aa23985f3-kube-api-access-dkmlt\") pod \"cinder-api-0\" (UID: \"74472060-a532-439b-9223-127aa23985f3\") " pod="openstack/cinder-api-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.489884 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74472060-a532-439b-9223-127aa23985f3-config-data\") pod \"cinder-api-0\" (UID: \"74472060-a532-439b-9223-127aa23985f3\") " pod="openstack/cinder-api-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.489907 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74472060-a532-439b-9223-127aa23985f3-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"74472060-a532-439b-9223-127aa23985f3\") " pod="openstack/cinder-api-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.489949 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/74472060-a532-439b-9223-127aa23985f3-etc-machine-id\") pod \"cinder-api-0\" (UID: \"74472060-a532-439b-9223-127aa23985f3\") " pod="openstack/cinder-api-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.489970 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2240606d-067a-4655-9deb-611ff6e3d5af-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"2240606d-067a-4655-9deb-611ff6e3d5af\") " pod="openstack/cinder-scheduler-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.489990 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-dns-swift-storage-0\") pod \"dnsmasq-dns-5f74b5f5cc-54sss\" (UID: \"e6a761e9-d08f-4c77-8441-b7fe50f5ffdd\") " pod="openstack/dnsmasq-dns-5f74b5f5cc-54sss" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.490028 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2240606d-067a-4655-9deb-611ff6e3d5af-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"2240606d-067a-4655-9deb-611ff6e3d5af\") " pod="openstack/cinder-scheduler-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.490046 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2240606d-067a-4655-9deb-611ff6e3d5af-config-data\") pod \"cinder-scheduler-0\" (UID: \"2240606d-067a-4655-9deb-611ff6e3d5af\") " pod="openstack/cinder-scheduler-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.490061 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2240606d-067a-4655-9deb-611ff6e3d5af-scripts\") pod \"cinder-scheduler-0\" (UID: \"2240606d-067a-4655-9deb-611ff6e3d5af\") " pod="openstack/cinder-scheduler-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.490080 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7q7n\" (UniqueName: \"kubernetes.io/projected/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-kube-api-access-j7q7n\") pod \"dnsmasq-dns-5f74b5f5cc-54sss\" (UID: \"e6a761e9-d08f-4c77-8441-b7fe50f5ffdd\") " pod="openstack/dnsmasq-dns-5f74b5f5cc-54sss" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.490102 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-ovsdbserver-sb\") pod \"dnsmasq-dns-5f74b5f5cc-54sss\" (UID: \"e6a761e9-d08f-4c77-8441-b7fe50f5ffdd\") " pod="openstack/dnsmasq-dns-5f74b5f5cc-54sss" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.490119 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2240606d-067a-4655-9deb-611ff6e3d5af-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"2240606d-067a-4655-9deb-611ff6e3d5af\") " pod="openstack/cinder-scheduler-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.490136 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-ovsdbserver-nb\") pod \"dnsmasq-dns-5f74b5f5cc-54sss\" (UID: \"e6a761e9-d08f-4c77-8441-b7fe50f5ffdd\") " pod="openstack/dnsmasq-dns-5f74b5f5cc-54sss" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.490177 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/74472060-a532-439b-9223-127aa23985f3-logs\") pod \"cinder-api-0\" (UID: \"74472060-a532-439b-9223-127aa23985f3\") " pod="openstack/cinder-api-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.490192 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-dns-svc\") pod \"dnsmasq-dns-5f74b5f5cc-54sss\" (UID: \"e6a761e9-d08f-4c77-8441-b7fe50f5ffdd\") " pod="openstack/dnsmasq-dns-5f74b5f5cc-54sss" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.491915 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2240606d-067a-4655-9deb-611ff6e3d5af-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"2240606d-067a-4655-9deb-611ff6e3d5af\") " pod="openstack/cinder-scheduler-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.497189 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2240606d-067a-4655-9deb-611ff6e3d5af-config-data\") pod \"cinder-scheduler-0\" (UID: \"2240606d-067a-4655-9deb-611ff6e3d5af\") " pod="openstack/cinder-scheduler-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.497532 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2240606d-067a-4655-9deb-611ff6e3d5af-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"2240606d-067a-4655-9deb-611ff6e3d5af\") " pod="openstack/cinder-scheduler-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.498314 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2240606d-067a-4655-9deb-611ff6e3d5af-scripts\") pod \"cinder-scheduler-0\" (UID: \"2240606d-067a-4655-9deb-611ff6e3d5af\") " pod="openstack/cinder-scheduler-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.498739 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2240606d-067a-4655-9deb-611ff6e3d5af-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"2240606d-067a-4655-9deb-611ff6e3d5af\") " pod="openstack/cinder-scheduler-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.510525 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4v94s\" (UniqueName: \"kubernetes.io/projected/2240606d-067a-4655-9deb-611ff6e3d5af-kube-api-access-4v94s\") pod \"cinder-scheduler-0\" (UID: \"2240606d-067a-4655-9deb-611ff6e3d5af\") " pod="openstack/cinder-scheduler-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.592044 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/74472060-a532-439b-9223-127aa23985f3-etc-machine-id\") pod \"cinder-api-0\" (UID: \"74472060-a532-439b-9223-127aa23985f3\") " pod="openstack/cinder-api-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.592109 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-dns-swift-storage-0\") pod \"dnsmasq-dns-5f74b5f5cc-54sss\" (UID: \"e6a761e9-d08f-4c77-8441-b7fe50f5ffdd\") " pod="openstack/dnsmasq-dns-5f74b5f5cc-54sss" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.592171 4799 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j7q7n\" (UniqueName: \"kubernetes.io/projected/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-kube-api-access-j7q7n\") pod \"dnsmasq-dns-5f74b5f5cc-54sss\" (UID: \"e6a761e9-d08f-4c77-8441-b7fe50f5ffdd\") " pod="openstack/dnsmasq-dns-5f74b5f5cc-54sss" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.592201 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-ovsdbserver-sb\") pod \"dnsmasq-dns-5f74b5f5cc-54sss\" (UID: \"e6a761e9-d08f-4c77-8441-b7fe50f5ffdd\") " pod="openstack/dnsmasq-dns-5f74b5f5cc-54sss" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.592225 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-ovsdbserver-nb\") pod \"dnsmasq-dns-5f74b5f5cc-54sss\" (UID: \"e6a761e9-d08f-4c77-8441-b7fe50f5ffdd\") " pod="openstack/dnsmasq-dns-5f74b5f5cc-54sss" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.592276 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/74472060-a532-439b-9223-127aa23985f3-logs\") pod \"cinder-api-0\" (UID: \"74472060-a532-439b-9223-127aa23985f3\") " pod="openstack/cinder-api-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.592295 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-dns-svc\") pod \"dnsmasq-dns-5f74b5f5cc-54sss\" (UID: \"e6a761e9-d08f-4c77-8441-b7fe50f5ffdd\") " pod="openstack/dnsmasq-dns-5f74b5f5cc-54sss" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.592281 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/74472060-a532-439b-9223-127aa23985f3-etc-machine-id\") pod \"cinder-api-0\" (UID: \"74472060-a532-439b-9223-127aa23985f3\") " pod="openstack/cinder-api-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.592321 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-config\") pod \"dnsmasq-dns-5f74b5f5cc-54sss\" (UID: \"e6a761e9-d08f-4c77-8441-b7fe50f5ffdd\") " pod="openstack/dnsmasq-dns-5f74b5f5cc-54sss" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.592380 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/74472060-a532-439b-9223-127aa23985f3-config-data-custom\") pod \"cinder-api-0\" (UID: \"74472060-a532-439b-9223-127aa23985f3\") " pod="openstack/cinder-api-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.592439 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/74472060-a532-439b-9223-127aa23985f3-scripts\") pod \"cinder-api-0\" (UID: \"74472060-a532-439b-9223-127aa23985f3\") " pod="openstack/cinder-api-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.592463 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dkmlt\" (UniqueName: \"kubernetes.io/projected/74472060-a532-439b-9223-127aa23985f3-kube-api-access-dkmlt\") pod \"cinder-api-0\" (UID: 
\"74472060-a532-439b-9223-127aa23985f3\") " pod="openstack/cinder-api-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.592520 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74472060-a532-439b-9223-127aa23985f3-config-data\") pod \"cinder-api-0\" (UID: \"74472060-a532-439b-9223-127aa23985f3\") " pod="openstack/cinder-api-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.592567 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74472060-a532-439b-9223-127aa23985f3-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"74472060-a532-439b-9223-127aa23985f3\") " pod="openstack/cinder-api-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.593183 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-ovsdbserver-nb\") pod \"dnsmasq-dns-5f74b5f5cc-54sss\" (UID: \"e6a761e9-d08f-4c77-8441-b7fe50f5ffdd\") " pod="openstack/dnsmasq-dns-5f74b5f5cc-54sss" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.593305 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-dns-swift-storage-0\") pod \"dnsmasq-dns-5f74b5f5cc-54sss\" (UID: \"e6a761e9-d08f-4c77-8441-b7fe50f5ffdd\") " pod="openstack/dnsmasq-dns-5f74b5f5cc-54sss" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.594077 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-config\") pod \"dnsmasq-dns-5f74b5f5cc-54sss\" (UID: \"e6a761e9-d08f-4c77-8441-b7fe50f5ffdd\") " pod="openstack/dnsmasq-dns-5f74b5f5cc-54sss" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.594275 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-dns-svc\") pod \"dnsmasq-dns-5f74b5f5cc-54sss\" (UID: \"e6a761e9-d08f-4c77-8441-b7fe50f5ffdd\") " pod="openstack/dnsmasq-dns-5f74b5f5cc-54sss" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.594685 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-ovsdbserver-sb\") pod \"dnsmasq-dns-5f74b5f5cc-54sss\" (UID: \"e6a761e9-d08f-4c77-8441-b7fe50f5ffdd\") " pod="openstack/dnsmasq-dns-5f74b5f5cc-54sss" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.594746 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/74472060-a532-439b-9223-127aa23985f3-logs\") pod \"cinder-api-0\" (UID: \"74472060-a532-439b-9223-127aa23985f3\") " pod="openstack/cinder-api-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.596335 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/74472060-a532-439b-9223-127aa23985f3-scripts\") pod \"cinder-api-0\" (UID: \"74472060-a532-439b-9223-127aa23985f3\") " pod="openstack/cinder-api-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.597197 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/74472060-a532-439b-9223-127aa23985f3-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"74472060-a532-439b-9223-127aa23985f3\") " pod="openstack/cinder-api-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.598058 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/74472060-a532-439b-9223-127aa23985f3-config-data-custom\") pod \"cinder-api-0\" (UID: \"74472060-a532-439b-9223-127aa23985f3\") " pod="openstack/cinder-api-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.599477 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74472060-a532-439b-9223-127aa23985f3-config-data\") pod \"cinder-api-0\" (UID: \"74472060-a532-439b-9223-127aa23985f3\") " pod="openstack/cinder-api-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.610585 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.618289 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j7q7n\" (UniqueName: \"kubernetes.io/projected/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-kube-api-access-j7q7n\") pod \"dnsmasq-dns-5f74b5f5cc-54sss\" (UID: \"e6a761e9-d08f-4c77-8441-b7fe50f5ffdd\") " pod="openstack/dnsmasq-dns-5f74b5f5cc-54sss" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.621764 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dkmlt\" (UniqueName: \"kubernetes.io/projected/74472060-a532-439b-9223-127aa23985f3-kube-api-access-dkmlt\") pod \"cinder-api-0\" (UID: \"74472060-a532-439b-9223-127aa23985f3\") " pod="openstack/cinder-api-0" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.671265 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f74b5f5cc-54sss" Oct 10 16:51:26 crc kubenswrapper[4799]: I1010 16:51:26.723248 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 10 16:51:27 crc kubenswrapper[4799]: I1010 16:51:27.304506 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 10 16:51:27 crc kubenswrapper[4799]: W1010 16:51:27.312969 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2240606d_067a_4655_9deb_611ff6e3d5af.slice/crio-7f5780f51480cc4caaddaa8a77d05aec121ef76831973f3706648c0b9e2e916c WatchSource:0}: Error finding container 7f5780f51480cc4caaddaa8a77d05aec121ef76831973f3706648c0b9e2e916c: Status 404 returned error can't find the container with id 7f5780f51480cc4caaddaa8a77d05aec121ef76831973f3706648c0b9e2e916c Oct 10 16:51:27 crc kubenswrapper[4799]: I1010 16:51:27.387808 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f74b5f5cc-54sss"] Oct 10 16:51:27 crc kubenswrapper[4799]: I1010 16:51:27.462807 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 10 16:51:27 crc kubenswrapper[4799]: I1010 16:51:27.971018 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"2240606d-067a-4655-9deb-611ff6e3d5af","Type":"ContainerStarted","Data":"7f5780f51480cc4caaddaa8a77d05aec121ef76831973f3706648c0b9e2e916c"} Oct 10 16:51:27 crc kubenswrapper[4799]: I1010 16:51:27.978518 4799 generic.go:334] "Generic (PLEG): container finished" podID="e6a761e9-d08f-4c77-8441-b7fe50f5ffdd" containerID="a2769586af832ea36b0234b06dc38e503d5edf31198c982d5c9ca14e24e55699" exitCode=0 Oct 10 16:51:27 crc kubenswrapper[4799]: I1010 16:51:27.978603 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f74b5f5cc-54sss" event={"ID":"e6a761e9-d08f-4c77-8441-b7fe50f5ffdd","Type":"ContainerDied","Data":"a2769586af832ea36b0234b06dc38e503d5edf31198c982d5c9ca14e24e55699"} Oct 10 16:51:27 crc kubenswrapper[4799]: I1010 16:51:27.978630 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f74b5f5cc-54sss" event={"ID":"e6a761e9-d08f-4c77-8441-b7fe50f5ffdd","Type":"ContainerStarted","Data":"621e41439dd364e0c198678859d4a8895a6d9019fe6bb4f62b7314c489568d96"} Oct 10 16:51:27 crc kubenswrapper[4799]: I1010 16:51:27.991220 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"74472060-a532-439b-9223-127aa23985f3","Type":"ContainerStarted","Data":"9f956da04122010c3f92de131ecc3fd66e0a751d96b302250b3255f836f63ed6"} Oct 10 16:51:28 crc kubenswrapper[4799]: I1010 16:51:28.464714 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:51:28 crc kubenswrapper[4799]: I1010 16:51:28.465788 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a5ead453-da07-4656-ae87-9d4bc94daee7" containerName="sg-core" containerID="cri-o://30d5477ecc3dcc08c02cacb0310066bcb7edc48b8b0dde7ac2c44b6f6066ee5e" gracePeriod=30 Oct 10 16:51:28 crc kubenswrapper[4799]: I1010 16:51:28.465795 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a5ead453-da07-4656-ae87-9d4bc94daee7" containerName="proxy-httpd" containerID="cri-o://95b26ab8841c2b18e7c7fb3068e2fa261a28ce7e0fa8d8a115d33d584aa880fb" gracePeriod=30 Oct 10 16:51:28 crc kubenswrapper[4799]: I1010 16:51:28.465901 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" 
podUID="a5ead453-da07-4656-ae87-9d4bc94daee7" containerName="ceilometer-notification-agent" containerID="cri-o://29ee5983567edd8575a8a57b7430adae0c52ae039baecf8e3ab04899cc03fc20" gracePeriod=30 Oct 10 16:51:28 crc kubenswrapper[4799]: I1010 16:51:28.465733 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a5ead453-da07-4656-ae87-9d4bc94daee7" containerName="ceilometer-central-agent" containerID="cri-o://6426570f9dfe449a016759848e9fb9bec9428063398ab822ea960095cba4927b" gracePeriod=30 Oct 10 16:51:28 crc kubenswrapper[4799]: I1010 16:51:28.481154 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="a5ead453-da07-4656-ae87-9d4bc94daee7" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.163:3000/\": EOF" Oct 10 16:51:28 crc kubenswrapper[4799]: I1010 16:51:28.965619 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 10 16:51:29 crc kubenswrapper[4799]: I1010 16:51:29.003518 4799 generic.go:334] "Generic (PLEG): container finished" podID="a5ead453-da07-4656-ae87-9d4bc94daee7" containerID="95b26ab8841c2b18e7c7fb3068e2fa261a28ce7e0fa8d8a115d33d584aa880fb" exitCode=0 Oct 10 16:51:29 crc kubenswrapper[4799]: I1010 16:51:29.003583 4799 generic.go:334] "Generic (PLEG): container finished" podID="a5ead453-da07-4656-ae87-9d4bc94daee7" containerID="30d5477ecc3dcc08c02cacb0310066bcb7edc48b8b0dde7ac2c44b6f6066ee5e" exitCode=2 Oct 10 16:51:29 crc kubenswrapper[4799]: I1010 16:51:29.003595 4799 generic.go:334] "Generic (PLEG): container finished" podID="a5ead453-da07-4656-ae87-9d4bc94daee7" containerID="6426570f9dfe449a016759848e9fb9bec9428063398ab822ea960095cba4927b" exitCode=0 Oct 10 16:51:29 crc kubenswrapper[4799]: I1010 16:51:29.003669 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5ead453-da07-4656-ae87-9d4bc94daee7","Type":"ContainerDied","Data":"95b26ab8841c2b18e7c7fb3068e2fa261a28ce7e0fa8d8a115d33d584aa880fb"} Oct 10 16:51:29 crc kubenswrapper[4799]: I1010 16:51:29.003704 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5ead453-da07-4656-ae87-9d4bc94daee7","Type":"ContainerDied","Data":"30d5477ecc3dcc08c02cacb0310066bcb7edc48b8b0dde7ac2c44b6f6066ee5e"} Oct 10 16:51:29 crc kubenswrapper[4799]: I1010 16:51:29.003747 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5ead453-da07-4656-ae87-9d4bc94daee7","Type":"ContainerDied","Data":"6426570f9dfe449a016759848e9fb9bec9428063398ab822ea960095cba4927b"} Oct 10 16:51:29 crc kubenswrapper[4799]: I1010 16:51:29.005708 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"2240606d-067a-4655-9deb-611ff6e3d5af","Type":"ContainerStarted","Data":"b6eb7bb2625a48be5127c7ecd8054f4b5f9b17a07f31b4debc4d2fd30bd140ad"} Oct 10 16:51:29 crc kubenswrapper[4799]: I1010 16:51:29.007640 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"74472060-a532-439b-9223-127aa23985f3","Type":"ContainerStarted","Data":"9c96894227aabc979909cde92185406f5d8f1e250a51793abbcc7d19cf200507"} Oct 10 16:51:32 crc kubenswrapper[4799]: I1010 16:51:32.045156 4799 generic.go:334] "Generic (PLEG): container finished" podID="a5ead453-da07-4656-ae87-9d4bc94daee7" containerID="29ee5983567edd8575a8a57b7430adae0c52ae039baecf8e3ab04899cc03fc20" exitCode=0 Oct 10 16:51:32 crc kubenswrapper[4799]: I1010 
16:51:32.045221 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5ead453-da07-4656-ae87-9d4bc94daee7","Type":"ContainerDied","Data":"29ee5983567edd8575a8a57b7430adae0c52ae039baecf8e3ab04899cc03fc20"} Oct 10 16:51:33 crc kubenswrapper[4799]: I1010 16:51:33.898809 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.029312 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-zmshk"] Oct 10 16:51:34 crc kubenswrapper[4799]: E1010 16:51:34.029735 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5ead453-da07-4656-ae87-9d4bc94daee7" containerName="ceilometer-central-agent" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.029755 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5ead453-da07-4656-ae87-9d4bc94daee7" containerName="ceilometer-central-agent" Oct 10 16:51:34 crc kubenswrapper[4799]: E1010 16:51:34.029827 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5ead453-da07-4656-ae87-9d4bc94daee7" containerName="ceilometer-notification-agent" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.029834 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5ead453-da07-4656-ae87-9d4bc94daee7" containerName="ceilometer-notification-agent" Oct 10 16:51:34 crc kubenswrapper[4799]: E1010 16:51:34.029849 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5ead453-da07-4656-ae87-9d4bc94daee7" containerName="sg-core" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.029856 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5ead453-da07-4656-ae87-9d4bc94daee7" containerName="sg-core" Oct 10 16:51:34 crc kubenswrapper[4799]: E1010 16:51:34.029870 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5ead453-da07-4656-ae87-9d4bc94daee7" containerName="proxy-httpd" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.029876 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5ead453-da07-4656-ae87-9d4bc94daee7" containerName="proxy-httpd" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.030117 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5ead453-da07-4656-ae87-9d4bc94daee7" containerName="sg-core" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.030133 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5ead453-da07-4656-ae87-9d4bc94daee7" containerName="proxy-httpd" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.030150 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5ead453-da07-4656-ae87-9d4bc94daee7" containerName="ceilometer-notification-agent" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.030166 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5ead453-da07-4656-ae87-9d4bc94daee7" containerName="ceilometer-central-agent" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.030711 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-zmshk" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.053266 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-zmshk"] Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.073417 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5ead453-da07-4656-ae87-9d4bc94daee7-log-httpd\") pod \"a5ead453-da07-4656-ae87-9d4bc94daee7\" (UID: \"a5ead453-da07-4656-ae87-9d4bc94daee7\") " Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.073717 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5ead453-da07-4656-ae87-9d4bc94daee7-run-httpd\") pod \"a5ead453-da07-4656-ae87-9d4bc94daee7\" (UID: \"a5ead453-da07-4656-ae87-9d4bc94daee7\") " Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.073786 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5ead453-da07-4656-ae87-9d4bc94daee7-scripts\") pod \"a5ead453-da07-4656-ae87-9d4bc94daee7\" (UID: \"a5ead453-da07-4656-ae87-9d4bc94daee7\") " Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.073805 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5ead453-da07-4656-ae87-9d4bc94daee7-config-data\") pod \"a5ead453-da07-4656-ae87-9d4bc94daee7\" (UID: \"a5ead453-da07-4656-ae87-9d4bc94daee7\") " Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.073853 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5ead453-da07-4656-ae87-9d4bc94daee7-combined-ca-bundle\") pod \"a5ead453-da07-4656-ae87-9d4bc94daee7\" (UID: \"a5ead453-da07-4656-ae87-9d4bc94daee7\") " Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.073868 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a5ead453-da07-4656-ae87-9d4bc94daee7-sg-core-conf-yaml\") pod \"a5ead453-da07-4656-ae87-9d4bc94daee7\" (UID: \"a5ead453-da07-4656-ae87-9d4bc94daee7\") " Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.073964 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4bvb4\" (UniqueName: \"kubernetes.io/projected/a5ead453-da07-4656-ae87-9d4bc94daee7-kube-api-access-4bvb4\") pod \"a5ead453-da07-4656-ae87-9d4bc94daee7\" (UID: \"a5ead453-da07-4656-ae87-9d4bc94daee7\") " Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.078809 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5ead453-da07-4656-ae87-9d4bc94daee7-scripts" (OuterVolumeSpecName: "scripts") pod "a5ead453-da07-4656-ae87-9d4bc94daee7" (UID: "a5ead453-da07-4656-ae87-9d4bc94daee7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.079460 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5ead453-da07-4656-ae87-9d4bc94daee7-kube-api-access-4bvb4" (OuterVolumeSpecName: "kube-api-access-4bvb4") pod "a5ead453-da07-4656-ae87-9d4bc94daee7" (UID: "a5ead453-da07-4656-ae87-9d4bc94daee7"). InnerVolumeSpecName "kube-api-access-4bvb4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.083006 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5ead453-da07-4656-ae87-9d4bc94daee7-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a5ead453-da07-4656-ae87-9d4bc94daee7" (UID: "a5ead453-da07-4656-ae87-9d4bc94daee7"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.083310 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5ead453-da07-4656-ae87-9d4bc94daee7-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a5ead453-da07-4656-ae87-9d4bc94daee7" (UID: "a5ead453-da07-4656-ae87-9d4bc94daee7"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.092849 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f74b5f5cc-54sss" event={"ID":"e6a761e9-d08f-4c77-8441-b7fe50f5ffdd","Type":"ContainerStarted","Data":"fd59d61fc5fd52a7907ba716a92a3b1b087699ca7d1f1416c6fcc3348300f073"} Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.093176 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5f74b5f5cc-54sss" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.113054 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a5ead453-da07-4656-ae87-9d4bc94daee7","Type":"ContainerDied","Data":"d01bb1daf6208a0437d536f7ba1fcf22ad1f76c623682072e8584000476505e8"} Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.113112 4799 scope.go:117] "RemoveContainer" containerID="95b26ab8841c2b18e7c7fb3068e2fa261a28ce7e0fa8d8a115d33d584aa880fb" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.113294 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.118051 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5f74b5f5cc-54sss" podStartSLOduration=8.118026271 podStartE2EDuration="8.118026271s" podCreationTimestamp="2025-10-10 16:51:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:51:34.11676621 +0000 UTC m=+1187.625090345" watchObservedRunningTime="2025-10-10 16:51:34.118026271 +0000 UTC m=+1187.626350386" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.122501 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5ead453-da07-4656-ae87-9d4bc94daee7-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a5ead453-da07-4656-ae87-9d4bc94daee7" (UID: "a5ead453-da07-4656-ae87-9d4bc94daee7"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.124132 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"95c3e251-04ea-40ab-94d0-608d6ef0d8f3","Type":"ContainerStarted","Data":"7894d2a17f4958f21c9c52f542e34e11e9c6033bdf6b58c15d012c7bb5cc154f"} Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.157687 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-srvbw"] Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.160400 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-srvbw" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.172063 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-srvbw"] Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.173486 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=1.7539132670000002 podStartE2EDuration="13.17346941s" podCreationTimestamp="2025-10-10 16:51:21 +0000 UTC" firstStartedPulling="2025-10-10 16:51:22.223481736 +0000 UTC m=+1175.731805861" lastFinishedPulling="2025-10-10 16:51:33.643037859 +0000 UTC m=+1187.151362004" observedRunningTime="2025-10-10 16:51:34.143149331 +0000 UTC m=+1187.651473446" watchObservedRunningTime="2025-10-10 16:51:34.17346941 +0000 UTC m=+1187.681793525" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.175825 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tnftm\" (UniqueName: \"kubernetes.io/projected/3834e038-89d1-48e1-94f2-6323bd3a9bca-kube-api-access-tnftm\") pod \"nova-api-db-create-zmshk\" (UID: \"3834e038-89d1-48e1-94f2-6323bd3a9bca\") " pod="openstack/nova-api-db-create-zmshk" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.176016 4799 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5ead453-da07-4656-ae87-9d4bc94daee7-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.176035 4799 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a5ead453-da07-4656-ae87-9d4bc94daee7-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.176043 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5ead453-da07-4656-ae87-9d4bc94daee7-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.176051 4799 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a5ead453-da07-4656-ae87-9d4bc94daee7-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.176060 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4bvb4\" (UniqueName: \"kubernetes.io/projected/a5ead453-da07-4656-ae87-9d4bc94daee7-kube-api-access-4bvb4\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.184482 4799 scope.go:117] "RemoveContainer" containerID="30d5477ecc3dcc08c02cacb0310066bcb7edc48b8b0dde7ac2c44b6f6066ee5e" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.195331 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/a5ead453-da07-4656-ae87-9d4bc94daee7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a5ead453-da07-4656-ae87-9d4bc94daee7" (UID: "a5ead453-da07-4656-ae87-9d4bc94daee7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.208577 4799 scope.go:117] "RemoveContainer" containerID="29ee5983567edd8575a8a57b7430adae0c52ae039baecf8e3ab04899cc03fc20" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.226592 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5ead453-da07-4656-ae87-9d4bc94daee7-config-data" (OuterVolumeSpecName: "config-data") pod "a5ead453-da07-4656-ae87-9d4bc94daee7" (UID: "a5ead453-da07-4656-ae87-9d4bc94daee7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.238771 4799 scope.go:117] "RemoveContainer" containerID="6426570f9dfe449a016759848e9fb9bec9428063398ab822ea960095cba4927b" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.277680 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tnftm\" (UniqueName: \"kubernetes.io/projected/3834e038-89d1-48e1-94f2-6323bd3a9bca-kube-api-access-tnftm\") pod \"nova-api-db-create-zmshk\" (UID: \"3834e038-89d1-48e1-94f2-6323bd3a9bca\") " pod="openstack/nova-api-db-create-zmshk" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.277872 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jm2np\" (UniqueName: \"kubernetes.io/projected/21f9a08f-0fc1-497c-96f9-17b436acf166-kube-api-access-jm2np\") pod \"nova-cell0-db-create-srvbw\" (UID: \"21f9a08f-0fc1-497c-96f9-17b436acf166\") " pod="openstack/nova-cell0-db-create-srvbw" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.277972 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5ead453-da07-4656-ae87-9d4bc94daee7-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.277984 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5ead453-da07-4656-ae87-9d4bc94daee7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.294376 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tnftm\" (UniqueName: \"kubernetes.io/projected/3834e038-89d1-48e1-94f2-6323bd3a9bca-kube-api-access-tnftm\") pod \"nova-api-db-create-zmshk\" (UID: \"3834e038-89d1-48e1-94f2-6323bd3a9bca\") " pod="openstack/nova-api-db-create-zmshk" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.333880 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-x9xfs"] Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.335356 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-x9xfs" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.344634 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-x9xfs"] Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.352801 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-zmshk" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.379346 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jm2np\" (UniqueName: \"kubernetes.io/projected/21f9a08f-0fc1-497c-96f9-17b436acf166-kube-api-access-jm2np\") pod \"nova-cell0-db-create-srvbw\" (UID: \"21f9a08f-0fc1-497c-96f9-17b436acf166\") " pod="openstack/nova-cell0-db-create-srvbw" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.395991 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jm2np\" (UniqueName: \"kubernetes.io/projected/21f9a08f-0fc1-497c-96f9-17b436acf166-kube-api-access-jm2np\") pod \"nova-cell0-db-create-srvbw\" (UID: \"21f9a08f-0fc1-497c-96f9-17b436acf166\") " pod="openstack/nova-cell0-db-create-srvbw" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.475939 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.482433 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-srvbw" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.483600 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rlb2v\" (UniqueName: \"kubernetes.io/projected/29b5dc61-70d3-4fc7-85e7-665f12d6f2ad-kube-api-access-rlb2v\") pod \"nova-cell1-db-create-x9xfs\" (UID: \"29b5dc61-70d3-4fc7-85e7-665f12d6f2ad\") " pod="openstack/nova-cell1-db-create-x9xfs" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.512856 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.529889 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.532158 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.546646 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.546920 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.548684 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.586882 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rlb2v\" (UniqueName: \"kubernetes.io/projected/29b5dc61-70d3-4fc7-85e7-665f12d6f2ad-kube-api-access-rlb2v\") pod \"nova-cell1-db-create-x9xfs\" (UID: \"29b5dc61-70d3-4fc7-85e7-665f12d6f2ad\") " pod="openstack/nova-cell1-db-create-x9xfs" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.655913 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rlb2v\" (UniqueName: \"kubernetes.io/projected/29b5dc61-70d3-4fc7-85e7-665f12d6f2ad-kube-api-access-rlb2v\") pod \"nova-cell1-db-create-x9xfs\" (UID: \"29b5dc61-70d3-4fc7-85e7-665f12d6f2ad\") " pod="openstack/nova-cell1-db-create-x9xfs" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.677004 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-x9xfs" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.694186 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-run-httpd\") pod \"ceilometer-0\" (UID: \"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3\") " pod="openstack/ceilometer-0" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.694242 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-scripts\") pod \"ceilometer-0\" (UID: \"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3\") " pod="openstack/ceilometer-0" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.694295 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3\") " pod="openstack/ceilometer-0" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.694344 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hw45z\" (UniqueName: \"kubernetes.io/projected/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-kube-api-access-hw45z\") pod \"ceilometer-0\" (UID: \"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3\") " pod="openstack/ceilometer-0" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.694364 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3\") " pod="openstack/ceilometer-0" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.694381 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-log-httpd\") pod \"ceilometer-0\" (UID: \"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3\") " pod="openstack/ceilometer-0" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.694414 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-config-data\") pod \"ceilometer-0\" (UID: \"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3\") " pod="openstack/ceilometer-0" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.796069 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hw45z\" (UniqueName: \"kubernetes.io/projected/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-kube-api-access-hw45z\") pod \"ceilometer-0\" (UID: \"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3\") " pod="openstack/ceilometer-0" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.796131 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3\") " pod="openstack/ceilometer-0" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.796153 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-log-httpd\") pod \"ceilometer-0\" (UID: \"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3\") " pod="openstack/ceilometer-0" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.796192 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-config-data\") pod \"ceilometer-0\" (UID: \"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3\") " pod="openstack/ceilometer-0" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.796228 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-run-httpd\") pod \"ceilometer-0\" (UID: \"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3\") " pod="openstack/ceilometer-0" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.796262 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-scripts\") pod \"ceilometer-0\" (UID: \"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3\") " pod="openstack/ceilometer-0" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.796316 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3\") " pod="openstack/ceilometer-0" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.798359 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-run-httpd\") pod \"ceilometer-0\" (UID: \"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3\") " pod="openstack/ceilometer-0" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.801258 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-config-data\") pod \"ceilometer-0\" (UID: \"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3\") " pod="openstack/ceilometer-0" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.801457 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-log-httpd\") pod \"ceilometer-0\" (UID: \"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3\") " pod="openstack/ceilometer-0" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.801866 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3\") " pod="openstack/ceilometer-0" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.808887 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-scripts\") pod \"ceilometer-0\" (UID: \"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3\") " pod="openstack/ceilometer-0" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.808895 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3\") " pod="openstack/ceilometer-0" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.819605 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hw45z\" (UniqueName: \"kubernetes.io/projected/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-kube-api-access-hw45z\") pod \"ceilometer-0\" (UID: \"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3\") " pod="openstack/ceilometer-0" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.908559 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.935277 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-69dc9744df-smbqh" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.959830 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-69dc9744df-smbqh" Oct 10 16:51:34 crc kubenswrapper[4799]: I1010 16:51:34.988673 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-zmshk"] Oct 10 16:51:35 crc kubenswrapper[4799]: I1010 16:51:35.151470 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-srvbw"] Oct 10 16:51:35 crc kubenswrapper[4799]: I1010 16:51:35.178223 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"2240606d-067a-4655-9deb-611ff6e3d5af","Type":"ContainerStarted","Data":"eff44efb5e4b0fcea774798d7f33d3ea26d38bfcd6b641cd95d859a93438c2b5"} Oct 10 16:51:35 crc kubenswrapper[4799]: I1010 16:51:35.180522 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-zmshk" event={"ID":"3834e038-89d1-48e1-94f2-6323bd3a9bca","Type":"ContainerStarted","Data":"d0d1164487141f0185daceaf0e22d6ed98d6811a77affd1a1ab019fc36374707"} Oct 10 16:51:35 crc kubenswrapper[4799]: I1010 16:51:35.186341 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"74472060-a532-439b-9223-127aa23985f3","Type":"ContainerStarted","Data":"321bf01f8c797847adc57e48546c89842c314557eaa1b9606d90e24e7e67c356"} Oct 10 16:51:35 crc kubenswrapper[4799]: I1010 16:51:35.186541 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="74472060-a532-439b-9223-127aa23985f3" containerName="cinder-api-log" containerID="cri-o://9c96894227aabc979909cde92185406f5d8f1e250a51793abbcc7d19cf200507" gracePeriod=30 Oct 10 16:51:35 crc kubenswrapper[4799]: I1010 16:51:35.186658 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="74472060-a532-439b-9223-127aa23985f3" containerName="cinder-api" containerID="cri-o://321bf01f8c797847adc57e48546c89842c314557eaa1b9606d90e24e7e67c356" gracePeriod=30 Oct 10 16:51:35 crc kubenswrapper[4799]: I1010 16:51:35.200946 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=8.592411459000001 podStartE2EDuration="9.200925957s" podCreationTimestamp="2025-10-10 16:51:26 +0000 UTC" firstStartedPulling="2025-10-10 16:51:27.314692506 +0000 UTC m=+1180.823016621" lastFinishedPulling="2025-10-10 16:51:27.923207014 +0000 UTC m=+1181.431531119" observedRunningTime="2025-10-10 16:51:35.197285868 +0000 UTC m=+1188.705609973" 
watchObservedRunningTime="2025-10-10 16:51:35.200925957 +0000 UTC m=+1188.709250082" Oct 10 16:51:35 crc kubenswrapper[4799]: I1010 16:51:35.229506 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=9.229484782 podStartE2EDuration="9.229484782s" podCreationTimestamp="2025-10-10 16:51:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:51:35.222242864 +0000 UTC m=+1188.730566979" watchObservedRunningTime="2025-10-10 16:51:35.229484782 +0000 UTC m=+1188.737808887" Oct 10 16:51:35 crc kubenswrapper[4799]: I1010 16:51:35.249457 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-x9xfs"] Oct 10 16:51:35 crc kubenswrapper[4799]: I1010 16:51:35.453234 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5ead453-da07-4656-ae87-9d4bc94daee7" path="/var/lib/kubelet/pods/a5ead453-da07-4656-ae87-9d4bc94daee7/volumes" Oct 10 16:51:35 crc kubenswrapper[4799]: I1010 16:51:35.454988 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:51:35 crc kubenswrapper[4799]: E1010 16:51:35.651821 4799 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod74472060_a532_439b_9223_127aa23985f3.slice/crio-321bf01f8c797847adc57e48546c89842c314557eaa1b9606d90e24e7e67c356.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod74472060_a532_439b_9223_127aa23985f3.slice/crio-conmon-321bf01f8c797847adc57e48546c89842c314557eaa1b9606d90e24e7e67c356.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod21f9a08f_0fc1_497c_96f9_17b436acf166.slice/crio-conmon-a7744f1f6b9b90abac5951b974c22a05f9b743a15b598bd4cd1b427036db5928.scope\": RecentStats: unable to find data in memory cache]" Oct 10 16:51:35 crc kubenswrapper[4799]: I1010 16:51:35.839315 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 10 16:51:35 crc kubenswrapper[4799]: I1010 16:51:35.920504 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/74472060-a532-439b-9223-127aa23985f3-logs\") pod \"74472060-a532-439b-9223-127aa23985f3\" (UID: \"74472060-a532-439b-9223-127aa23985f3\") " Oct 10 16:51:35 crc kubenswrapper[4799]: I1010 16:51:35.920575 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74472060-a532-439b-9223-127aa23985f3-config-data\") pod \"74472060-a532-439b-9223-127aa23985f3\" (UID: \"74472060-a532-439b-9223-127aa23985f3\") " Oct 10 16:51:35 crc kubenswrapper[4799]: I1010 16:51:35.920684 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dkmlt\" (UniqueName: \"kubernetes.io/projected/74472060-a532-439b-9223-127aa23985f3-kube-api-access-dkmlt\") pod \"74472060-a532-439b-9223-127aa23985f3\" (UID: \"74472060-a532-439b-9223-127aa23985f3\") " Oct 10 16:51:35 crc kubenswrapper[4799]: I1010 16:51:35.920713 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/74472060-a532-439b-9223-127aa23985f3-scripts\") pod \"74472060-a532-439b-9223-127aa23985f3\" (UID: \"74472060-a532-439b-9223-127aa23985f3\") " Oct 10 16:51:35 crc kubenswrapper[4799]: I1010 16:51:35.920849 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/74472060-a532-439b-9223-127aa23985f3-etc-machine-id\") pod \"74472060-a532-439b-9223-127aa23985f3\" (UID: \"74472060-a532-439b-9223-127aa23985f3\") " Oct 10 16:51:35 crc kubenswrapper[4799]: I1010 16:51:35.920888 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74472060-a532-439b-9223-127aa23985f3-combined-ca-bundle\") pod \"74472060-a532-439b-9223-127aa23985f3\" (UID: \"74472060-a532-439b-9223-127aa23985f3\") " Oct 10 16:51:35 crc kubenswrapper[4799]: I1010 16:51:35.920915 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/74472060-a532-439b-9223-127aa23985f3-config-data-custom\") pod \"74472060-a532-439b-9223-127aa23985f3\" (UID: \"74472060-a532-439b-9223-127aa23985f3\") " Oct 10 16:51:35 crc kubenswrapper[4799]: I1010 16:51:35.921217 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/74472060-a532-439b-9223-127aa23985f3-logs" (OuterVolumeSpecName: "logs") pod "74472060-a532-439b-9223-127aa23985f3" (UID: "74472060-a532-439b-9223-127aa23985f3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:51:35 crc kubenswrapper[4799]: I1010 16:51:35.921309 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/74472060-a532-439b-9223-127aa23985f3-logs\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:35 crc kubenswrapper[4799]: I1010 16:51:35.921379 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/74472060-a532-439b-9223-127aa23985f3-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "74472060-a532-439b-9223-127aa23985f3" (UID: "74472060-a532-439b-9223-127aa23985f3"). 
InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 16:51:35 crc kubenswrapper[4799]: I1010 16:51:35.925691 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74472060-a532-439b-9223-127aa23985f3-scripts" (OuterVolumeSpecName: "scripts") pod "74472060-a532-439b-9223-127aa23985f3" (UID: "74472060-a532-439b-9223-127aa23985f3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:35 crc kubenswrapper[4799]: I1010 16:51:35.926332 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74472060-a532-439b-9223-127aa23985f3-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "74472060-a532-439b-9223-127aa23985f3" (UID: "74472060-a532-439b-9223-127aa23985f3"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:35 crc kubenswrapper[4799]: I1010 16:51:35.926390 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74472060-a532-439b-9223-127aa23985f3-kube-api-access-dkmlt" (OuterVolumeSpecName: "kube-api-access-dkmlt") pod "74472060-a532-439b-9223-127aa23985f3" (UID: "74472060-a532-439b-9223-127aa23985f3"). InnerVolumeSpecName "kube-api-access-dkmlt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:51:35 crc kubenswrapper[4799]: I1010 16:51:35.950478 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74472060-a532-439b-9223-127aa23985f3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "74472060-a532-439b-9223-127aa23985f3" (UID: "74472060-a532-439b-9223-127aa23985f3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:35 crc kubenswrapper[4799]: I1010 16:51:35.975655 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74472060-a532-439b-9223-127aa23985f3-config-data" (OuterVolumeSpecName: "config-data") pod "74472060-a532-439b-9223-127aa23985f3" (UID: "74472060-a532-439b-9223-127aa23985f3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.023979 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dkmlt\" (UniqueName: \"kubernetes.io/projected/74472060-a532-439b-9223-127aa23985f3-kube-api-access-dkmlt\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.024016 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/74472060-a532-439b-9223-127aa23985f3-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.024031 4799 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/74472060-a532-439b-9223-127aa23985f3-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.024044 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74472060-a532-439b-9223-127aa23985f3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.024056 4799 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/74472060-a532-439b-9223-127aa23985f3-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.024067 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74472060-a532-439b-9223-127aa23985f3-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.081307 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.081599 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="ef55ae74-4435-4042-9567-4d07d41b3ce0" containerName="glance-log" containerID="cri-o://0b146c0c34e0636a32d9113ae943fc6de4bf48319ee1cd0678aadae8c7d6e694" gracePeriod=30 Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.081721 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="ef55ae74-4435-4042-9567-4d07d41b3ce0" containerName="glance-httpd" containerID="cri-o://81a33109b42e3a65de9041c84dfd7b0c48481adc2672b592ae62813fadf5065e" gracePeriod=30 Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.197739 4799 generic.go:334] "Generic (PLEG): container finished" podID="74472060-a532-439b-9223-127aa23985f3" containerID="321bf01f8c797847adc57e48546c89842c314557eaa1b9606d90e24e7e67c356" exitCode=0 Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.198011 4799 generic.go:334] "Generic (PLEG): container finished" podID="74472060-a532-439b-9223-127aa23985f3" containerID="9c96894227aabc979909cde92185406f5d8f1e250a51793abbcc7d19cf200507" exitCode=143 Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.197792 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.197807 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"74472060-a532-439b-9223-127aa23985f3","Type":"ContainerDied","Data":"321bf01f8c797847adc57e48546c89842c314557eaa1b9606d90e24e7e67c356"} Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.200842 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"74472060-a532-439b-9223-127aa23985f3","Type":"ContainerDied","Data":"9c96894227aabc979909cde92185406f5d8f1e250a51793abbcc7d19cf200507"} Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.200871 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"74472060-a532-439b-9223-127aa23985f3","Type":"ContainerDied","Data":"9f956da04122010c3f92de131ecc3fd66e0a751d96b302250b3255f836f63ed6"} Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.200889 4799 scope.go:117] "RemoveContainer" containerID="321bf01f8c797847adc57e48546c89842c314557eaa1b9606d90e24e7e67c356" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.204258 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3","Type":"ContainerStarted","Data":"4a850942ad660d59cad880d27871b132e4b78133816d348570dca1309bbe0cc2"} Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.204295 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3","Type":"ContainerStarted","Data":"8bca0624a2407c0272fd20f138a97b1f8739dfe4ae5539b0b91d56c104d2749f"} Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.207813 4799 generic.go:334] "Generic (PLEG): container finished" podID="21f9a08f-0fc1-497c-96f9-17b436acf166" containerID="a7744f1f6b9b90abac5951b974c22a05f9b743a15b598bd4cd1b427036db5928" exitCode=0 Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.207878 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-srvbw" event={"ID":"21f9a08f-0fc1-497c-96f9-17b436acf166","Type":"ContainerDied","Data":"a7744f1f6b9b90abac5951b974c22a05f9b743a15b598bd4cd1b427036db5928"} Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.207901 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-srvbw" event={"ID":"21f9a08f-0fc1-497c-96f9-17b436acf166","Type":"ContainerStarted","Data":"d8510759952c2c264bef5f8c957825a63ce57b99a9363c520eadf48129d5efe0"} Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.210959 4799 generic.go:334] "Generic (PLEG): container finished" podID="3834e038-89d1-48e1-94f2-6323bd3a9bca" containerID="733db5a009878bd263eb72d80af0d8d8c163d3807ed5234db056107575883b22" exitCode=0 Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.211020 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-zmshk" event={"ID":"3834e038-89d1-48e1-94f2-6323bd3a9bca","Type":"ContainerDied","Data":"733db5a009878bd263eb72d80af0d8d8c163d3807ed5234db056107575883b22"} Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.217607 4799 generic.go:334] "Generic (PLEG): container finished" podID="29b5dc61-70d3-4fc7-85e7-665f12d6f2ad" containerID="e433847b37d87c8813ed78b7c50a3a66b8a762a349bba6c0f50b139812bbe003" exitCode=0 Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.217837 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell1-db-create-x9xfs" event={"ID":"29b5dc61-70d3-4fc7-85e7-665f12d6f2ad","Type":"ContainerDied","Data":"e433847b37d87c8813ed78b7c50a3a66b8a762a349bba6c0f50b139812bbe003"} Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.217927 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-x9xfs" event={"ID":"29b5dc61-70d3-4fc7-85e7-665f12d6f2ad","Type":"ContainerStarted","Data":"14036e92099b05fa071b61d60a52274ea55aaff7b23fa77aa8beb3c205bc13f9"} Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.227032 4799 scope.go:117] "RemoveContainer" containerID="9c96894227aabc979909cde92185406f5d8f1e250a51793abbcc7d19cf200507" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.249934 4799 scope.go:117] "RemoveContainer" containerID="321bf01f8c797847adc57e48546c89842c314557eaa1b9606d90e24e7e67c356" Oct 10 16:51:36 crc kubenswrapper[4799]: E1010 16:51:36.255184 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"321bf01f8c797847adc57e48546c89842c314557eaa1b9606d90e24e7e67c356\": container with ID starting with 321bf01f8c797847adc57e48546c89842c314557eaa1b9606d90e24e7e67c356 not found: ID does not exist" containerID="321bf01f8c797847adc57e48546c89842c314557eaa1b9606d90e24e7e67c356" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.255231 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"321bf01f8c797847adc57e48546c89842c314557eaa1b9606d90e24e7e67c356"} err="failed to get container status \"321bf01f8c797847adc57e48546c89842c314557eaa1b9606d90e24e7e67c356\": rpc error: code = NotFound desc = could not find container \"321bf01f8c797847adc57e48546c89842c314557eaa1b9606d90e24e7e67c356\": container with ID starting with 321bf01f8c797847adc57e48546c89842c314557eaa1b9606d90e24e7e67c356 not found: ID does not exist" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.255258 4799 scope.go:117] "RemoveContainer" containerID="9c96894227aabc979909cde92185406f5d8f1e250a51793abbcc7d19cf200507" Oct 10 16:51:36 crc kubenswrapper[4799]: E1010 16:51:36.256055 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c96894227aabc979909cde92185406f5d8f1e250a51793abbcc7d19cf200507\": container with ID starting with 9c96894227aabc979909cde92185406f5d8f1e250a51793abbcc7d19cf200507 not found: ID does not exist" containerID="9c96894227aabc979909cde92185406f5d8f1e250a51793abbcc7d19cf200507" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.256079 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c96894227aabc979909cde92185406f5d8f1e250a51793abbcc7d19cf200507"} err="failed to get container status \"9c96894227aabc979909cde92185406f5d8f1e250a51793abbcc7d19cf200507\": rpc error: code = NotFound desc = could not find container \"9c96894227aabc979909cde92185406f5d8f1e250a51793abbcc7d19cf200507\": container with ID starting with 9c96894227aabc979909cde92185406f5d8f1e250a51793abbcc7d19cf200507 not found: ID does not exist" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.256094 4799 scope.go:117] "RemoveContainer" containerID="321bf01f8c797847adc57e48546c89842c314557eaa1b9606d90e24e7e67c356" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.256282 4799 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"321bf01f8c797847adc57e48546c89842c314557eaa1b9606d90e24e7e67c356"} err="failed to get container status \"321bf01f8c797847adc57e48546c89842c314557eaa1b9606d90e24e7e67c356\": rpc error: code = NotFound desc = could not find container \"321bf01f8c797847adc57e48546c89842c314557eaa1b9606d90e24e7e67c356\": container with ID starting with 321bf01f8c797847adc57e48546c89842c314557eaa1b9606d90e24e7e67c356 not found: ID does not exist" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.256302 4799 scope.go:117] "RemoveContainer" containerID="9c96894227aabc979909cde92185406f5d8f1e250a51793abbcc7d19cf200507" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.259226 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c96894227aabc979909cde92185406f5d8f1e250a51793abbcc7d19cf200507"} err="failed to get container status \"9c96894227aabc979909cde92185406f5d8f1e250a51793abbcc7d19cf200507\": rpc error: code = NotFound desc = could not find container \"9c96894227aabc979909cde92185406f5d8f1e250a51793abbcc7d19cf200507\": container with ID starting with 9c96894227aabc979909cde92185406f5d8f1e250a51793abbcc7d19cf200507 not found: ID does not exist" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.281806 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.298691 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.314226 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 10 16:51:36 crc kubenswrapper[4799]: E1010 16:51:36.314965 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74472060-a532-439b-9223-127aa23985f3" containerName="cinder-api" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.314982 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="74472060-a532-439b-9223-127aa23985f3" containerName="cinder-api" Oct 10 16:51:36 crc kubenswrapper[4799]: E1010 16:51:36.315036 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74472060-a532-439b-9223-127aa23985f3" containerName="cinder-api-log" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.315044 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="74472060-a532-439b-9223-127aa23985f3" containerName="cinder-api-log" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.315434 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="74472060-a532-439b-9223-127aa23985f3" containerName="cinder-api" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.315461 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="74472060-a532-439b-9223-127aa23985f3" containerName="cinder-api-log" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.317076 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.320323 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.323576 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.323805 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.327204 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.434881 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f9c4cd8a-6aed-4826-b23b-328645f5801f-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " pod="openstack/cinder-api-0" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.434943 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpp2r\" (UniqueName: \"kubernetes.io/projected/f9c4cd8a-6aed-4826-b23b-328645f5801f-kube-api-access-gpp2r\") pod \"cinder-api-0\" (UID: \"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " pod="openstack/cinder-api-0" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.435253 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " pod="openstack/cinder-api-0" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.435344 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-public-tls-certs\") pod \"cinder-api-0\" (UID: \"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " pod="openstack/cinder-api-0" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.435380 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-scripts\") pod \"cinder-api-0\" (UID: \"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " pod="openstack/cinder-api-0" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.435417 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-config-data-custom\") pod \"cinder-api-0\" (UID: \"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " pod="openstack/cinder-api-0" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.435439 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " pod="openstack/cinder-api-0" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.435612 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/f9c4cd8a-6aed-4826-b23b-328645f5801f-logs\") pod \"cinder-api-0\" (UID: \"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " pod="openstack/cinder-api-0" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.435748 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-config-data\") pod \"cinder-api-0\" (UID: \"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " pod="openstack/cinder-api-0" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.536873 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " pod="openstack/cinder-api-0" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.536942 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-public-tls-certs\") pod \"cinder-api-0\" (UID: \"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " pod="openstack/cinder-api-0" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.536963 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-scripts\") pod \"cinder-api-0\" (UID: \"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " pod="openstack/cinder-api-0" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.537002 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-config-data-custom\") pod \"cinder-api-0\" (UID: \"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " pod="openstack/cinder-api-0" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.537022 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " pod="openstack/cinder-api-0" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.537086 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f9c4cd8a-6aed-4826-b23b-328645f5801f-logs\") pod \"cinder-api-0\" (UID: \"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " pod="openstack/cinder-api-0" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.537128 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-config-data\") pod \"cinder-api-0\" (UID: \"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " pod="openstack/cinder-api-0" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.537196 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f9c4cd8a-6aed-4826-b23b-328645f5801f-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " pod="openstack/cinder-api-0" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.537241 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpp2r\" (UniqueName: 
\"kubernetes.io/projected/f9c4cd8a-6aed-4826-b23b-328645f5801f-kube-api-access-gpp2r\") pod \"cinder-api-0\" (UID: \"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " pod="openstack/cinder-api-0" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.542078 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " pod="openstack/cinder-api-0" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.542517 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f9c4cd8a-6aed-4826-b23b-328645f5801f-logs\") pod \"cinder-api-0\" (UID: \"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " pod="openstack/cinder-api-0" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.543389 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f9c4cd8a-6aed-4826-b23b-328645f5801f-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " pod="openstack/cinder-api-0" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.545951 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-config-data-custom\") pod \"cinder-api-0\" (UID: \"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " pod="openstack/cinder-api-0" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.546294 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-public-tls-certs\") pod \"cinder-api-0\" (UID: \"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " pod="openstack/cinder-api-0" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.547037 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " pod="openstack/cinder-api-0" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.551686 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-config-data\") pod \"cinder-api-0\" (UID: \"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " pod="openstack/cinder-api-0" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.551836 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-scripts\") pod \"cinder-api-0\" (UID: \"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " pod="openstack/cinder-api-0" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.560696 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpp2r\" (UniqueName: \"kubernetes.io/projected/f9c4cd8a-6aed-4826-b23b-328645f5801f-kube-api-access-gpp2r\") pod \"cinder-api-0\" (UID: \"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " pod="openstack/cinder-api-0" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.611675 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.643278 4799 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 10 16:51:36 crc kubenswrapper[4799]: I1010 16:51:36.869437 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 10 16:51:37 crc kubenswrapper[4799]: W1010 16:51:37.128133 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf9c4cd8a_6aed_4826_b23b_328645f5801f.slice/crio-af3e54d179c2ce9e28e0158ccfc133816d62b3e2bb192e4e76f70dd1617edde0 WatchSource:0}: Error finding container af3e54d179c2ce9e28e0158ccfc133816d62b3e2bb192e4e76f70dd1617edde0: Status 404 returned error can't find the container with id af3e54d179c2ce9e28e0158ccfc133816d62b3e2bb192e4e76f70dd1617edde0 Oct 10 16:51:37 crc kubenswrapper[4799]: I1010 16:51:37.129022 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 10 16:51:37 crc kubenswrapper[4799]: I1010 16:51:37.244228 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f9c4cd8a-6aed-4826-b23b-328645f5801f","Type":"ContainerStarted","Data":"af3e54d179c2ce9e28e0158ccfc133816d62b3e2bb192e4e76f70dd1617edde0"} Oct 10 16:51:37 crc kubenswrapper[4799]: I1010 16:51:37.247754 4799 generic.go:334] "Generic (PLEG): container finished" podID="ef55ae74-4435-4042-9567-4d07d41b3ce0" containerID="0b146c0c34e0636a32d9113ae943fc6de4bf48319ee1cd0678aadae8c7d6e694" exitCode=143 Oct 10 16:51:37 crc kubenswrapper[4799]: I1010 16:51:37.247907 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ef55ae74-4435-4042-9567-4d07d41b3ce0","Type":"ContainerDied","Data":"0b146c0c34e0636a32d9113ae943fc6de4bf48319ee1cd0678aadae8c7d6e694"} Oct 10 16:51:37 crc kubenswrapper[4799]: I1010 16:51:37.251653 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3","Type":"ContainerStarted","Data":"4b1f81396a03bbdff9b4db95fc8e929c7d01c8d07ea7e15891c3c97bbc39e2e2"} Oct 10 16:51:37 crc kubenswrapper[4799]: I1010 16:51:37.301134 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 10 16:51:37 crc kubenswrapper[4799]: I1010 16:51:37.445300 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74472060-a532-439b-9223-127aa23985f3" path="/var/lib/kubelet/pods/74472060-a532-439b-9223-127aa23985f3/volumes" Oct 10 16:51:37 crc kubenswrapper[4799]: I1010 16:51:37.954004 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-x9xfs" Oct 10 16:51:37 crc kubenswrapper[4799]: I1010 16:51:37.984979 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-srvbw" Oct 10 16:51:38 crc kubenswrapper[4799]: I1010 16:51:38.006719 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-zmshk" Oct 10 16:51:38 crc kubenswrapper[4799]: I1010 16:51:38.063849 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tnftm\" (UniqueName: \"kubernetes.io/projected/3834e038-89d1-48e1-94f2-6323bd3a9bca-kube-api-access-tnftm\") pod \"3834e038-89d1-48e1-94f2-6323bd3a9bca\" (UID: \"3834e038-89d1-48e1-94f2-6323bd3a9bca\") " Oct 10 16:51:38 crc kubenswrapper[4799]: I1010 16:51:38.064175 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jm2np\" (UniqueName: \"kubernetes.io/projected/21f9a08f-0fc1-497c-96f9-17b436acf166-kube-api-access-jm2np\") pod \"21f9a08f-0fc1-497c-96f9-17b436acf166\" (UID: \"21f9a08f-0fc1-497c-96f9-17b436acf166\") " Oct 10 16:51:38 crc kubenswrapper[4799]: I1010 16:51:38.064415 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rlb2v\" (UniqueName: \"kubernetes.io/projected/29b5dc61-70d3-4fc7-85e7-665f12d6f2ad-kube-api-access-rlb2v\") pod \"29b5dc61-70d3-4fc7-85e7-665f12d6f2ad\" (UID: \"29b5dc61-70d3-4fc7-85e7-665f12d6f2ad\") " Oct 10 16:51:38 crc kubenswrapper[4799]: I1010 16:51:38.070805 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21f9a08f-0fc1-497c-96f9-17b436acf166-kube-api-access-jm2np" (OuterVolumeSpecName: "kube-api-access-jm2np") pod "21f9a08f-0fc1-497c-96f9-17b436acf166" (UID: "21f9a08f-0fc1-497c-96f9-17b436acf166"). InnerVolumeSpecName "kube-api-access-jm2np". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:51:38 crc kubenswrapper[4799]: I1010 16:51:38.076034 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3834e038-89d1-48e1-94f2-6323bd3a9bca-kube-api-access-tnftm" (OuterVolumeSpecName: "kube-api-access-tnftm") pod "3834e038-89d1-48e1-94f2-6323bd3a9bca" (UID: "3834e038-89d1-48e1-94f2-6323bd3a9bca"). InnerVolumeSpecName "kube-api-access-tnftm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:51:38 crc kubenswrapper[4799]: I1010 16:51:38.077208 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29b5dc61-70d3-4fc7-85e7-665f12d6f2ad-kube-api-access-rlb2v" (OuterVolumeSpecName: "kube-api-access-rlb2v") pod "29b5dc61-70d3-4fc7-85e7-665f12d6f2ad" (UID: "29b5dc61-70d3-4fc7-85e7-665f12d6f2ad"). InnerVolumeSpecName "kube-api-access-rlb2v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:51:38 crc kubenswrapper[4799]: I1010 16:51:38.168691 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tnftm\" (UniqueName: \"kubernetes.io/projected/3834e038-89d1-48e1-94f2-6323bd3a9bca-kube-api-access-tnftm\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:38 crc kubenswrapper[4799]: I1010 16:51:38.169027 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jm2np\" (UniqueName: \"kubernetes.io/projected/21f9a08f-0fc1-497c-96f9-17b436acf166-kube-api-access-jm2np\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:38 crc kubenswrapper[4799]: I1010 16:51:38.169041 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rlb2v\" (UniqueName: \"kubernetes.io/projected/29b5dc61-70d3-4fc7-85e7-665f12d6f2ad-kube-api-access-rlb2v\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:38 crc kubenswrapper[4799]: I1010 16:51:38.264834 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f9c4cd8a-6aed-4826-b23b-328645f5801f","Type":"ContainerStarted","Data":"1e2bb89cca75275f38d0737f645f0b33d216e5600663a5ffc8d93f24601d9e28"} Oct 10 16:51:38 crc kubenswrapper[4799]: I1010 16:51:38.268247 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3","Type":"ContainerStarted","Data":"b8bb6a627d95f865adcce7f4a3518c281775a147e8cb02d5b3b447e2f941f3dc"} Oct 10 16:51:38 crc kubenswrapper[4799]: I1010 16:51:38.272073 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-srvbw" Oct 10 16:51:38 crc kubenswrapper[4799]: I1010 16:51:38.272065 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-srvbw" event={"ID":"21f9a08f-0fc1-497c-96f9-17b436acf166","Type":"ContainerDied","Data":"d8510759952c2c264bef5f8c957825a63ce57b99a9363c520eadf48129d5efe0"} Oct 10 16:51:38 crc kubenswrapper[4799]: I1010 16:51:38.272197 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d8510759952c2c264bef5f8c957825a63ce57b99a9363c520eadf48129d5efe0" Oct 10 16:51:38 crc kubenswrapper[4799]: I1010 16:51:38.273886 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-zmshk" event={"ID":"3834e038-89d1-48e1-94f2-6323bd3a9bca","Type":"ContainerDied","Data":"d0d1164487141f0185daceaf0e22d6ed98d6811a77affd1a1ab019fc36374707"} Oct 10 16:51:38 crc kubenswrapper[4799]: I1010 16:51:38.273907 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-zmshk" Oct 10 16:51:38 crc kubenswrapper[4799]: I1010 16:51:38.273916 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d0d1164487141f0185daceaf0e22d6ed98d6811a77affd1a1ab019fc36374707" Oct 10 16:51:38 crc kubenswrapper[4799]: I1010 16:51:38.275737 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-x9xfs" event={"ID":"29b5dc61-70d3-4fc7-85e7-665f12d6f2ad","Type":"ContainerDied","Data":"14036e92099b05fa071b61d60a52274ea55aaff7b23fa77aa8beb3c205bc13f9"} Oct 10 16:51:38 crc kubenswrapper[4799]: I1010 16:51:38.275809 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="14036e92099b05fa071b61d60a52274ea55aaff7b23fa77aa8beb3c205bc13f9" Oct 10 16:51:38 crc kubenswrapper[4799]: I1010 16:51:38.275840 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-x9xfs" Oct 10 16:51:39 crc kubenswrapper[4799]: I1010 16:51:39.287274 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="2240606d-067a-4655-9deb-611ff6e3d5af" containerName="cinder-scheduler" containerID="cri-o://b6eb7bb2625a48be5127c7ecd8054f4b5f9b17a07f31b4debc4d2fd30bd140ad" gracePeriod=30 Oct 10 16:51:39 crc kubenswrapper[4799]: I1010 16:51:39.288733 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="2240606d-067a-4655-9deb-611ff6e3d5af" containerName="probe" containerID="cri-o://eff44efb5e4b0fcea774798d7f33d3ea26d38bfcd6b641cd95d859a93438c2b5" gracePeriod=30 Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.284348 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.307456 4799 generic.go:334] "Generic (PLEG): container finished" podID="2240606d-067a-4655-9deb-611ff6e3d5af" containerID="eff44efb5e4b0fcea774798d7f33d3ea26d38bfcd6b641cd95d859a93438c2b5" exitCode=0 Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.307502 4799 generic.go:334] "Generic (PLEG): container finished" podID="2240606d-067a-4655-9deb-611ff6e3d5af" containerID="b6eb7bb2625a48be5127c7ecd8054f4b5f9b17a07f31b4debc4d2fd30bd140ad" exitCode=0 Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.307611 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"2240606d-067a-4655-9deb-611ff6e3d5af","Type":"ContainerDied","Data":"eff44efb5e4b0fcea774798d7f33d3ea26d38bfcd6b641cd95d859a93438c2b5"} Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.307680 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"2240606d-067a-4655-9deb-611ff6e3d5af","Type":"ContainerDied","Data":"b6eb7bb2625a48be5127c7ecd8054f4b5f9b17a07f31b4debc4d2fd30bd140ad"} Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.334450 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3","Type":"ContainerStarted","Data":"0e4d67573fc7051eb2a82f10df5ef08a23dbbf730c4c7508e74be1e08fe03fb9"} Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.334664 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.367377 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.371419855 podStartE2EDuration="6.367351894s" podCreationTimestamp="2025-10-10 16:51:34 +0000 UTC" firstStartedPulling="2025-10-10 16:51:35.489542631 +0000 UTC m=+1188.997866746" lastFinishedPulling="2025-10-10 16:51:39.48547464 +0000 UTC m=+1192.993798785" observedRunningTime="2025-10-10 16:51:40.35260357 +0000 UTC m=+1193.860927695" watchObservedRunningTime="2025-10-10 16:51:40.367351894 +0000 UTC m=+1193.875676009" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.373259 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f9c4cd8a-6aed-4826-b23b-328645f5801f","Type":"ContainerStarted","Data":"02155113288bc0bf63cf3a3084f2f7ed5580d5f5ad54aca68e4fb0b10bb08a28"} Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.374403 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.386123 4799 generic.go:334] "Generic (PLEG): container finished" podID="ef55ae74-4435-4042-9567-4d07d41b3ce0" containerID="81a33109b42e3a65de9041c84dfd7b0c48481adc2672b592ae62813fadf5065e" exitCode=0 Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.386164 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ef55ae74-4435-4042-9567-4d07d41b3ce0","Type":"ContainerDied","Data":"81a33109b42e3a65de9041c84dfd7b0c48481adc2672b592ae62813fadf5065e"} Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.386192 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"ef55ae74-4435-4042-9567-4d07d41b3ce0","Type":"ContainerDied","Data":"ee373a0b9d5ada72b01747a840e14a908d1ee7d468f468e56be2944474ea0af1"} Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.386210 4799 scope.go:117] "RemoveContainer" containerID="81a33109b42e3a65de9041c84dfd7b0c48481adc2672b592ae62813fadf5065e" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.386334 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.404471 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.404428099 podStartE2EDuration="4.404428099s" podCreationTimestamp="2025-10-10 16:51:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:51:40.404131901 +0000 UTC m=+1193.912456026" watchObservedRunningTime="2025-10-10 16:51:40.404428099 +0000 UTC m=+1193.912752214" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.433127 4799 scope.go:117] "RemoveContainer" containerID="0b146c0c34e0636a32d9113ae943fc6de4bf48319ee1cd0678aadae8c7d6e694" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.436567 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef55ae74-4435-4042-9567-4d07d41b3ce0-public-tls-certs\") pod \"ef55ae74-4435-4042-9567-4d07d41b3ce0\" (UID: \"ef55ae74-4435-4042-9567-4d07d41b3ce0\") " Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.436619 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef55ae74-4435-4042-9567-4d07d41b3ce0-combined-ca-bundle\") pod \"ef55ae74-4435-4042-9567-4d07d41b3ce0\" (UID: \"ef55ae74-4435-4042-9567-4d07d41b3ce0\") " Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.436652 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ef55ae74-4435-4042-9567-4d07d41b3ce0-httpd-run\") pod \"ef55ae74-4435-4042-9567-4d07d41b3ce0\" (UID: \"ef55ae74-4435-4042-9567-4d07d41b3ce0\") " Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.436705 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef55ae74-4435-4042-9567-4d07d41b3ce0-scripts\") pod \"ef55ae74-4435-4042-9567-4d07d41b3ce0\" (UID: \"ef55ae74-4435-4042-9567-4d07d41b3ce0\") " Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.436791 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef55ae74-4435-4042-9567-4d07d41b3ce0-config-data\") pod \"ef55ae74-4435-4042-9567-4d07d41b3ce0\" (UID: \"ef55ae74-4435-4042-9567-4d07d41b3ce0\") " Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.436869 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ef55ae74-4435-4042-9567-4d07d41b3ce0-logs\") pod \"ef55ae74-4435-4042-9567-4d07d41b3ce0\" (UID: \"ef55ae74-4435-4042-9567-4d07d41b3ce0\") " Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.436904 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod 
\"ef55ae74-4435-4042-9567-4d07d41b3ce0\" (UID: \"ef55ae74-4435-4042-9567-4d07d41b3ce0\") " Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.436934 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-swgwv\" (UniqueName: \"kubernetes.io/projected/ef55ae74-4435-4042-9567-4d07d41b3ce0-kube-api-access-swgwv\") pod \"ef55ae74-4435-4042-9567-4d07d41b3ce0\" (UID: \"ef55ae74-4435-4042-9567-4d07d41b3ce0\") " Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.437201 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef55ae74-4435-4042-9567-4d07d41b3ce0-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "ef55ae74-4435-4042-9567-4d07d41b3ce0" (UID: "ef55ae74-4435-4042-9567-4d07d41b3ce0"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.437404 4799 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ef55ae74-4435-4042-9567-4d07d41b3ce0-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.437474 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef55ae74-4435-4042-9567-4d07d41b3ce0-logs" (OuterVolumeSpecName: "logs") pod "ef55ae74-4435-4042-9567-4d07d41b3ce0" (UID: "ef55ae74-4435-4042-9567-4d07d41b3ce0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.443090 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef55ae74-4435-4042-9567-4d07d41b3ce0-kube-api-access-swgwv" (OuterVolumeSpecName: "kube-api-access-swgwv") pod "ef55ae74-4435-4042-9567-4d07d41b3ce0" (UID: "ef55ae74-4435-4042-9567-4d07d41b3ce0"). InnerVolumeSpecName "kube-api-access-swgwv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.445996 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "ef55ae74-4435-4042-9567-4d07d41b3ce0" (UID: "ef55ae74-4435-4042-9567-4d07d41b3ce0"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.445996 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef55ae74-4435-4042-9567-4d07d41b3ce0-scripts" (OuterVolumeSpecName: "scripts") pod "ef55ae74-4435-4042-9567-4d07d41b3ce0" (UID: "ef55ae74-4435-4042-9567-4d07d41b3ce0"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.477987 4799 scope.go:117] "RemoveContainer" containerID="81a33109b42e3a65de9041c84dfd7b0c48481adc2672b592ae62813fadf5065e" Oct 10 16:51:40 crc kubenswrapper[4799]: E1010 16:51:40.478536 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"81a33109b42e3a65de9041c84dfd7b0c48481adc2672b592ae62813fadf5065e\": container with ID starting with 81a33109b42e3a65de9041c84dfd7b0c48481adc2672b592ae62813fadf5065e not found: ID does not exist" containerID="81a33109b42e3a65de9041c84dfd7b0c48481adc2672b592ae62813fadf5065e" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.478579 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81a33109b42e3a65de9041c84dfd7b0c48481adc2672b592ae62813fadf5065e"} err="failed to get container status \"81a33109b42e3a65de9041c84dfd7b0c48481adc2672b592ae62813fadf5065e\": rpc error: code = NotFound desc = could not find container \"81a33109b42e3a65de9041c84dfd7b0c48481adc2672b592ae62813fadf5065e\": container with ID starting with 81a33109b42e3a65de9041c84dfd7b0c48481adc2672b592ae62813fadf5065e not found: ID does not exist" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.478605 4799 scope.go:117] "RemoveContainer" containerID="0b146c0c34e0636a32d9113ae943fc6de4bf48319ee1cd0678aadae8c7d6e694" Oct 10 16:51:40 crc kubenswrapper[4799]: E1010 16:51:40.478908 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b146c0c34e0636a32d9113ae943fc6de4bf48319ee1cd0678aadae8c7d6e694\": container with ID starting with 0b146c0c34e0636a32d9113ae943fc6de4bf48319ee1cd0678aadae8c7d6e694 not found: ID does not exist" containerID="0b146c0c34e0636a32d9113ae943fc6de4bf48319ee1cd0678aadae8c7d6e694" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.478934 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b146c0c34e0636a32d9113ae943fc6de4bf48319ee1cd0678aadae8c7d6e694"} err="failed to get container status \"0b146c0c34e0636a32d9113ae943fc6de4bf48319ee1cd0678aadae8c7d6e694\": rpc error: code = NotFound desc = could not find container \"0b146c0c34e0636a32d9113ae943fc6de4bf48319ee1cd0678aadae8c7d6e694\": container with ID starting with 0b146c0c34e0636a32d9113ae943fc6de4bf48319ee1cd0678aadae8c7d6e694 not found: ID does not exist" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.501800 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.522007 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef55ae74-4435-4042-9567-4d07d41b3ce0-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "ef55ae74-4435-4042-9567-4d07d41b3ce0" (UID: "ef55ae74-4435-4042-9567-4d07d41b3ce0"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.522368 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef55ae74-4435-4042-9567-4d07d41b3ce0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ef55ae74-4435-4042-9567-4d07d41b3ce0" (UID: "ef55ae74-4435-4042-9567-4d07d41b3ce0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.532927 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef55ae74-4435-4042-9567-4d07d41b3ce0-config-data" (OuterVolumeSpecName: "config-data") pod "ef55ae74-4435-4042-9567-4d07d41b3ce0" (UID: "ef55ae74-4435-4042-9567-4d07d41b3ce0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.541210 4799 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef55ae74-4435-4042-9567-4d07d41b3ce0-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.541254 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef55ae74-4435-4042-9567-4d07d41b3ce0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.541265 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef55ae74-4435-4042-9567-4d07d41b3ce0-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.541273 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef55ae74-4435-4042-9567-4d07d41b3ce0-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.541281 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ef55ae74-4435-4042-9567-4d07d41b3ce0-logs\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.541316 4799 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.541328 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-swgwv\" (UniqueName: \"kubernetes.io/projected/ef55ae74-4435-4042-9567-4d07d41b3ce0-kube-api-access-swgwv\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.562250 4799 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.642600 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2240606d-067a-4655-9deb-611ff6e3d5af-scripts\") pod \"2240606d-067a-4655-9deb-611ff6e3d5af\" (UID: \"2240606d-067a-4655-9deb-611ff6e3d5af\") " Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.642675 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2240606d-067a-4655-9deb-611ff6e3d5af-combined-ca-bundle\") pod \"2240606d-067a-4655-9deb-611ff6e3d5af\" (UID: \"2240606d-067a-4655-9deb-611ff6e3d5af\") " Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.642731 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2240606d-067a-4655-9deb-611ff6e3d5af-config-data-custom\") pod \"2240606d-067a-4655-9deb-611ff6e3d5af\" 
(UID: \"2240606d-067a-4655-9deb-611ff6e3d5af\") " Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.642747 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2240606d-067a-4655-9deb-611ff6e3d5af-config-data\") pod \"2240606d-067a-4655-9deb-611ff6e3d5af\" (UID: \"2240606d-067a-4655-9deb-611ff6e3d5af\") " Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.642825 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4v94s\" (UniqueName: \"kubernetes.io/projected/2240606d-067a-4655-9deb-611ff6e3d5af-kube-api-access-4v94s\") pod \"2240606d-067a-4655-9deb-611ff6e3d5af\" (UID: \"2240606d-067a-4655-9deb-611ff6e3d5af\") " Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.642852 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2240606d-067a-4655-9deb-611ff6e3d5af-etc-machine-id\") pod \"2240606d-067a-4655-9deb-611ff6e3d5af\" (UID: \"2240606d-067a-4655-9deb-611ff6e3d5af\") " Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.643225 4799 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.643271 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2240606d-067a-4655-9deb-611ff6e3d5af-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "2240606d-067a-4655-9deb-611ff6e3d5af" (UID: "2240606d-067a-4655-9deb-611ff6e3d5af"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.646894 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2240606d-067a-4655-9deb-611ff6e3d5af-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "2240606d-067a-4655-9deb-611ff6e3d5af" (UID: "2240606d-067a-4655-9deb-611ff6e3d5af"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.647013 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2240606d-067a-4655-9deb-611ff6e3d5af-scripts" (OuterVolumeSpecName: "scripts") pod "2240606d-067a-4655-9deb-611ff6e3d5af" (UID: "2240606d-067a-4655-9deb-611ff6e3d5af"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.648600 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2240606d-067a-4655-9deb-611ff6e3d5af-kube-api-access-4v94s" (OuterVolumeSpecName: "kube-api-access-4v94s") pod "2240606d-067a-4655-9deb-611ff6e3d5af" (UID: "2240606d-067a-4655-9deb-611ff6e3d5af"). InnerVolumeSpecName "kube-api-access-4v94s". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.687938 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2240606d-067a-4655-9deb-611ff6e3d5af-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2240606d-067a-4655-9deb-611ff6e3d5af" (UID: "2240606d-067a-4655-9deb-611ff6e3d5af"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.740060 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2240606d-067a-4655-9deb-611ff6e3d5af-config-data" (OuterVolumeSpecName: "config-data") pod "2240606d-067a-4655-9deb-611ff6e3d5af" (UID: "2240606d-067a-4655-9deb-611ff6e3d5af"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.745191 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2240606d-067a-4655-9deb-611ff6e3d5af-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.745221 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2240606d-067a-4655-9deb-611ff6e3d5af-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.745232 4799 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2240606d-067a-4655-9deb-611ff6e3d5af-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.745240 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2240606d-067a-4655-9deb-611ff6e3d5af-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.745249 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4v94s\" (UniqueName: \"kubernetes.io/projected/2240606d-067a-4655-9deb-611ff6e3d5af-kube-api-access-4v94s\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.745259 4799 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2240606d-067a-4655-9deb-611ff6e3d5af-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.825958 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.833880 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.853380 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 10 16:51:40 crc kubenswrapper[4799]: E1010 16:51:40.853806 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21f9a08f-0fc1-497c-96f9-17b436acf166" containerName="mariadb-database-create" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.853830 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="21f9a08f-0fc1-497c-96f9-17b436acf166" containerName="mariadb-database-create" Oct 10 16:51:40 crc kubenswrapper[4799]: E1010 16:51:40.853850 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef55ae74-4435-4042-9567-4d07d41b3ce0" containerName="glance-log" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.853862 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef55ae74-4435-4042-9567-4d07d41b3ce0" containerName="glance-log" Oct 10 16:51:40 crc kubenswrapper[4799]: E1010 16:51:40.853889 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2240606d-067a-4655-9deb-611ff6e3d5af" 
containerName="probe" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.853896 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="2240606d-067a-4655-9deb-611ff6e3d5af" containerName="probe" Oct 10 16:51:40 crc kubenswrapper[4799]: E1010 16:51:40.853927 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29b5dc61-70d3-4fc7-85e7-665f12d6f2ad" containerName="mariadb-database-create" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.853935 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="29b5dc61-70d3-4fc7-85e7-665f12d6f2ad" containerName="mariadb-database-create" Oct 10 16:51:40 crc kubenswrapper[4799]: E1010 16:51:40.853946 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2240606d-067a-4655-9deb-611ff6e3d5af" containerName="cinder-scheduler" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.853953 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="2240606d-067a-4655-9deb-611ff6e3d5af" containerName="cinder-scheduler" Oct 10 16:51:40 crc kubenswrapper[4799]: E1010 16:51:40.853965 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3834e038-89d1-48e1-94f2-6323bd3a9bca" containerName="mariadb-database-create" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.853973 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="3834e038-89d1-48e1-94f2-6323bd3a9bca" containerName="mariadb-database-create" Oct 10 16:51:40 crc kubenswrapper[4799]: E1010 16:51:40.853986 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef55ae74-4435-4042-9567-4d07d41b3ce0" containerName="glance-httpd" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.854019 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef55ae74-4435-4042-9567-4d07d41b3ce0" containerName="glance-httpd" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.854216 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="2240606d-067a-4655-9deb-611ff6e3d5af" containerName="cinder-scheduler" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.854232 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="29b5dc61-70d3-4fc7-85e7-665f12d6f2ad" containerName="mariadb-database-create" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.854245 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef55ae74-4435-4042-9567-4d07d41b3ce0" containerName="glance-log" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.854258 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef55ae74-4435-4042-9567-4d07d41b3ce0" containerName="glance-httpd" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.854281 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="2240606d-067a-4655-9deb-611ff6e3d5af" containerName="probe" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.854289 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="21f9a08f-0fc1-497c-96f9-17b436acf166" containerName="mariadb-database-create" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.854300 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="3834e038-89d1-48e1-94f2-6323bd3a9bca" containerName="mariadb-database-create" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.855461 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.860544 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.863450 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.902978 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.949730 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31fc68f8-af18-42b7-a94c-90a22afea5f1-config-data\") pod \"glance-default-external-api-0\" (UID: \"31fc68f8-af18-42b7-a94c-90a22afea5f1\") " pod="openstack/glance-default-external-api-0" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.949808 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31fc68f8-af18-42b7-a94c-90a22afea5f1-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"31fc68f8-af18-42b7-a94c-90a22afea5f1\") " pod="openstack/glance-default-external-api-0" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.950022 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/31fc68f8-af18-42b7-a94c-90a22afea5f1-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"31fc68f8-af18-42b7-a94c-90a22afea5f1\") " pod="openstack/glance-default-external-api-0" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.950111 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/31fc68f8-af18-42b7-a94c-90a22afea5f1-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"31fc68f8-af18-42b7-a94c-90a22afea5f1\") " pod="openstack/glance-default-external-api-0" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.950183 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/31fc68f8-af18-42b7-a94c-90a22afea5f1-logs\") pod \"glance-default-external-api-0\" (UID: \"31fc68f8-af18-42b7-a94c-90a22afea5f1\") " pod="openstack/glance-default-external-api-0" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.950254 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31fc68f8-af18-42b7-a94c-90a22afea5f1-scripts\") pod \"glance-default-external-api-0\" (UID: \"31fc68f8-af18-42b7-a94c-90a22afea5f1\") " pod="openstack/glance-default-external-api-0" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.950291 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2p989\" (UniqueName: \"kubernetes.io/projected/31fc68f8-af18-42b7-a94c-90a22afea5f1-kube-api-access-2p989\") pod \"glance-default-external-api-0\" (UID: \"31fc68f8-af18-42b7-a94c-90a22afea5f1\") " pod="openstack/glance-default-external-api-0" Oct 10 16:51:40 crc kubenswrapper[4799]: I1010 16:51:40.950357 4799 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"31fc68f8-af18-42b7-a94c-90a22afea5f1\") " pod="openstack/glance-default-external-api-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.053191 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/31fc68f8-af18-42b7-a94c-90a22afea5f1-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"31fc68f8-af18-42b7-a94c-90a22afea5f1\") " pod="openstack/glance-default-external-api-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.053268 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/31fc68f8-af18-42b7-a94c-90a22afea5f1-logs\") pod \"glance-default-external-api-0\" (UID: \"31fc68f8-af18-42b7-a94c-90a22afea5f1\") " pod="openstack/glance-default-external-api-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.053319 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31fc68f8-af18-42b7-a94c-90a22afea5f1-scripts\") pod \"glance-default-external-api-0\" (UID: \"31fc68f8-af18-42b7-a94c-90a22afea5f1\") " pod="openstack/glance-default-external-api-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.053352 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2p989\" (UniqueName: \"kubernetes.io/projected/31fc68f8-af18-42b7-a94c-90a22afea5f1-kube-api-access-2p989\") pod \"glance-default-external-api-0\" (UID: \"31fc68f8-af18-42b7-a94c-90a22afea5f1\") " pod="openstack/glance-default-external-api-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.053379 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"31fc68f8-af18-42b7-a94c-90a22afea5f1\") " pod="openstack/glance-default-external-api-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.053448 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31fc68f8-af18-42b7-a94c-90a22afea5f1-config-data\") pod \"glance-default-external-api-0\" (UID: \"31fc68f8-af18-42b7-a94c-90a22afea5f1\") " pod="openstack/glance-default-external-api-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.053475 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31fc68f8-af18-42b7-a94c-90a22afea5f1-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"31fc68f8-af18-42b7-a94c-90a22afea5f1\") " pod="openstack/glance-default-external-api-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.053588 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/31fc68f8-af18-42b7-a94c-90a22afea5f1-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"31fc68f8-af18-42b7-a94c-90a22afea5f1\") " pod="openstack/glance-default-external-api-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.053736 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"31fc68f8-af18-42b7-a94c-90a22afea5f1\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-external-api-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.053908 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/31fc68f8-af18-42b7-a94c-90a22afea5f1-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"31fc68f8-af18-42b7-a94c-90a22afea5f1\") " pod="openstack/glance-default-external-api-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.054015 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/31fc68f8-af18-42b7-a94c-90a22afea5f1-logs\") pod \"glance-default-external-api-0\" (UID: \"31fc68f8-af18-42b7-a94c-90a22afea5f1\") " pod="openstack/glance-default-external-api-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.060533 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31fc68f8-af18-42b7-a94c-90a22afea5f1-scripts\") pod \"glance-default-external-api-0\" (UID: \"31fc68f8-af18-42b7-a94c-90a22afea5f1\") " pod="openstack/glance-default-external-api-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.060874 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31fc68f8-af18-42b7-a94c-90a22afea5f1-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"31fc68f8-af18-42b7-a94c-90a22afea5f1\") " pod="openstack/glance-default-external-api-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.061145 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31fc68f8-af18-42b7-a94c-90a22afea5f1-config-data\") pod \"glance-default-external-api-0\" (UID: \"31fc68f8-af18-42b7-a94c-90a22afea5f1\") " pod="openstack/glance-default-external-api-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.061609 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/31fc68f8-af18-42b7-a94c-90a22afea5f1-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"31fc68f8-af18-42b7-a94c-90a22afea5f1\") " pod="openstack/glance-default-external-api-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.076603 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2p989\" (UniqueName: \"kubernetes.io/projected/31fc68f8-af18-42b7-a94c-90a22afea5f1-kube-api-access-2p989\") pod \"glance-default-external-api-0\" (UID: \"31fc68f8-af18-42b7-a94c-90a22afea5f1\") " pod="openstack/glance-default-external-api-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.085748 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"31fc68f8-af18-42b7-a94c-90a22afea5f1\") " pod="openstack/glance-default-external-api-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.179430 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.435435 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef55ae74-4435-4042-9567-4d07d41b3ce0" path="/var/lib/kubelet/pods/ef55ae74-4435-4042-9567-4d07d41b3ce0/volumes" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.467447 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.467939 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0" containerName="glance-httpd" containerID="cri-o://d09f1c41ff83e3f94d5800a1f1a3455a847a7fa8e5a288a1102d81b7fccd07bf" gracePeriod=30 Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.469691 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0" containerName="glance-log" containerID="cri-o://866b587e8dd73a9f0f531084fbcbe1a89d5bdd82e34c2eec0199cef8596e3329" gracePeriod=30 Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.475229 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"2240606d-067a-4655-9deb-611ff6e3d5af","Type":"ContainerDied","Data":"7f5780f51480cc4caaddaa8a77d05aec121ef76831973f3706648c0b9e2e916c"} Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.475298 4799 scope.go:117] "RemoveContainer" containerID="eff44efb5e4b0fcea774798d7f33d3ea26d38bfcd6b641cd95d859a93438c2b5" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.475790 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.514980 4799 scope.go:117] "RemoveContainer" containerID="b6eb7bb2625a48be5127c7ecd8054f4b5f9b17a07f31b4debc4d2fd30bd140ad" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.523067 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.537589 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.558328 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.559708 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.564159 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.577731 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.674158 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5f74b5f5cc-54sss" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.689286 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7dc78f94-acb0-4411-b1a2-14dd6500674b-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7dc78f94-acb0-4411-b1a2-14dd6500674b\") " pod="openstack/cinder-scheduler-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.689362 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7dc78f94-acb0-4411-b1a2-14dd6500674b-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7dc78f94-acb0-4411-b1a2-14dd6500674b\") " pod="openstack/cinder-scheduler-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.689391 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7dc78f94-acb0-4411-b1a2-14dd6500674b-scripts\") pod \"cinder-scheduler-0\" (UID: \"7dc78f94-acb0-4411-b1a2-14dd6500674b\") " pod="openstack/cinder-scheduler-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.689455 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7dc78f94-acb0-4411-b1a2-14dd6500674b-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7dc78f94-acb0-4411-b1a2-14dd6500674b\") " pod="openstack/cinder-scheduler-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.689487 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7dc78f94-acb0-4411-b1a2-14dd6500674b-config-data\") pod \"cinder-scheduler-0\" (UID: \"7dc78f94-acb0-4411-b1a2-14dd6500674b\") " pod="openstack/cinder-scheduler-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.689544 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8xx82\" (UniqueName: \"kubernetes.io/projected/7dc78f94-acb0-4411-b1a2-14dd6500674b-kube-api-access-8xx82\") pod \"cinder-scheduler-0\" (UID: \"7dc78f94-acb0-4411-b1a2-14dd6500674b\") " pod="openstack/cinder-scheduler-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.745983 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-744fd954cc-ngzjd"] Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.746829 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-744fd954cc-ngzjd" podUID="3bab4b53-c147-4875-aaea-df06dae44b04" containerName="dnsmasq-dns" containerID="cri-o://ae3095796bd451a14cd4ec37a6aac8ee71abb63b13b255c1a69cc358efc9d139" gracePeriod=10 Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.791737 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7dc78f94-acb0-4411-b1a2-14dd6500674b-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7dc78f94-acb0-4411-b1a2-14dd6500674b\") " pod="openstack/cinder-scheduler-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.791882 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7dc78f94-acb0-4411-b1a2-14dd6500674b-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7dc78f94-acb0-4411-b1a2-14dd6500674b\") " pod="openstack/cinder-scheduler-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.791905 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7dc78f94-acb0-4411-b1a2-14dd6500674b-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7dc78f94-acb0-4411-b1a2-14dd6500674b\") " pod="openstack/cinder-scheduler-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.791914 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7dc78f94-acb0-4411-b1a2-14dd6500674b-scripts\") pod \"cinder-scheduler-0\" (UID: \"7dc78f94-acb0-4411-b1a2-14dd6500674b\") " pod="openstack/cinder-scheduler-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.792620 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7dc78f94-acb0-4411-b1a2-14dd6500674b-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7dc78f94-acb0-4411-b1a2-14dd6500674b\") " pod="openstack/cinder-scheduler-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.792689 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7dc78f94-acb0-4411-b1a2-14dd6500674b-config-data\") pod \"cinder-scheduler-0\" (UID: \"7dc78f94-acb0-4411-b1a2-14dd6500674b\") " pod="openstack/cinder-scheduler-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.792738 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8xx82\" (UniqueName: \"kubernetes.io/projected/7dc78f94-acb0-4411-b1a2-14dd6500674b-kube-api-access-8xx82\") pod \"cinder-scheduler-0\" (UID: \"7dc78f94-acb0-4411-b1a2-14dd6500674b\") " pod="openstack/cinder-scheduler-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.798092 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7dc78f94-acb0-4411-b1a2-14dd6500674b-scripts\") pod \"cinder-scheduler-0\" (UID: \"7dc78f94-acb0-4411-b1a2-14dd6500674b\") " pod="openstack/cinder-scheduler-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.798504 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7dc78f94-acb0-4411-b1a2-14dd6500674b-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7dc78f94-acb0-4411-b1a2-14dd6500674b\") " pod="openstack/cinder-scheduler-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.802146 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7dc78f94-acb0-4411-b1a2-14dd6500674b-config-data\") pod \"cinder-scheduler-0\" (UID: \"7dc78f94-acb0-4411-b1a2-14dd6500674b\") " pod="openstack/cinder-scheduler-0" Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 
Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.808429 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7dc78f94-acb0-4411-b1a2-14dd6500674b-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7dc78f94-acb0-4411-b1a2-14dd6500674b\") " pod="openstack/cinder-scheduler-0"
Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.811345 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8xx82\" (UniqueName: \"kubernetes.io/projected/7dc78f94-acb0-4411-b1a2-14dd6500674b-kube-api-access-8xx82\") pod \"cinder-scheduler-0\" (UID: \"7dc78f94-acb0-4411-b1a2-14dd6500674b\") " pod="openstack/cinder-scheduler-0"
Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.880178 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 10 16:51:41 crc kubenswrapper[4799]: W1010 16:51:41.880780 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod31fc68f8_af18_42b7_a94c_90a22afea5f1.slice/crio-1729d6e07b94453ac709f5c5f7a9355fe6c0f4500c9e56089fdbb9d43d7b42e8 WatchSource:0}: Error finding container 1729d6e07b94453ac709f5c5f7a9355fe6c0f4500c9e56089fdbb9d43d7b42e8: Status 404 returned error can't find the container with id 1729d6e07b94453ac709f5c5f7a9355fe6c0f4500c9e56089fdbb9d43d7b42e8
Oct 10 16:51:41 crc kubenswrapper[4799]: I1010 16:51:41.966552 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.260919 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-744fd954cc-ngzjd"
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.307338 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3bab4b53-c147-4875-aaea-df06dae44b04-config\") pod \"3bab4b53-c147-4875-aaea-df06dae44b04\" (UID: \"3bab4b53-c147-4875-aaea-df06dae44b04\") "
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.307502 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zww56\" (UniqueName: \"kubernetes.io/projected/3bab4b53-c147-4875-aaea-df06dae44b04-kube-api-access-zww56\") pod \"3bab4b53-c147-4875-aaea-df06dae44b04\" (UID: \"3bab4b53-c147-4875-aaea-df06dae44b04\") "
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.307545 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3bab4b53-c147-4875-aaea-df06dae44b04-ovsdbserver-nb\") pod \"3bab4b53-c147-4875-aaea-df06dae44b04\" (UID: \"3bab4b53-c147-4875-aaea-df06dae44b04\") "
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.307642 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3bab4b53-c147-4875-aaea-df06dae44b04-dns-swift-storage-0\") pod \"3bab4b53-c147-4875-aaea-df06dae44b04\" (UID: \"3bab4b53-c147-4875-aaea-df06dae44b04\") "
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.307670 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3bab4b53-c147-4875-aaea-df06dae44b04-ovsdbserver-sb\") pod \"3bab4b53-c147-4875-aaea-df06dae44b04\" (UID: \"3bab4b53-c147-4875-aaea-df06dae44b04\") "
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.307705 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3bab4b53-c147-4875-aaea-df06dae44b04-dns-svc\") pod \"3bab4b53-c147-4875-aaea-df06dae44b04\" (UID: \"3bab4b53-c147-4875-aaea-df06dae44b04\") "
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.319210 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3bab4b53-c147-4875-aaea-df06dae44b04-kube-api-access-zww56" (OuterVolumeSpecName: "kube-api-access-zww56") pod "3bab4b53-c147-4875-aaea-df06dae44b04" (UID: "3bab4b53-c147-4875-aaea-df06dae44b04"). InnerVolumeSpecName "kube-api-access-zww56". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.362869 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3bab4b53-c147-4875-aaea-df06dae44b04-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "3bab4b53-c147-4875-aaea-df06dae44b04" (UID: "3bab4b53-c147-4875-aaea-df06dae44b04"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.363056 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3bab4b53-c147-4875-aaea-df06dae44b04-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3bab4b53-c147-4875-aaea-df06dae44b04" (UID: "3bab4b53-c147-4875-aaea-df06dae44b04"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.373087 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3bab4b53-c147-4875-aaea-df06dae44b04-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3bab4b53-c147-4875-aaea-df06dae44b04" (UID: "3bab4b53-c147-4875-aaea-df06dae44b04"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.403661 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3bab4b53-c147-4875-aaea-df06dae44b04-config" (OuterVolumeSpecName: "config") pod "3bab4b53-c147-4875-aaea-df06dae44b04" (UID: "3bab4b53-c147-4875-aaea-df06dae44b04"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.410884 4799 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3bab4b53-c147-4875-aaea-df06dae44b04-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.410914 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3bab4b53-c147-4875-aaea-df06dae44b04-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.410926 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3bab4b53-c147-4875-aaea-df06dae44b04-config\") on node \"crc\" DevicePath \"\""
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.410934 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zww56\" (UniqueName: \"kubernetes.io/projected/3bab4b53-c147-4875-aaea-df06dae44b04-kube-api-access-zww56\") on node \"crc\" DevicePath \"\""
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.410946 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3bab4b53-c147-4875-aaea-df06dae44b04-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.413864 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3bab4b53-c147-4875-aaea-df06dae44b04-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3bab4b53-c147-4875-aaea-df06dae44b04" (UID: "3bab4b53-c147-4875-aaea-df06dae44b04"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.461255 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-694f88c746-tbgjz"
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.512902 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3bab4b53-c147-4875-aaea-df06dae44b04-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.524602 4799 generic.go:334] "Generic (PLEG): container finished" podID="3bab4b53-c147-4875-aaea-df06dae44b04" containerID="ae3095796bd451a14cd4ec37a6aac8ee71abb63b13b255c1a69cc358efc9d139" exitCode=0
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.524666 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-744fd954cc-ngzjd" event={"ID":"3bab4b53-c147-4875-aaea-df06dae44b04","Type":"ContainerDied","Data":"ae3095796bd451a14cd4ec37a6aac8ee71abb63b13b255c1a69cc358efc9d139"}
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.524693 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-744fd954cc-ngzjd" event={"ID":"3bab4b53-c147-4875-aaea-df06dae44b04","Type":"ContainerDied","Data":"dd45c3f8f254de7e9eb99c6ef5c57d50a19af24f9104d8755e3b636403d1a54b"}
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.524709 4799 scope.go:117] "RemoveContainer" containerID="ae3095796bd451a14cd4ec37a6aac8ee71abb63b13b255c1a69cc358efc9d139"
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.524811 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-744fd954cc-ngzjd"
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.531918 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"31fc68f8-af18-42b7-a94c-90a22afea5f1","Type":"ContainerStarted","Data":"1729d6e07b94453ac709f5c5f7a9355fe6c0f4500c9e56089fdbb9d43d7b42e8"}
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.535693 4799 generic.go:334] "Generic (PLEG): container finished" podID="b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0" containerID="866b587e8dd73a9f0f531084fbcbe1a89d5bdd82e34c2eec0199cef8596e3329" exitCode=143
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.536546 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0","Type":"ContainerDied","Data":"866b587e8dd73a9f0f531084fbcbe1a89d5bdd82e34c2eec0199cef8596e3329"}
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.549621 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.552256 4799 scope.go:117] "RemoveContainer" containerID="0a350afee44f9a4994b7e57622835f2d7e5b5441064e342822475aa18c0daadc"
Oct 10 16:51:42 crc kubenswrapper[4799]: W1010 16:51:42.556208 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7dc78f94_acb0_4411_b1a2_14dd6500674b.slice/crio-09c101bdfa8200db69fbedd9879fffc56304b95ad83b7a1bac433f5c3fed197c WatchSource:0}: Error finding container 09c101bdfa8200db69fbedd9879fffc56304b95ad83b7a1bac433f5c3fed197c: Status 404 returned error can't find the container with id 09c101bdfa8200db69fbedd9879fffc56304b95ad83b7a1bac433f5c3fed197c
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.727027 4799 scope.go:117] "RemoveContainer" containerID="ae3095796bd451a14cd4ec37a6aac8ee71abb63b13b255c1a69cc358efc9d139"
Oct 10 16:51:42 crc kubenswrapper[4799]: E1010 16:51:42.727682 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae3095796bd451a14cd4ec37a6aac8ee71abb63b13b255c1a69cc358efc9d139\": container with ID starting with ae3095796bd451a14cd4ec37a6aac8ee71abb63b13b255c1a69cc358efc9d139 not found: ID does not exist" containerID="ae3095796bd451a14cd4ec37a6aac8ee71abb63b13b255c1a69cc358efc9d139"
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.727706 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae3095796bd451a14cd4ec37a6aac8ee71abb63b13b255c1a69cc358efc9d139"} err="failed to get container status \"ae3095796bd451a14cd4ec37a6aac8ee71abb63b13b255c1a69cc358efc9d139\": rpc error: code = NotFound desc = could not find container \"ae3095796bd451a14cd4ec37a6aac8ee71abb63b13b255c1a69cc358efc9d139\": container with ID starting with ae3095796bd451a14cd4ec37a6aac8ee71abb63b13b255c1a69cc358efc9d139 not found: ID does not exist"
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.727726 4799 scope.go:117] "RemoveContainer" containerID="0a350afee44f9a4994b7e57622835f2d7e5b5441064e342822475aa18c0daadc"
Oct 10 16:51:42 crc kubenswrapper[4799]: E1010 16:51:42.728107 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a350afee44f9a4994b7e57622835f2d7e5b5441064e342822475aa18c0daadc\": container with ID starting with 0a350afee44f9a4994b7e57622835f2d7e5b5441064e342822475aa18c0daadc not found: ID does not exist" containerID="0a350afee44f9a4994b7e57622835f2d7e5b5441064e342822475aa18c0daadc"
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.728144 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a350afee44f9a4994b7e57622835f2d7e5b5441064e342822475aa18c0daadc"} err="failed to get container status \"0a350afee44f9a4994b7e57622835f2d7e5b5441064e342822475aa18c0daadc\": rpc error: code = NotFound desc = could not find container \"0a350afee44f9a4994b7e57622835f2d7e5b5441064e342822475aa18c0daadc\": container with ID starting with 0a350afee44f9a4994b7e57622835f2d7e5b5441064e342822475aa18c0daadc not found: ID does not exist"
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.731680 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-744fd954cc-ngzjd"]
Oct 10 16:51:42 crc kubenswrapper[4799]: I1010 16:51:42.740092 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-744fd954cc-ngzjd"]
Oct 10 16:51:43 crc kubenswrapper[4799]: I1010 16:51:43.425033 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2240606d-067a-4655-9deb-611ff6e3d5af" path="/var/lib/kubelet/pods/2240606d-067a-4655-9deb-611ff6e3d5af/volumes"
Oct 10 16:51:43 crc kubenswrapper[4799]: I1010 16:51:43.426276 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3bab4b53-c147-4875-aaea-df06dae44b04" path="/var/lib/kubelet/pods/3bab4b53-c147-4875-aaea-df06dae44b04/volumes"
Oct 10 16:51:43 crc kubenswrapper[4799]: I1010 16:51:43.561279 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7dc78f94-acb0-4411-b1a2-14dd6500674b","Type":"ContainerStarted","Data":"a5a1b6e00a35ec28b0a11cef63bf27aa74edf00ead5c5dff888593622c9a0138"}
Oct 10 16:51:43 crc kubenswrapper[4799]: I1010 16:51:43.561346 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7dc78f94-acb0-4411-b1a2-14dd6500674b","Type":"ContainerStarted","Data":"09c101bdfa8200db69fbedd9879fffc56304b95ad83b7a1bac433f5c3fed197c"}
Oct 10 16:51:43 crc kubenswrapper[4799]: I1010 16:51:43.570714 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"31fc68f8-af18-42b7-a94c-90a22afea5f1","Type":"ContainerStarted","Data":"d2b3ab1b197b085ea5a23bbdabb78c44e9c002b3cd5536ddb8dc1fcd93bae475"}
Oct 10 16:51:43 crc kubenswrapper[4799]: I1010 16:51:43.570770 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"31fc68f8-af18-42b7-a94c-90a22afea5f1","Type":"ContainerStarted","Data":"f233fe566e513cc4d04821964bcde90cce13e4323a97a80af9c4e16bc8ddb102"}
Oct 10 16:51:43 crc kubenswrapper[4799]: I1010 16:51:43.609697 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.609680424 podStartE2EDuration="3.609680424s" podCreationTimestamp="2025-10-10 16:51:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:51:43.604153237 +0000 UTC m=+1197.112477362" watchObservedRunningTime="2025-10-10 16:51:43.609680424 +0000 UTC m=+1197.118004539"
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.017658 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.018330 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3" containerName="ceilometer-central-agent" containerID="cri-o://4a850942ad660d59cad880d27871b132e4b78133816d348570dca1309bbe0cc2" gracePeriod=30
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.018438 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3" containerName="proxy-httpd" containerID="cri-o://0e4d67573fc7051eb2a82f10df5ef08a23dbbf730c4c7508e74be1e08fe03fb9" gracePeriod=30
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.018455 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3" containerName="ceilometer-notification-agent" containerID="cri-o://4b1f81396a03bbdff9b4db95fc8e929c7d01c8d07ea7e15891c3c97bbc39e2e2" gracePeriod=30
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.018670 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3" containerName="sg-core" containerID="cri-o://b8bb6a627d95f865adcce7f4a3518c281775a147e8cb02d5b3b447e2f941f3dc" gracePeriod=30
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.207442 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-eef9-account-create-8rt89"]
Oct 10 16:51:44 crc kubenswrapper[4799]: E1010 16:51:44.207874 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bab4b53-c147-4875-aaea-df06dae44b04" containerName="init"
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.207889 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bab4b53-c147-4875-aaea-df06dae44b04" containerName="init"
Oct 10 16:51:44 crc kubenswrapper[4799]: E1010 16:51:44.207905 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bab4b53-c147-4875-aaea-df06dae44b04" containerName="dnsmasq-dns"
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.207912 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bab4b53-c147-4875-aaea-df06dae44b04" containerName="dnsmasq-dns"
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.208074 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="3bab4b53-c147-4875-aaea-df06dae44b04" containerName="dnsmasq-dns"
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.208660 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-eef9-account-create-8rt89"
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.211723 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret"
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.225800 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-eef9-account-create-8rt89"]
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.257057 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7lj8\" (UniqueName: \"kubernetes.io/projected/a2d50cf9-d24c-42fb-a3be-716a020a8b5a-kube-api-access-f7lj8\") pod \"nova-api-eef9-account-create-8rt89\" (UID: \"a2d50cf9-d24c-42fb-a3be-716a020a8b5a\") " pod="openstack/nova-api-eef9-account-create-8rt89"
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.358748 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7lj8\" (UniqueName: \"kubernetes.io/projected/a2d50cf9-d24c-42fb-a3be-716a020a8b5a-kube-api-access-f7lj8\") pod \"nova-api-eef9-account-create-8rt89\" (UID: \"a2d50cf9-d24c-42fb-a3be-716a020a8b5a\") " pod="openstack/nova-api-eef9-account-create-8rt89"
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.396035 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f7lj8\" (UniqueName: \"kubernetes.io/projected/a2d50cf9-d24c-42fb-a3be-716a020a8b5a-kube-api-access-f7lj8\") pod \"nova-api-eef9-account-create-8rt89\" (UID: \"a2d50cf9-d24c-42fb-a3be-716a020a8b5a\") " pod="openstack/nova-api-eef9-account-create-8rt89"
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.408571 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-f8a8-account-create-bcclb"]
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.410365 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-f8a8-account-create-bcclb"
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.413870 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret"
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.420596 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-f8a8-account-create-bcclb"]
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.461156 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4dgwq\" (UniqueName: \"kubernetes.io/projected/9099bbc4-2f79-441f-a02b-6653832c7714-kube-api-access-4dgwq\") pod \"nova-cell0-f8a8-account-create-bcclb\" (UID: \"9099bbc4-2f79-441f-a02b-6653832c7714\") " pod="openstack/nova-cell0-f8a8-account-create-bcclb"
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.562924 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4dgwq\" (UniqueName: \"kubernetes.io/projected/9099bbc4-2f79-441f-a02b-6653832c7714-kube-api-access-4dgwq\") pod \"nova-cell0-f8a8-account-create-bcclb\" (UID: \"9099bbc4-2f79-441f-a02b-6653832c7714\") " pod="openstack/nova-cell0-f8a8-account-create-bcclb"
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.582571 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-eef9-account-create-8rt89"
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.587020 4799 generic.go:334] "Generic (PLEG): container finished" podID="a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3" containerID="0e4d67573fc7051eb2a82f10df5ef08a23dbbf730c4c7508e74be1e08fe03fb9" exitCode=0
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.587049 4799 generic.go:334] "Generic (PLEG): container finished" podID="a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3" containerID="b8bb6a627d95f865adcce7f4a3518c281775a147e8cb02d5b3b447e2f941f3dc" exitCode=2
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.587057 4799 generic.go:334] "Generic (PLEG): container finished" podID="a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3" containerID="4b1f81396a03bbdff9b4db95fc8e929c7d01c8d07ea7e15891c3c97bbc39e2e2" exitCode=0
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.587065 4799 generic.go:334] "Generic (PLEG): container finished" podID="a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3" containerID="4a850942ad660d59cad880d27871b132e4b78133816d348570dca1309bbe0cc2" exitCode=0
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.587106 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3","Type":"ContainerDied","Data":"0e4d67573fc7051eb2a82f10df5ef08a23dbbf730c4c7508e74be1e08fe03fb9"}
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.587130 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3","Type":"ContainerDied","Data":"b8bb6a627d95f865adcce7f4a3518c281775a147e8cb02d5b3b447e2f941f3dc"}
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.587141 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3","Type":"ContainerDied","Data":"4b1f81396a03bbdff9b4db95fc8e929c7d01c8d07ea7e15891c3c97bbc39e2e2"}
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.587149 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3","Type":"ContainerDied","Data":"4a850942ad660d59cad880d27871b132e4b78133816d348570dca1309bbe0cc2"}
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.593439 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7dc78f94-acb0-4411-b1a2-14dd6500674b","Type":"ContainerStarted","Data":"45d51a08521515637f5b1846723d166ebcfd370a8e928d653837e32fd1bdcaff"}
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.607372 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4dgwq\" (UniqueName: \"kubernetes.io/projected/9099bbc4-2f79-441f-a02b-6653832c7714-kube-api-access-4dgwq\") pod \"nova-cell0-f8a8-account-create-bcclb\" (UID: \"9099bbc4-2f79-441f-a02b-6653832c7714\") " pod="openstack/nova-cell0-f8a8-account-create-bcclb"
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.627408 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-6843-account-create-dm4pm"]
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.628556 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-6843-account-create-dm4pm"
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.630077 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.6300607769999997 podStartE2EDuration="3.630060777s" podCreationTimestamp="2025-10-10 16:51:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:51:44.619498226 +0000 UTC m=+1198.127822341" watchObservedRunningTime="2025-10-10 16:51:44.630060777 +0000 UTC m=+1198.138384892"
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.632915 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret"
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.653821 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-6843-account-create-dm4pm"]
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.665251 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brj6t\" (UniqueName: \"kubernetes.io/projected/afce335f-6c8d-422f-9ee9-f69cd8a83715-kube-api-access-brj6t\") pod \"nova-cell1-6843-account-create-dm4pm\" (UID: \"afce335f-6c8d-422f-9ee9-f69cd8a83715\") " pod="openstack/nova-cell1-6843-account-create-dm4pm"
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.683335 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.154:9292/healthcheck\": read tcp 10.217.0.2:43526->10.217.0.154:9292: read: connection reset by peer"
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.683335 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.154:9292/healthcheck\": read tcp 10.217.0.2:43520->10.217.0.154:9292: read: connection reset by peer"
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.763520 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-f8a8-account-create-bcclb"
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.767424 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brj6t\" (UniqueName: \"kubernetes.io/projected/afce335f-6c8d-422f-9ee9-f69cd8a83715-kube-api-access-brj6t\") pod \"nova-cell1-6843-account-create-dm4pm\" (UID: \"afce335f-6c8d-422f-9ee9-f69cd8a83715\") " pod="openstack/nova-cell1-6843-account-create-dm4pm"
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.787094 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brj6t\" (UniqueName: \"kubernetes.io/projected/afce335f-6c8d-422f-9ee9-f69cd8a83715-kube-api-access-brj6t\") pod \"nova-cell1-6843-account-create-dm4pm\" (UID: \"afce335f-6c8d-422f-9ee9-f69cd8a83715\") " pod="openstack/nova-cell1-6843-account-create-dm4pm"
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.902284 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-69f7ddf877-mclzd"
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.904007 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-6843-account-create-dm4pm"
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.968400 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-694f88c746-tbgjz"]
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.968633 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-694f88c746-tbgjz" podUID="2b7debcd-ccac-4b9d-9b6e-011a0f8072d9" containerName="neutron-api" containerID="cri-o://51b411970b3fc3556f70eba6af79b3eba9f4d0cd9b2656eaf38bc05e92a8d335" gracePeriod=30
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.969112 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-694f88c746-tbgjz" podUID="2b7debcd-ccac-4b9d-9b6e-011a0f8072d9" containerName="neutron-httpd" containerID="cri-o://a62494b9ff2ced5a032b285546d5814b83b5cac1ccbb043f0f7db208a692bf89" gracePeriod=30
Oct 10 16:51:44 crc kubenswrapper[4799]: I1010 16:51:44.971378 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.082771 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hw45z\" (UniqueName: \"kubernetes.io/projected/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-kube-api-access-hw45z\") pod \"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3\" (UID: \"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3\") "
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.083210 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-sg-core-conf-yaml\") pod \"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3\" (UID: \"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3\") "
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.083341 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-combined-ca-bundle\") pod \"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3\" (UID: \"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3\") "
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.083368 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-scripts\") pod \"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3\" (UID: \"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3\") "
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.083403 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-run-httpd\") pod \"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3\" (UID: \"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3\") "
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.083439 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-log-httpd\") pod \"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3\" (UID: \"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3\") "
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.083462 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-config-data\") pod \"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3\" (UID: \"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3\") "
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.084105 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3" (UID: "a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.084136 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3" (UID: "a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.090923 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-kube-api-access-hw45z" (OuterVolumeSpecName: "kube-api-access-hw45z") pod "a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3" (UID: "a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3"). InnerVolumeSpecName "kube-api-access-hw45z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.099240 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-scripts" (OuterVolumeSpecName: "scripts") pod "a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3" (UID: "a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.142704 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3" (UID: "a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.146625 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-eef9-account-create-8rt89"]
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.186367 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-scripts\") on node \"crc\" DevicePath \"\""
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.186400 4799 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-run-httpd\") on node \"crc\" DevicePath \"\""
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.186408 4799 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-log-httpd\") on node \"crc\" DevicePath \"\""
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.186417 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hw45z\" (UniqueName: \"kubernetes.io/projected/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-kube-api-access-hw45z\") on node \"crc\" DevicePath \"\""
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.186426 4799 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.210709 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3" (UID: "a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.294935 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.294969 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.396285 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-f8a8-account-create-bcclb"] Oct 10 16:51:45 crc kubenswrapper[4799]: W1010 16:51:45.439075 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9099bbc4_2f79_441f_a02b_6653832c7714.slice/crio-a100e438b0a8bd01248de4b8b54a391018866cf5124083e9694bf4894b60de33 WatchSource:0}: Error finding container a100e438b0a8bd01248de4b8b54a391018866cf5124083e9694bf4894b60de33: Status 404 returned error can't find the container with id a100e438b0a8bd01248de4b8b54a391018866cf5124083e9694bf4894b60de33 Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.519819 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.573140 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-6843-account-create-dm4pm"] Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.603189 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-logs\") pod \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\" (UID: \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\") " Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.603261 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dl6pq\" (UniqueName: \"kubernetes.io/projected/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-kube-api-access-dl6pq\") pod \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\" (UID: \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\") " Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.603324 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\" (UID: \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\") " Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.603357 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-httpd-run\") pod \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\" (UID: \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\") " Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.603396 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-scripts\") pod \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\" (UID: \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\") " Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.603443 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-combined-ca-bundle\") pod \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\" (UID: \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\") " Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.603463 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-internal-tls-certs\") pod \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\" (UID: \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\") " Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.603538 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-config-data\") pod \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\" (UID: \"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0\") " Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.604820 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0" (UID: "b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.605013 4799 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.605032 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-logs" (OuterVolumeSpecName: "logs") pod "b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0" (UID: "b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.617278 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-scripts" (OuterVolumeSpecName: "scripts") pod "b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0" (UID: "b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.623591 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-kube-api-access-dl6pq" (OuterVolumeSpecName: "kube-api-access-dl6pq") pod "b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0" (UID: "b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0"). InnerVolumeSpecName "kube-api-access-dl6pq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.623724 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "glance") pod "b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0" (UID: "b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0"). InnerVolumeSpecName "local-storage12-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.625213 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-f8a8-account-create-bcclb" event={"ID":"9099bbc4-2f79-441f-a02b-6653832c7714","Type":"ContainerStarted","Data":"a100e438b0a8bd01248de4b8b54a391018866cf5124083e9694bf4894b60de33"} Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.631532 4799 generic.go:334] "Generic (PLEG): container finished" podID="a2d50cf9-d24c-42fb-a3be-716a020a8b5a" containerID="3d6a3a8b4b49974d5c09eeb8c6ec4a2ebb70c3d484c47ca792ba983856130907" exitCode=0 Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.631613 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-eef9-account-create-8rt89" event={"ID":"a2d50cf9-d24c-42fb-a3be-716a020a8b5a","Type":"ContainerDied","Data":"3d6a3a8b4b49974d5c09eeb8c6ec4a2ebb70c3d484c47ca792ba983856130907"} Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.631636 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-eef9-account-create-8rt89" event={"ID":"a2d50cf9-d24c-42fb-a3be-716a020a8b5a","Type":"ContainerStarted","Data":"dba267a9d5a3f8135dffa3ae55e5143096d77d3e5053f799ef4018c1afa29765"} Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.640946 4799 generic.go:334] "Generic (PLEG): container finished" podID="2b7debcd-ccac-4b9d-9b6e-011a0f8072d9" containerID="a62494b9ff2ced5a032b285546d5814b83b5cac1ccbb043f0f7db208a692bf89" exitCode=0 Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.641008 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-694f88c746-tbgjz" event={"ID":"2b7debcd-ccac-4b9d-9b6e-011a0f8072d9","Type":"ContainerDied","Data":"a62494b9ff2ced5a032b285546d5814b83b5cac1ccbb043f0f7db208a692bf89"} Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.651426 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-6843-account-create-dm4pm" event={"ID":"afce335f-6c8d-422f-9ee9-f69cd8a83715","Type":"ContainerStarted","Data":"975e4a056531d4fdb10670c84c95ce5bb2d39a1f2e4e55a0e065fb76b076212f"} Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.651943 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0" (UID: "b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.659501 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3","Type":"ContainerDied","Data":"8bca0624a2407c0272fd20f138a97b1f8739dfe4ae5539b0b91d56c104d2749f"} Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.659549 4799 scope.go:117] "RemoveContainer" containerID="0e4d67573fc7051eb2a82f10df5ef08a23dbbf730c4c7508e74be1e08fe03fb9" Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.659666 4799 util.go:48] "No ready sandbox for pod can be found. 
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.659666 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.668271 4799 generic.go:334] "Generic (PLEG): container finished" podID="b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0" containerID="d09f1c41ff83e3f94d5800a1f1a3455a847a7fa8e5a288a1102d81b7fccd07bf" exitCode=0
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.669634 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.669857 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0","Type":"ContainerDied","Data":"d09f1c41ff83e3f94d5800a1f1a3455a847a7fa8e5a288a1102d81b7fccd07bf"}
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.669963 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0","Type":"ContainerDied","Data":"3cd05d3a72dcf4f48946b2a2eac9f5ebde3099bbaa924029c1c346340722213a"}
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.706514 4799 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" "
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.706730 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-scripts\") on node \"crc\" DevicePath \"\""
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.706810 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.706866 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-logs\") on node \"crc\" DevicePath \"\""
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.706919 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dl6pq\" (UniqueName: \"kubernetes.io/projected/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-kube-api-access-dl6pq\") on node \"crc\" DevicePath \"\""
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.713157 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0" (UID: "b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.771877 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-config-data" (OuterVolumeSpecName: "config-data") pod "b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0" (UID: "b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.791010 4799 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc"
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.808309 4799 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\""
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.808341 4799 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.808354 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0-config-data\") on node \"crc\" DevicePath \"\""
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.822142 4799 scope.go:117] "RemoveContainer" containerID="b8bb6a627d95f865adcce7f4a3518c281775a147e8cb02d5b3b447e2f941f3dc"
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.856588 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.857674 4799 scope.go:117] "RemoveContainer" containerID="4b1f81396a03bbdff9b4db95fc8e929c7d01c8d07ea7e15891c3c97bbc39e2e2"
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.866867 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.883337 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Oct 10 16:51:45 crc kubenswrapper[4799]: E1010 16:51:45.883920 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3" containerName="proxy-httpd"
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.883952 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3" containerName="proxy-httpd"
Oct 10 16:51:45 crc kubenswrapper[4799]: E1010 16:51:45.883963 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3" containerName="ceilometer-notification-agent"
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.883969 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3" containerName="ceilometer-notification-agent"
Oct 10 16:51:45 crc kubenswrapper[4799]: E1010 16:51:45.883984 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0" containerName="glance-httpd"
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.883990 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0" containerName="glance-httpd"
Oct 10 16:51:45 crc kubenswrapper[4799]: E1010 16:51:45.884019 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3" containerName="sg-core"
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.884026 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3" containerName="sg-core"
Oct 10 16:51:45 crc kubenswrapper[4799]: E1010 16:51:45.884040 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0" containerName="glance-log"
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.884045 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0" containerName="glance-log"
Oct 10 16:51:45 crc kubenswrapper[4799]: E1010 16:51:45.884073 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3" containerName="ceilometer-central-agent"
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.884079 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3" containerName="ceilometer-central-agent"
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.884302 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3" containerName="sg-core"
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.884332 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3" containerName="proxy-httpd"
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.884347 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3" containerName="ceilometer-notification-agent"
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.884359 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0" containerName="glance-httpd"
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.884371 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0" containerName="glance-log"
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.884384 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3" containerName="ceilometer-central-agent"
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.887894 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.889898 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.889906 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.893836 4799 scope.go:117] "RemoveContainer" containerID="4a850942ad660d59cad880d27871b132e4b78133816d348570dca1309bbe0cc2"
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.898448 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.910208 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/59b18af6-5673-4b47-b936-6234d29e80db-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"59b18af6-5673-4b47-b936-6234d29e80db\") " pod="openstack/ceilometer-0"
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.910251 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/59b18af6-5673-4b47-b936-6234d29e80db-run-httpd\") pod \"ceilometer-0\" (UID: \"59b18af6-5673-4b47-b936-6234d29e80db\") " pod="openstack/ceilometer-0"
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.910453 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/59b18af6-5673-4b47-b936-6234d29e80db-log-httpd\") pod \"ceilometer-0\" (UID: \"59b18af6-5673-4b47-b936-6234d29e80db\") " pod="openstack/ceilometer-0"
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.910701 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5hj5\" (UniqueName: \"kubernetes.io/projected/59b18af6-5673-4b47-b936-6234d29e80db-kube-api-access-k5hj5\") pod \"ceilometer-0\" (UID: \"59b18af6-5673-4b47-b936-6234d29e80db\") " pod="openstack/ceilometer-0"
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.910791 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/59b18af6-5673-4b47-b936-6234d29e80db-scripts\") pod \"ceilometer-0\" (UID: \"59b18af6-5673-4b47-b936-6234d29e80db\") " pod="openstack/ceilometer-0"
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.910964 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59b18af6-5673-4b47-b936-6234d29e80db-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"59b18af6-5673-4b47-b936-6234d29e80db\") " pod="openstack/ceilometer-0"
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.911032 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59b18af6-5673-4b47-b936-6234d29e80db-config-data\") pod \"ceilometer-0\" (UID: \"59b18af6-5673-4b47-b936-6234d29e80db\") " pod="openstack/ceilometer-0"
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.927709 4799 scope.go:117] "RemoveContainer" containerID="d09f1c41ff83e3f94d5800a1f1a3455a847a7fa8e5a288a1102d81b7fccd07bf"
Oct 10 16:51:45 crc kubenswrapper[4799]: I1010 16:51:45.964384 4799 scope.go:117] "RemoveContainer" containerID="866b587e8dd73a9f0f531084fbcbe1a89d5bdd82e34c2eec0199cef8596e3329"
Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.005591 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.012101 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.012932 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59b18af6-5673-4b47-b936-6234d29e80db-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"59b18af6-5673-4b47-b936-6234d29e80db\") " pod="openstack/ceilometer-0"
Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.012979 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59b18af6-5673-4b47-b936-6234d29e80db-config-data\") pod \"ceilometer-0\" (UID: \"59b18af6-5673-4b47-b936-6234d29e80db\") " pod="openstack/ceilometer-0"
Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.013011 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/59b18af6-5673-4b47-b936-6234d29e80db-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"59b18af6-5673-4b47-b936-6234d29e80db\") " pod="openstack/ceilometer-0"
Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.013032 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/59b18af6-5673-4b47-b936-6234d29e80db-run-httpd\") pod \"ceilometer-0\" (UID: \"59b18af6-5673-4b47-b936-6234d29e80db\") " pod="openstack/ceilometer-0"
Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.013083 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/59b18af6-5673-4b47-b936-6234d29e80db-log-httpd\") pod \"ceilometer-0\" (UID: \"59b18af6-5673-4b47-b936-6234d29e80db\") " pod="openstack/ceilometer-0"
Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.013125 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5hj5\" (UniqueName: \"kubernetes.io/projected/59b18af6-5673-4b47-b936-6234d29e80db-kube-api-access-k5hj5\") pod \"ceilometer-0\" (UID: \"59b18af6-5673-4b47-b936-6234d29e80db\") " pod="openstack/ceilometer-0"
Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.013156 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/59b18af6-5673-4b47-b936-6234d29e80db-scripts\") pod \"ceilometer-0\" (UID: \"59b18af6-5673-4b47-b936-6234d29e80db\") " pod="openstack/ceilometer-0"
Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.014500 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/59b18af6-5673-4b47-b936-6234d29e80db-log-httpd\") pod \"ceilometer-0\" (UID: \"59b18af6-5673-4b47-b936-6234d29e80db\") " pod="openstack/ceilometer-0"
Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.015030 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/59b18af6-5673-4b47-b936-6234d29e80db-run-httpd\") pod \"ceilometer-0\" (UID: \"59b18af6-5673-4b47-b936-6234d29e80db\") " pod="openstack/ceilometer-0"
Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.017286 4799 scope.go:117] "RemoveContainer" containerID="d09f1c41ff83e3f94d5800a1f1a3455a847a7fa8e5a288a1102d81b7fccd07bf"
Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.020419 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/59b18af6-5673-4b47-b936-6234d29e80db-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"59b18af6-5673-4b47-b936-6234d29e80db\") " pod="openstack/ceilometer-0"
Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.025630 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59b18af6-5673-4b47-b936-6234d29e80db-config-data\") pod \"ceilometer-0\" (UID: \"59b18af6-5673-4b47-b936-6234d29e80db\") " pod="openstack/ceilometer-0"
Oct 10 16:51:46 crc kubenswrapper[4799]: E1010 16:51:46.025882 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d09f1c41ff83e3f94d5800a1f1a3455a847a7fa8e5a288a1102d81b7fccd07bf\": container with ID starting with d09f1c41ff83e3f94d5800a1f1a3455a847a7fa8e5a288a1102d81b7fccd07bf not found: ID does not exist" containerID="d09f1c41ff83e3f94d5800a1f1a3455a847a7fa8e5a288a1102d81b7fccd07bf"
Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.025944 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d09f1c41ff83e3f94d5800a1f1a3455a847a7fa8e5a288a1102d81b7fccd07bf"} err="failed to get container status \"d09f1c41ff83e3f94d5800a1f1a3455a847a7fa8e5a288a1102d81b7fccd07bf\": rpc error: code = NotFound desc = could not find container \"d09f1c41ff83e3f94d5800a1f1a3455a847a7fa8e5a288a1102d81b7fccd07bf\": container with ID starting with d09f1c41ff83e3f94d5800a1f1a3455a847a7fa8e5a288a1102d81b7fccd07bf not found: ID does not exist"
Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.025971 4799 scope.go:117] "RemoveContainer" containerID="866b587e8dd73a9f0f531084fbcbe1a89d5bdd82e34c2eec0199cef8596e3329"
Oct 10 16:51:46 crc kubenswrapper[4799]: E1010 16:51:46.028836 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"866b587e8dd73a9f0f531084fbcbe1a89d5bdd82e34c2eec0199cef8596e3329\": container with ID starting with 866b587e8dd73a9f0f531084fbcbe1a89d5bdd82e34c2eec0199cef8596e3329 not found: ID does not exist" containerID="866b587e8dd73a9f0f531084fbcbe1a89d5bdd82e34c2eec0199cef8596e3329"
Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.029072 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"866b587e8dd73a9f0f531084fbcbe1a89d5bdd82e34c2eec0199cef8596e3329"} err="failed to get container status \"866b587e8dd73a9f0f531084fbcbe1a89d5bdd82e34c2eec0199cef8596e3329\": rpc error: code = NotFound desc = could not find container \"866b587e8dd73a9f0f531084fbcbe1a89d5bdd82e34c2eec0199cef8596e3329\": container with ID starting with 866b587e8dd73a9f0f531084fbcbe1a89d5bdd82e34c2eec0199cef8596e3329 not found: ID does not exist"
Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.030468 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.032074 4799 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.033779 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.036952 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.046543 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.049337 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5hj5\" (UniqueName: \"kubernetes.io/projected/59b18af6-5673-4b47-b936-6234d29e80db-kube-api-access-k5hj5\") pod \"ceilometer-0\" (UID: \"59b18af6-5673-4b47-b936-6234d29e80db\") " pod="openstack/ceilometer-0" Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.049728 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/59b18af6-5673-4b47-b936-6234d29e80db-scripts\") pod \"ceilometer-0\" (UID: \"59b18af6-5673-4b47-b936-6234d29e80db\") " pod="openstack/ceilometer-0" Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.054195 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59b18af6-5673-4b47-b936-6234d29e80db-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"59b18af6-5673-4b47-b936-6234d29e80db\") " pod="openstack/ceilometer-0" Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.118604 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e424a8e6-64c8-4572-8706-33026a2cc44d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e424a8e6-64c8-4572-8706-33026a2cc44d\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.118781 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e424a8e6-64c8-4572-8706-33026a2cc44d-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e424a8e6-64c8-4572-8706-33026a2cc44d\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.118944 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e424a8e6-64c8-4572-8706-33026a2cc44d-logs\") pod \"glance-default-internal-api-0\" (UID: \"e424a8e6-64c8-4572-8706-33026a2cc44d\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.119018 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e424a8e6-64c8-4572-8706-33026a2cc44d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e424a8e6-64c8-4572-8706-33026a2cc44d\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.119124 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: 
\"e424a8e6-64c8-4572-8706-33026a2cc44d\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.119205 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjm4m\" (UniqueName: \"kubernetes.io/projected/e424a8e6-64c8-4572-8706-33026a2cc44d-kube-api-access-sjm4m\") pod \"glance-default-internal-api-0\" (UID: \"e424a8e6-64c8-4572-8706-33026a2cc44d\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.119292 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e424a8e6-64c8-4572-8706-33026a2cc44d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e424a8e6-64c8-4572-8706-33026a2cc44d\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.119384 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e424a8e6-64c8-4572-8706-33026a2cc44d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e424a8e6-64c8-4572-8706-33026a2cc44d\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.220693 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e424a8e6-64c8-4572-8706-33026a2cc44d-logs\") pod \"glance-default-internal-api-0\" (UID: \"e424a8e6-64c8-4572-8706-33026a2cc44d\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.220749 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e424a8e6-64c8-4572-8706-33026a2cc44d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e424a8e6-64c8-4572-8706-33026a2cc44d\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.220823 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"e424a8e6-64c8-4572-8706-33026a2cc44d\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.220856 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjm4m\" (UniqueName: \"kubernetes.io/projected/e424a8e6-64c8-4572-8706-33026a2cc44d-kube-api-access-sjm4m\") pod \"glance-default-internal-api-0\" (UID: \"e424a8e6-64c8-4572-8706-33026a2cc44d\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.220889 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e424a8e6-64c8-4572-8706-33026a2cc44d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e424a8e6-64c8-4572-8706-33026a2cc44d\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.220920 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e424a8e6-64c8-4572-8706-33026a2cc44d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: 
\"e424a8e6-64c8-4572-8706-33026a2cc44d\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.220952 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e424a8e6-64c8-4572-8706-33026a2cc44d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e424a8e6-64c8-4572-8706-33026a2cc44d\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.220990 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e424a8e6-64c8-4572-8706-33026a2cc44d-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e424a8e6-64c8-4572-8706-33026a2cc44d\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.221151 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e424a8e6-64c8-4572-8706-33026a2cc44d-logs\") pod \"glance-default-internal-api-0\" (UID: \"e424a8e6-64c8-4572-8706-33026a2cc44d\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.222030 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"e424a8e6-64c8-4572-8706-33026a2cc44d\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-internal-api-0" Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.222152 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e424a8e6-64c8-4572-8706-33026a2cc44d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e424a8e6-64c8-4572-8706-33026a2cc44d\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.222040 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.225171 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e424a8e6-64c8-4572-8706-33026a2cc44d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e424a8e6-64c8-4572-8706-33026a2cc44d\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.225656 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e424a8e6-64c8-4572-8706-33026a2cc44d-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e424a8e6-64c8-4572-8706-33026a2cc44d\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.242838 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e424a8e6-64c8-4572-8706-33026a2cc44d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e424a8e6-64c8-4572-8706-33026a2cc44d\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.243709 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e424a8e6-64c8-4572-8706-33026a2cc44d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e424a8e6-64c8-4572-8706-33026a2cc44d\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.248478 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjm4m\" (UniqueName: \"kubernetes.io/projected/e424a8e6-64c8-4572-8706-33026a2cc44d-kube-api-access-sjm4m\") pod \"glance-default-internal-api-0\" (UID: \"e424a8e6-64c8-4572-8706-33026a2cc44d\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.277926 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"e424a8e6-64c8-4572-8706-33026a2cc44d\") " pod="openstack/glance-default-internal-api-0" Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.435323 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.689719 4799 generic.go:334] "Generic (PLEG): container finished" podID="9099bbc4-2f79-441f-a02b-6653832c7714" containerID="cc3bf43932097b81efc1b1d49b76a3c7a6d6672678274efa32ce397549e1ff65" exitCode=0 Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.689795 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-f8a8-account-create-bcclb" event={"ID":"9099bbc4-2f79-441f-a02b-6653832c7714","Type":"ContainerDied","Data":"cc3bf43932097b81efc1b1d49b76a3c7a6d6672678274efa32ce397549e1ff65"} Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.695234 4799 generic.go:334] "Generic (PLEG): container finished" podID="afce335f-6c8d-422f-9ee9-f69cd8a83715" containerID="cba2769cec2ab6be62ce3ab6c11812ca168c95b6a4c44b2275fc0c2801c3b3a7" exitCode=0 Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.695290 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-6843-account-create-dm4pm" event={"ID":"afce335f-6c8d-422f-9ee9-f69cd8a83715","Type":"ContainerDied","Data":"cba2769cec2ab6be62ce3ab6c11812ca168c95b6a4c44b2275fc0c2801c3b3a7"} Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.756333 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:51:46 crc kubenswrapper[4799]: I1010 16:51:46.967922 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 10 16:51:47 crc kubenswrapper[4799]: I1010 16:51:47.046573 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 10 16:51:47 crc kubenswrapper[4799]: I1010 16:51:47.138428 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-eef9-account-create-8rt89" Oct 10 16:51:47 crc kubenswrapper[4799]: I1010 16:51:47.244623 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f7lj8\" (UniqueName: \"kubernetes.io/projected/a2d50cf9-d24c-42fb-a3be-716a020a8b5a-kube-api-access-f7lj8\") pod \"a2d50cf9-d24c-42fb-a3be-716a020a8b5a\" (UID: \"a2d50cf9-d24c-42fb-a3be-716a020a8b5a\") " Oct 10 16:51:47 crc kubenswrapper[4799]: I1010 16:51:47.248024 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2d50cf9-d24c-42fb-a3be-716a020a8b5a-kube-api-access-f7lj8" (OuterVolumeSpecName: "kube-api-access-f7lj8") pod "a2d50cf9-d24c-42fb-a3be-716a020a8b5a" (UID: "a2d50cf9-d24c-42fb-a3be-716a020a8b5a"). InnerVolumeSpecName "kube-api-access-f7lj8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:51:47 crc kubenswrapper[4799]: I1010 16:51:47.351042 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f7lj8\" (UniqueName: \"kubernetes.io/projected/a2d50cf9-d24c-42fb-a3be-716a020a8b5a-kube-api-access-f7lj8\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:47 crc kubenswrapper[4799]: I1010 16:51:47.446703 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3" path="/var/lib/kubelet/pods/a1a7ed1c-2ac1-4c0b-9896-7e5b28d030c3/volumes" Oct 10 16:51:47 crc kubenswrapper[4799]: I1010 16:51:47.449692 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0" path="/var/lib/kubelet/pods/b7f1ee63-3947-40d6-ac14-8a1cf5e6f7f0/volumes" Oct 10 16:51:47 crc kubenswrapper[4799]: I1010 16:51:47.731217 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"59b18af6-5673-4b47-b936-6234d29e80db","Type":"ContainerStarted","Data":"7319d487cd3fb830377942374419320559f5bcf9f90e36a31967c4b800112976"} Oct 10 16:51:47 crc kubenswrapper[4799]: I1010 16:51:47.731501 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"59b18af6-5673-4b47-b936-6234d29e80db","Type":"ContainerStarted","Data":"df61c060f15c6cf447d7cc7ddcfb2b196dcd4b28ed2d09776f8970f669b7ec3d"} Oct 10 16:51:47 crc kubenswrapper[4799]: I1010 16:51:47.736983 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:51:47 crc kubenswrapper[4799]: I1010 16:51:47.753318 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-eef9-account-create-8rt89" Oct 10 16:51:47 crc kubenswrapper[4799]: I1010 16:51:47.753826 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-eef9-account-create-8rt89" event={"ID":"a2d50cf9-d24c-42fb-a3be-716a020a8b5a","Type":"ContainerDied","Data":"dba267a9d5a3f8135dffa3ae55e5143096d77d3e5053f799ef4018c1afa29765"} Oct 10 16:51:47 crc kubenswrapper[4799]: I1010 16:51:47.753875 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dba267a9d5a3f8135dffa3ae55e5143096d77d3e5053f799ef4018c1afa29765" Oct 10 16:51:47 crc kubenswrapper[4799]: I1010 16:51:47.771555 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e424a8e6-64c8-4572-8706-33026a2cc44d","Type":"ContainerStarted","Data":"0c7274b1845353423dc9ca09628f12d4f64ce4d85b22e824833d76018af77dc1"} Oct 10 16:51:48 crc kubenswrapper[4799]: I1010 16:51:48.235931 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-6843-account-create-dm4pm" Oct 10 16:51:48 crc kubenswrapper[4799]: I1010 16:51:48.375549 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-f8a8-account-create-bcclb" Oct 10 16:51:48 crc kubenswrapper[4799]: I1010 16:51:48.404581 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-brj6t\" (UniqueName: \"kubernetes.io/projected/afce335f-6c8d-422f-9ee9-f69cd8a83715-kube-api-access-brj6t\") pod \"afce335f-6c8d-422f-9ee9-f69cd8a83715\" (UID: \"afce335f-6c8d-422f-9ee9-f69cd8a83715\") " Oct 10 16:51:48 crc kubenswrapper[4799]: I1010 16:51:48.412125 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afce335f-6c8d-422f-9ee9-f69cd8a83715-kube-api-access-brj6t" (OuterVolumeSpecName: "kube-api-access-brj6t") pod "afce335f-6c8d-422f-9ee9-f69cd8a83715" (UID: "afce335f-6c8d-422f-9ee9-f69cd8a83715"). InnerVolumeSpecName "kube-api-access-brj6t". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:51:48 crc kubenswrapper[4799]: I1010 16:51:48.507695 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4dgwq\" (UniqueName: \"kubernetes.io/projected/9099bbc4-2f79-441f-a02b-6653832c7714-kube-api-access-4dgwq\") pod \"9099bbc4-2f79-441f-a02b-6653832c7714\" (UID: \"9099bbc4-2f79-441f-a02b-6653832c7714\") " Oct 10 16:51:48 crc kubenswrapper[4799]: I1010 16:51:48.508567 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-brj6t\" (UniqueName: \"kubernetes.io/projected/afce335f-6c8d-422f-9ee9-f69cd8a83715-kube-api-access-brj6t\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:48 crc kubenswrapper[4799]: I1010 16:51:48.512631 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9099bbc4-2f79-441f-a02b-6653832c7714-kube-api-access-4dgwq" (OuterVolumeSpecName: "kube-api-access-4dgwq") pod "9099bbc4-2f79-441f-a02b-6653832c7714" (UID: "9099bbc4-2f79-441f-a02b-6653832c7714"). InnerVolumeSpecName "kube-api-access-4dgwq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:51:48 crc kubenswrapper[4799]: I1010 16:51:48.610330 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4dgwq\" (UniqueName: \"kubernetes.io/projected/9099bbc4-2f79-441f-a02b-6653832c7714-kube-api-access-4dgwq\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:48 crc kubenswrapper[4799]: I1010 16:51:48.795605 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-6843-account-create-dm4pm" event={"ID":"afce335f-6c8d-422f-9ee9-f69cd8a83715","Type":"ContainerDied","Data":"975e4a056531d4fdb10670c84c95ce5bb2d39a1f2e4e55a0e065fb76b076212f"} Oct 10 16:51:48 crc kubenswrapper[4799]: I1010 16:51:48.795863 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="975e4a056531d4fdb10670c84c95ce5bb2d39a1f2e4e55a0e065fb76b076212f" Oct 10 16:51:48 crc kubenswrapper[4799]: I1010 16:51:48.795676 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-6843-account-create-dm4pm" Oct 10 16:51:48 crc kubenswrapper[4799]: I1010 16:51:48.797909 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e424a8e6-64c8-4572-8706-33026a2cc44d","Type":"ContainerStarted","Data":"45b55f581534a90bac80ffd0b27bca1fc0d2639dbcc1d9165ca16243e681541e"} Oct 10 16:51:48 crc kubenswrapper[4799]: I1010 16:51:48.797955 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e424a8e6-64c8-4572-8706-33026a2cc44d","Type":"ContainerStarted","Data":"f9d2d1faeec7a5eede440474335541991431514b0a33516124505bcbefe52453"} Oct 10 16:51:48 crc kubenswrapper[4799]: I1010 16:51:48.808891 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-f8a8-account-create-bcclb" event={"ID":"9099bbc4-2f79-441f-a02b-6653832c7714","Type":"ContainerDied","Data":"a100e438b0a8bd01248de4b8b54a391018866cf5124083e9694bf4894b60de33"} Oct 10 16:51:48 crc kubenswrapper[4799]: I1010 16:51:48.809021 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a100e438b0a8bd01248de4b8b54a391018866cf5124083e9694bf4894b60de33" Oct 10 16:51:48 crc kubenswrapper[4799]: I1010 16:51:48.809052 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-f8a8-account-create-bcclb" Oct 10 16:51:48 crc kubenswrapper[4799]: I1010 16:51:48.823621 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"59b18af6-5673-4b47-b936-6234d29e80db","Type":"ContainerStarted","Data":"8313937bdd30288019c75135c3b84a0482dd0cea1921a49ac32417f81f3cdb1c"} Oct 10 16:51:48 crc kubenswrapper[4799]: I1010 16:51:48.832314 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=2.829746395 podStartE2EDuration="2.829746395s" podCreationTimestamp="2025-10-10 16:51:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:51:48.819410589 +0000 UTC m=+1202.327734704" watchObservedRunningTime="2025-10-10 16:51:48.829746395 +0000 UTC m=+1202.338070500" Oct 10 16:51:48 crc kubenswrapper[4799]: I1010 16:51:48.909656 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Oct 10 16:51:49 crc kubenswrapper[4799]: I1010 16:51:49.684566 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-kgmbm"] Oct 10 16:51:49 crc kubenswrapper[4799]: E1010 16:51:49.685141 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9099bbc4-2f79-441f-a02b-6653832c7714" containerName="mariadb-account-create" Oct 10 16:51:49 crc kubenswrapper[4799]: I1010 16:51:49.685156 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="9099bbc4-2f79-441f-a02b-6653832c7714" containerName="mariadb-account-create" Oct 10 16:51:49 crc kubenswrapper[4799]: E1010 16:51:49.685187 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2d50cf9-d24c-42fb-a3be-716a020a8b5a" containerName="mariadb-account-create" Oct 10 16:51:49 crc kubenswrapper[4799]: I1010 16:51:49.685194 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2d50cf9-d24c-42fb-a3be-716a020a8b5a" containerName="mariadb-account-create" Oct 10 16:51:49 crc kubenswrapper[4799]: E1010 16:51:49.685213 4799 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afce335f-6c8d-422f-9ee9-f69cd8a83715" containerName="mariadb-account-create" Oct 10 16:51:49 crc kubenswrapper[4799]: I1010 16:51:49.685220 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="afce335f-6c8d-422f-9ee9-f69cd8a83715" containerName="mariadb-account-create" Oct 10 16:51:49 crc kubenswrapper[4799]: I1010 16:51:49.685377 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="9099bbc4-2f79-441f-a02b-6653832c7714" containerName="mariadb-account-create" Oct 10 16:51:49 crc kubenswrapper[4799]: I1010 16:51:49.685406 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="afce335f-6c8d-422f-9ee9-f69cd8a83715" containerName="mariadb-account-create" Oct 10 16:51:49 crc kubenswrapper[4799]: I1010 16:51:49.685422 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2d50cf9-d24c-42fb-a3be-716a020a8b5a" containerName="mariadb-account-create" Oct 10 16:51:49 crc kubenswrapper[4799]: I1010 16:51:49.685959 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-kgmbm" Oct 10 16:51:49 crc kubenswrapper[4799]: I1010 16:51:49.690682 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-pg9jc" Oct 10 16:51:49 crc kubenswrapper[4799]: I1010 16:51:49.691086 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 10 16:51:49 crc kubenswrapper[4799]: I1010 16:51:49.693867 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-kgmbm"] Oct 10 16:51:49 crc kubenswrapper[4799]: I1010 16:51:49.696609 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Oct 10 16:51:49 crc kubenswrapper[4799]: I1010 16:51:49.833904 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"59b18af6-5673-4b47-b936-6234d29e80db","Type":"ContainerStarted","Data":"72fd57e6c468acd3fe2e98b1e98885feb0593fdb4195e119d3c5afa2d1e27f69"} Oct 10 16:51:49 crc kubenswrapper[4799]: I1010 16:51:49.851925 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2b06e0c-bd3e-4928-94f4-bdb22ae99a89-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-kgmbm\" (UID: \"f2b06e0c-bd3e-4928-94f4-bdb22ae99a89\") " pod="openstack/nova-cell0-conductor-db-sync-kgmbm" Oct 10 16:51:49 crc kubenswrapper[4799]: I1010 16:51:49.852001 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7f62n\" (UniqueName: \"kubernetes.io/projected/f2b06e0c-bd3e-4928-94f4-bdb22ae99a89-kube-api-access-7f62n\") pod \"nova-cell0-conductor-db-sync-kgmbm\" (UID: \"f2b06e0c-bd3e-4928-94f4-bdb22ae99a89\") " pod="openstack/nova-cell0-conductor-db-sync-kgmbm" Oct 10 16:51:49 crc kubenswrapper[4799]: I1010 16:51:49.852026 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2b06e0c-bd3e-4928-94f4-bdb22ae99a89-config-data\") pod \"nova-cell0-conductor-db-sync-kgmbm\" (UID: \"f2b06e0c-bd3e-4928-94f4-bdb22ae99a89\") " pod="openstack/nova-cell0-conductor-db-sync-kgmbm" Oct 10 16:51:49 crc kubenswrapper[4799]: I1010 16:51:49.852093 4799 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2b06e0c-bd3e-4928-94f4-bdb22ae99a89-scripts\") pod \"nova-cell0-conductor-db-sync-kgmbm\" (UID: \"f2b06e0c-bd3e-4928-94f4-bdb22ae99a89\") " pod="openstack/nova-cell0-conductor-db-sync-kgmbm" Oct 10 16:51:49 crc kubenswrapper[4799]: I1010 16:51:49.953732 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2b06e0c-bd3e-4928-94f4-bdb22ae99a89-scripts\") pod \"nova-cell0-conductor-db-sync-kgmbm\" (UID: \"f2b06e0c-bd3e-4928-94f4-bdb22ae99a89\") " pod="openstack/nova-cell0-conductor-db-sync-kgmbm" Oct 10 16:51:49 crc kubenswrapper[4799]: I1010 16:51:49.953912 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2b06e0c-bd3e-4928-94f4-bdb22ae99a89-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-kgmbm\" (UID: \"f2b06e0c-bd3e-4928-94f4-bdb22ae99a89\") " pod="openstack/nova-cell0-conductor-db-sync-kgmbm" Oct 10 16:51:49 crc kubenswrapper[4799]: I1010 16:51:49.953953 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7f62n\" (UniqueName: \"kubernetes.io/projected/f2b06e0c-bd3e-4928-94f4-bdb22ae99a89-kube-api-access-7f62n\") pod \"nova-cell0-conductor-db-sync-kgmbm\" (UID: \"f2b06e0c-bd3e-4928-94f4-bdb22ae99a89\") " pod="openstack/nova-cell0-conductor-db-sync-kgmbm" Oct 10 16:51:49 crc kubenswrapper[4799]: I1010 16:51:49.953970 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2b06e0c-bd3e-4928-94f4-bdb22ae99a89-config-data\") pod \"nova-cell0-conductor-db-sync-kgmbm\" (UID: \"f2b06e0c-bd3e-4928-94f4-bdb22ae99a89\") " pod="openstack/nova-cell0-conductor-db-sync-kgmbm" Oct 10 16:51:49 crc kubenswrapper[4799]: I1010 16:51:49.961559 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2b06e0c-bd3e-4928-94f4-bdb22ae99a89-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-kgmbm\" (UID: \"f2b06e0c-bd3e-4928-94f4-bdb22ae99a89\") " pod="openstack/nova-cell0-conductor-db-sync-kgmbm" Oct 10 16:51:49 crc kubenswrapper[4799]: I1010 16:51:49.961797 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2b06e0c-bd3e-4928-94f4-bdb22ae99a89-config-data\") pod \"nova-cell0-conductor-db-sync-kgmbm\" (UID: \"f2b06e0c-bd3e-4928-94f4-bdb22ae99a89\") " pod="openstack/nova-cell0-conductor-db-sync-kgmbm" Oct 10 16:51:49 crc kubenswrapper[4799]: I1010 16:51:49.971954 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2b06e0c-bd3e-4928-94f4-bdb22ae99a89-scripts\") pod \"nova-cell0-conductor-db-sync-kgmbm\" (UID: \"f2b06e0c-bd3e-4928-94f4-bdb22ae99a89\") " pod="openstack/nova-cell0-conductor-db-sync-kgmbm" Oct 10 16:51:49 crc kubenswrapper[4799]: I1010 16:51:49.977521 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7f62n\" (UniqueName: \"kubernetes.io/projected/f2b06e0c-bd3e-4928-94f4-bdb22ae99a89-kube-api-access-7f62n\") pod \"nova-cell0-conductor-db-sync-kgmbm\" (UID: \"f2b06e0c-bd3e-4928-94f4-bdb22ae99a89\") " pod="openstack/nova-cell0-conductor-db-sync-kgmbm" Oct 10 16:51:50 crc kubenswrapper[4799]: I1010 16:51:50.005181 4799 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-kgmbm" Oct 10 16:51:50 crc kubenswrapper[4799]: I1010 16:51:50.478345 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-kgmbm"] Oct 10 16:51:50 crc kubenswrapper[4799]: W1010 16:51:50.479436 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf2b06e0c_bd3e_4928_94f4_bdb22ae99a89.slice/crio-20b4f4cd8fbec5cf4149746b583cac95a66bb87c002239d6bd6e6b9405014d0b WatchSource:0}: Error finding container 20b4f4cd8fbec5cf4149746b583cac95a66bb87c002239d6bd6e6b9405014d0b: Status 404 returned error can't find the container with id 20b4f4cd8fbec5cf4149746b583cac95a66bb87c002239d6bd6e6b9405014d0b Oct 10 16:51:50 crc kubenswrapper[4799]: I1010 16:51:50.846120 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-kgmbm" event={"ID":"f2b06e0c-bd3e-4928-94f4-bdb22ae99a89","Type":"ContainerStarted","Data":"20b4f4cd8fbec5cf4149746b583cac95a66bb87c002239d6bd6e6b9405014d0b"} Oct 10 16:51:50 crc kubenswrapper[4799]: I1010 16:51:50.849381 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"59b18af6-5673-4b47-b936-6234d29e80db","Type":"ContainerStarted","Data":"4220a18152adb489f1a512885e006c6638e19b871e611558cae105233b2c0196"} Oct 10 16:51:50 crc kubenswrapper[4799]: I1010 16:51:50.849633 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="59b18af6-5673-4b47-b936-6234d29e80db" containerName="proxy-httpd" containerID="cri-o://4220a18152adb489f1a512885e006c6638e19b871e611558cae105233b2c0196" gracePeriod=30 Oct 10 16:51:50 crc kubenswrapper[4799]: I1010 16:51:50.849640 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="59b18af6-5673-4b47-b936-6234d29e80db" containerName="ceilometer-notification-agent" containerID="cri-o://8313937bdd30288019c75135c3b84a0482dd0cea1921a49ac32417f81f3cdb1c" gracePeriod=30 Oct 10 16:51:50 crc kubenswrapper[4799]: I1010 16:51:50.849639 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="59b18af6-5673-4b47-b936-6234d29e80db" containerName="sg-core" containerID="cri-o://72fd57e6c468acd3fe2e98b1e98885feb0593fdb4195e119d3c5afa2d1e27f69" gracePeriod=30 Oct 10 16:51:50 crc kubenswrapper[4799]: I1010 16:51:50.849793 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 10 16:51:50 crc kubenswrapper[4799]: I1010 16:51:50.849860 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="59b18af6-5673-4b47-b936-6234d29e80db" containerName="ceilometer-central-agent" containerID="cri-o://7319d487cd3fb830377942374419320559f5bcf9f90e36a31967c4b800112976" gracePeriod=30 Oct 10 16:51:50 crc kubenswrapper[4799]: I1010 16:51:50.874734 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.394716389 podStartE2EDuration="5.874716494s" podCreationTimestamp="2025-10-10 16:51:45 +0000 UTC" firstStartedPulling="2025-10-10 16:51:46.738669158 +0000 UTC m=+1200.246993273" lastFinishedPulling="2025-10-10 16:51:50.218669263 +0000 UTC m=+1203.726993378" observedRunningTime="2025-10-10 16:51:50.871278719 +0000 UTC m=+1204.379602874" 
watchObservedRunningTime="2025-10-10 16:51:50.874716494 +0000 UTC m=+1204.383040609" Oct 10 16:51:51 crc kubenswrapper[4799]: I1010 16:51:51.180461 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 10 16:51:51 crc kubenswrapper[4799]: I1010 16:51:51.180501 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 10 16:51:51 crc kubenswrapper[4799]: I1010 16:51:51.212871 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 10 16:51:51 crc kubenswrapper[4799]: I1010 16:51:51.223048 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 10 16:51:51 crc kubenswrapper[4799]: I1010 16:51:51.863064 4799 generic.go:334] "Generic (PLEG): container finished" podID="59b18af6-5673-4b47-b936-6234d29e80db" containerID="4220a18152adb489f1a512885e006c6638e19b871e611558cae105233b2c0196" exitCode=0 Oct 10 16:51:51 crc kubenswrapper[4799]: I1010 16:51:51.863092 4799 generic.go:334] "Generic (PLEG): container finished" podID="59b18af6-5673-4b47-b936-6234d29e80db" containerID="72fd57e6c468acd3fe2e98b1e98885feb0593fdb4195e119d3c5afa2d1e27f69" exitCode=2 Oct 10 16:51:51 crc kubenswrapper[4799]: I1010 16:51:51.863099 4799 generic.go:334] "Generic (PLEG): container finished" podID="59b18af6-5673-4b47-b936-6234d29e80db" containerID="8313937bdd30288019c75135c3b84a0482dd0cea1921a49ac32417f81f3cdb1c" exitCode=0 Oct 10 16:51:51 crc kubenswrapper[4799]: I1010 16:51:51.863140 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"59b18af6-5673-4b47-b936-6234d29e80db","Type":"ContainerDied","Data":"4220a18152adb489f1a512885e006c6638e19b871e611558cae105233b2c0196"} Oct 10 16:51:51 crc kubenswrapper[4799]: I1010 16:51:51.863198 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 10 16:51:51 crc kubenswrapper[4799]: I1010 16:51:51.863220 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 10 16:51:51 crc kubenswrapper[4799]: I1010 16:51:51.863229 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"59b18af6-5673-4b47-b936-6234d29e80db","Type":"ContainerDied","Data":"72fd57e6c468acd3fe2e98b1e98885feb0593fdb4195e119d3c5afa2d1e27f69"} Oct 10 16:51:51 crc kubenswrapper[4799]: I1010 16:51:51.863238 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"59b18af6-5673-4b47-b936-6234d29e80db","Type":"ContainerDied","Data":"8313937bdd30288019c75135c3b84a0482dd0cea1921a49ac32417f81f3cdb1c"} Oct 10 16:51:52 crc kubenswrapper[4799]: I1010 16:51:52.228739 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 10 16:51:53 crc kubenswrapper[4799]: I1010 16:51:53.785913 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 10 16:51:53 crc kubenswrapper[4799]: I1010 16:51:53.834443 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 10 16:51:54 crc kubenswrapper[4799]: I1010 16:51:54.904620 4799 generic.go:334] "Generic (PLEG): container finished" podID="59b18af6-5673-4b47-b936-6234d29e80db" 
containerID="7319d487cd3fb830377942374419320559f5bcf9f90e36a31967c4b800112976" exitCode=0 Oct 10 16:51:54 crc kubenswrapper[4799]: I1010 16:51:54.904675 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"59b18af6-5673-4b47-b936-6234d29e80db","Type":"ContainerDied","Data":"7319d487cd3fb830377942374419320559f5bcf9f90e36a31967c4b800112976"} Oct 10 16:51:56 crc kubenswrapper[4799]: I1010 16:51:56.435659 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 10 16:51:56 crc kubenswrapper[4799]: I1010 16:51:56.439727 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 10 16:51:56 crc kubenswrapper[4799]: I1010 16:51:56.475991 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 10 16:51:56 crc kubenswrapper[4799]: I1010 16:51:56.495567 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 10 16:51:56 crc kubenswrapper[4799]: I1010 16:51:56.927594 4799 generic.go:334] "Generic (PLEG): container finished" podID="2b7debcd-ccac-4b9d-9b6e-011a0f8072d9" containerID="51b411970b3fc3556f70eba6af79b3eba9f4d0cd9b2656eaf38bc05e92a8d335" exitCode=0 Oct 10 16:51:56 crc kubenswrapper[4799]: I1010 16:51:56.927659 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-694f88c746-tbgjz" event={"ID":"2b7debcd-ccac-4b9d-9b6e-011a0f8072d9","Type":"ContainerDied","Data":"51b411970b3fc3556f70eba6af79b3eba9f4d0cd9b2656eaf38bc05e92a8d335"} Oct 10 16:51:56 crc kubenswrapper[4799]: I1010 16:51:56.927945 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 10 16:51:56 crc kubenswrapper[4799]: I1010 16:51:56.928222 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.336688 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.392325 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-694f88c746-tbgjz" Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.418903 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/59b18af6-5673-4b47-b936-6234d29e80db-sg-core-conf-yaml\") pod \"59b18af6-5673-4b47-b936-6234d29e80db\" (UID: \"59b18af6-5673-4b47-b936-6234d29e80db\") " Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.419338 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5hj5\" (UniqueName: \"kubernetes.io/projected/59b18af6-5673-4b47-b936-6234d29e80db-kube-api-access-k5hj5\") pod \"59b18af6-5673-4b47-b936-6234d29e80db\" (UID: \"59b18af6-5673-4b47-b936-6234d29e80db\") " Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.419462 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59b18af6-5673-4b47-b936-6234d29e80db-config-data\") pod \"59b18af6-5673-4b47-b936-6234d29e80db\" (UID: \"59b18af6-5673-4b47-b936-6234d29e80db\") " Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.419500 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59b18af6-5673-4b47-b936-6234d29e80db-combined-ca-bundle\") pod \"59b18af6-5673-4b47-b936-6234d29e80db\" (UID: \"59b18af6-5673-4b47-b936-6234d29e80db\") " Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.419553 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/59b18af6-5673-4b47-b936-6234d29e80db-scripts\") pod \"59b18af6-5673-4b47-b936-6234d29e80db\" (UID: \"59b18af6-5673-4b47-b936-6234d29e80db\") " Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.419591 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/59b18af6-5673-4b47-b936-6234d29e80db-run-httpd\") pod \"59b18af6-5673-4b47-b936-6234d29e80db\" (UID: \"59b18af6-5673-4b47-b936-6234d29e80db\") " Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.419623 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/59b18af6-5673-4b47-b936-6234d29e80db-log-httpd\") pod \"59b18af6-5673-4b47-b936-6234d29e80db\" (UID: \"59b18af6-5673-4b47-b936-6234d29e80db\") " Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.421544 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/59b18af6-5673-4b47-b936-6234d29e80db-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "59b18af6-5673-4b47-b936-6234d29e80db" (UID: "59b18af6-5673-4b47-b936-6234d29e80db"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.421931 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/59b18af6-5673-4b47-b936-6234d29e80db-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "59b18af6-5673-4b47-b936-6234d29e80db" (UID: "59b18af6-5673-4b47-b936-6234d29e80db"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.426894 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59b18af6-5673-4b47-b936-6234d29e80db-kube-api-access-k5hj5" (OuterVolumeSpecName: "kube-api-access-k5hj5") pod "59b18af6-5673-4b47-b936-6234d29e80db" (UID: "59b18af6-5673-4b47-b936-6234d29e80db"). InnerVolumeSpecName "kube-api-access-k5hj5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.430364 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59b18af6-5673-4b47-b936-6234d29e80db-scripts" (OuterVolumeSpecName: "scripts") pod "59b18af6-5673-4b47-b936-6234d29e80db" (UID: "59b18af6-5673-4b47-b936-6234d29e80db"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.472792 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59b18af6-5673-4b47-b936-6234d29e80db-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "59b18af6-5673-4b47-b936-6234d29e80db" (UID: "59b18af6-5673-4b47-b936-6234d29e80db"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.513884 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59b18af6-5673-4b47-b936-6234d29e80db-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "59b18af6-5673-4b47-b936-6234d29e80db" (UID: "59b18af6-5673-4b47-b936-6234d29e80db"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.521569 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2b7debcd-ccac-4b9d-9b6e-011a0f8072d9-httpd-config\") pod \"2b7debcd-ccac-4b9d-9b6e-011a0f8072d9\" (UID: \"2b7debcd-ccac-4b9d-9b6e-011a0f8072d9\") " Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.521688 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2b7debcd-ccac-4b9d-9b6e-011a0f8072d9-config\") pod \"2b7debcd-ccac-4b9d-9b6e-011a0f8072d9\" (UID: \"2b7debcd-ccac-4b9d-9b6e-011a0f8072d9\") " Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.521744 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fz28v\" (UniqueName: \"kubernetes.io/projected/2b7debcd-ccac-4b9d-9b6e-011a0f8072d9-kube-api-access-fz28v\") pod \"2b7debcd-ccac-4b9d-9b6e-011a0f8072d9\" (UID: \"2b7debcd-ccac-4b9d-9b6e-011a0f8072d9\") " Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.521935 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b7debcd-ccac-4b9d-9b6e-011a0f8072d9-combined-ca-bundle\") pod \"2b7debcd-ccac-4b9d-9b6e-011a0f8072d9\" (UID: \"2b7debcd-ccac-4b9d-9b6e-011a0f8072d9\") " Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.522055 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b7debcd-ccac-4b9d-9b6e-011a0f8072d9-ovndb-tls-certs\") pod \"2b7debcd-ccac-4b9d-9b6e-011a0f8072d9\" (UID: 
\"2b7debcd-ccac-4b9d-9b6e-011a0f8072d9\") " Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.523016 4799 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/59b18af6-5673-4b47-b936-6234d29e80db-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.523046 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5hj5\" (UniqueName: \"kubernetes.io/projected/59b18af6-5673-4b47-b936-6234d29e80db-kube-api-access-k5hj5\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.523066 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59b18af6-5673-4b47-b936-6234d29e80db-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.523084 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/59b18af6-5673-4b47-b936-6234d29e80db-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.523101 4799 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/59b18af6-5673-4b47-b936-6234d29e80db-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.523118 4799 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/59b18af6-5673-4b47-b936-6234d29e80db-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.529089 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59b18af6-5673-4b47-b936-6234d29e80db-config-data" (OuterVolumeSpecName: "config-data") pod "59b18af6-5673-4b47-b936-6234d29e80db" (UID: "59b18af6-5673-4b47-b936-6234d29e80db"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.530624 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b7debcd-ccac-4b9d-9b6e-011a0f8072d9-kube-api-access-fz28v" (OuterVolumeSpecName: "kube-api-access-fz28v") pod "2b7debcd-ccac-4b9d-9b6e-011a0f8072d9" (UID: "2b7debcd-ccac-4b9d-9b6e-011a0f8072d9"). InnerVolumeSpecName "kube-api-access-fz28v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.541121 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b7debcd-ccac-4b9d-9b6e-011a0f8072d9-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "2b7debcd-ccac-4b9d-9b6e-011a0f8072d9" (UID: "2b7debcd-ccac-4b9d-9b6e-011a0f8072d9"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.580277 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b7debcd-ccac-4b9d-9b6e-011a0f8072d9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2b7debcd-ccac-4b9d-9b6e-011a0f8072d9" (UID: "2b7debcd-ccac-4b9d-9b6e-011a0f8072d9"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.590956 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b7debcd-ccac-4b9d-9b6e-011a0f8072d9-config" (OuterVolumeSpecName: "config") pod "2b7debcd-ccac-4b9d-9b6e-011a0f8072d9" (UID: "2b7debcd-ccac-4b9d-9b6e-011a0f8072d9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.620707 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b7debcd-ccac-4b9d-9b6e-011a0f8072d9-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "2b7debcd-ccac-4b9d-9b6e-011a0f8072d9" (UID: "2b7debcd-ccac-4b9d-9b6e-011a0f8072d9"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.624839 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b7debcd-ccac-4b9d-9b6e-011a0f8072d9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.624868 4799 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b7debcd-ccac-4b9d-9b6e-011a0f8072d9-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.624878 4799 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2b7debcd-ccac-4b9d-9b6e-011a0f8072d9-httpd-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.624889 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/2b7debcd-ccac-4b9d-9b6e-011a0f8072d9-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.624901 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fz28v\" (UniqueName: \"kubernetes.io/projected/2b7debcd-ccac-4b9d-9b6e-011a0f8072d9-kube-api-access-fz28v\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.624912 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59b18af6-5673-4b47-b936-6234d29e80db-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.826351 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.969451 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"59b18af6-5673-4b47-b936-6234d29e80db","Type":"ContainerDied","Data":"df61c060f15c6cf447d7cc7ddcfb2b196dcd4b28ed2d09776f8970f669b7ec3d"} Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.970790 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.972807 4799 scope.go:117] "RemoveContainer" containerID="4220a18152adb489f1a512885e006c6638e19b871e611558cae105233b2c0196" Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.976101 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-694f88c746-tbgjz" event={"ID":"2b7debcd-ccac-4b9d-9b6e-011a0f8072d9","Type":"ContainerDied","Data":"df26d26bb66a91d34c2a3c15847b842400cebfe33e4969c7daa42221c86fb6c7"} Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.976111 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-694f88c746-tbgjz" Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.978377 4799 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.979841 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-kgmbm" event={"ID":"f2b06e0c-bd3e-4928-94f4-bdb22ae99a89","Type":"ContainerStarted","Data":"b23ac04dd50a4eaaa66ae053dec6bce7db1f6a62a4f25b005e5b7204ecb4bdc1"} Oct 10 16:51:58 crc kubenswrapper[4799]: I1010 16:51:58.998504 4799 scope.go:117] "RemoveContainer" containerID="72fd57e6c468acd3fe2e98b1e98885feb0593fdb4195e119d3c5afa2d1e27f69" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.016840 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-kgmbm" podStartSLOduration=2.403659708 podStartE2EDuration="10.01681948s" podCreationTimestamp="2025-10-10 16:51:49 +0000 UTC" firstStartedPulling="2025-10-10 16:51:50.481575011 +0000 UTC m=+1203.989899126" lastFinishedPulling="2025-10-10 16:51:58.094734783 +0000 UTC m=+1211.603058898" observedRunningTime="2025-10-10 16:51:59.010316259 +0000 UTC m=+1212.518640374" watchObservedRunningTime="2025-10-10 16:51:59.01681948 +0000 UTC m=+1212.525143595" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.032640 4799 scope.go:117] "RemoveContainer" containerID="8313937bdd30288019c75135c3b84a0482dd0cea1921a49ac32417f81f3cdb1c" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.034309 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.055410 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.066902 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-694f88c746-tbgjz"] Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.073122 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:51:59 crc kubenswrapper[4799]: E1010 16:51:59.073538 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59b18af6-5673-4b47-b936-6234d29e80db" containerName="proxy-httpd" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.073556 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="59b18af6-5673-4b47-b936-6234d29e80db" containerName="proxy-httpd" Oct 10 16:51:59 crc kubenswrapper[4799]: E1010 16:51:59.073573 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59b18af6-5673-4b47-b936-6234d29e80db" containerName="ceilometer-notification-agent" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.073581 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="59b18af6-5673-4b47-b936-6234d29e80db" 
containerName="ceilometer-notification-agent" Oct 10 16:51:59 crc kubenswrapper[4799]: E1010 16:51:59.073601 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b7debcd-ccac-4b9d-9b6e-011a0f8072d9" containerName="neutron-httpd" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.073607 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b7debcd-ccac-4b9d-9b6e-011a0f8072d9" containerName="neutron-httpd" Oct 10 16:51:59 crc kubenswrapper[4799]: E1010 16:51:59.073620 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59b18af6-5673-4b47-b936-6234d29e80db" containerName="ceilometer-central-agent" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.073627 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="59b18af6-5673-4b47-b936-6234d29e80db" containerName="ceilometer-central-agent" Oct 10 16:51:59 crc kubenswrapper[4799]: E1010 16:51:59.073637 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59b18af6-5673-4b47-b936-6234d29e80db" containerName="sg-core" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.073642 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="59b18af6-5673-4b47-b936-6234d29e80db" containerName="sg-core" Oct 10 16:51:59 crc kubenswrapper[4799]: E1010 16:51:59.073661 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b7debcd-ccac-4b9d-9b6e-011a0f8072d9" containerName="neutron-api" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.073667 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b7debcd-ccac-4b9d-9b6e-011a0f8072d9" containerName="neutron-api" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.073852 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b7debcd-ccac-4b9d-9b6e-011a0f8072d9" containerName="neutron-api" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.073867 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="59b18af6-5673-4b47-b936-6234d29e80db" containerName="ceilometer-central-agent" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.073885 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="59b18af6-5673-4b47-b936-6234d29e80db" containerName="sg-core" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.073898 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="59b18af6-5673-4b47-b936-6234d29e80db" containerName="ceilometer-notification-agent" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.073910 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="59b18af6-5673-4b47-b936-6234d29e80db" containerName="proxy-httpd" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.073921 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b7debcd-ccac-4b9d-9b6e-011a0f8072d9" containerName="neutron-httpd" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.075472 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.076476 4799 scope.go:117] "RemoveContainer" containerID="7319d487cd3fb830377942374419320559f5bcf9f90e36a31967c4b800112976" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.083882 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-694f88c746-tbgjz"] Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.084500 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.084575 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.089407 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.104937 4799 scope.go:117] "RemoveContainer" containerID="a62494b9ff2ced5a032b285546d5814b83b5cac1ccbb043f0f7db208a692bf89" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.133163 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/801513a8-8170-4b02-b637-c10364f240c7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"801513a8-8170-4b02-b637-c10364f240c7\") " pod="openstack/ceilometer-0" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.133244 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/801513a8-8170-4b02-b637-c10364f240c7-scripts\") pod \"ceilometer-0\" (UID: \"801513a8-8170-4b02-b637-c10364f240c7\") " pod="openstack/ceilometer-0" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.133274 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/801513a8-8170-4b02-b637-c10364f240c7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"801513a8-8170-4b02-b637-c10364f240c7\") " pod="openstack/ceilometer-0" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.133301 4799 scope.go:117] "RemoveContainer" containerID="51b411970b3fc3556f70eba6af79b3eba9f4d0cd9b2656eaf38bc05e92a8d335" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.133329 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/801513a8-8170-4b02-b637-c10364f240c7-run-httpd\") pod \"ceilometer-0\" (UID: \"801513a8-8170-4b02-b637-c10364f240c7\") " pod="openstack/ceilometer-0" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.133351 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/801513a8-8170-4b02-b637-c10364f240c7-config-data\") pod \"ceilometer-0\" (UID: \"801513a8-8170-4b02-b637-c10364f240c7\") " pod="openstack/ceilometer-0" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.133382 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/801513a8-8170-4b02-b637-c10364f240c7-log-httpd\") pod \"ceilometer-0\" (UID: \"801513a8-8170-4b02-b637-c10364f240c7\") " pod="openstack/ceilometer-0" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.133405 4799 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96kfc\" (UniqueName: \"kubernetes.io/projected/801513a8-8170-4b02-b637-c10364f240c7-kube-api-access-96kfc\") pod \"ceilometer-0\" (UID: \"801513a8-8170-4b02-b637-c10364f240c7\") " pod="openstack/ceilometer-0" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.222827 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.237995 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/801513a8-8170-4b02-b637-c10364f240c7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"801513a8-8170-4b02-b637-c10364f240c7\") " pod="openstack/ceilometer-0" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.238173 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/801513a8-8170-4b02-b637-c10364f240c7-run-httpd\") pod \"ceilometer-0\" (UID: \"801513a8-8170-4b02-b637-c10364f240c7\") " pod="openstack/ceilometer-0" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.238222 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/801513a8-8170-4b02-b637-c10364f240c7-config-data\") pod \"ceilometer-0\" (UID: \"801513a8-8170-4b02-b637-c10364f240c7\") " pod="openstack/ceilometer-0" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.238289 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/801513a8-8170-4b02-b637-c10364f240c7-log-httpd\") pod \"ceilometer-0\" (UID: \"801513a8-8170-4b02-b637-c10364f240c7\") " pod="openstack/ceilometer-0" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.238335 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96kfc\" (UniqueName: \"kubernetes.io/projected/801513a8-8170-4b02-b637-c10364f240c7-kube-api-access-96kfc\") pod \"ceilometer-0\" (UID: \"801513a8-8170-4b02-b637-c10364f240c7\") " pod="openstack/ceilometer-0" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.238404 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/801513a8-8170-4b02-b637-c10364f240c7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"801513a8-8170-4b02-b637-c10364f240c7\") " pod="openstack/ceilometer-0" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.238495 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/801513a8-8170-4b02-b637-c10364f240c7-scripts\") pod \"ceilometer-0\" (UID: \"801513a8-8170-4b02-b637-c10364f240c7\") " pod="openstack/ceilometer-0" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.238709 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/801513a8-8170-4b02-b637-c10364f240c7-run-httpd\") pod \"ceilometer-0\" (UID: \"801513a8-8170-4b02-b637-c10364f240c7\") " pod="openstack/ceilometer-0" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.238801 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/801513a8-8170-4b02-b637-c10364f240c7-log-httpd\") pod \"ceilometer-0\" 
(UID: \"801513a8-8170-4b02-b637-c10364f240c7\") " pod="openstack/ceilometer-0" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.245228 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/801513a8-8170-4b02-b637-c10364f240c7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"801513a8-8170-4b02-b637-c10364f240c7\") " pod="openstack/ceilometer-0" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.247316 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/801513a8-8170-4b02-b637-c10364f240c7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"801513a8-8170-4b02-b637-c10364f240c7\") " pod="openstack/ceilometer-0" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.250356 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/801513a8-8170-4b02-b637-c10364f240c7-config-data\") pod \"ceilometer-0\" (UID: \"801513a8-8170-4b02-b637-c10364f240c7\") " pod="openstack/ceilometer-0" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.263802 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96kfc\" (UniqueName: \"kubernetes.io/projected/801513a8-8170-4b02-b637-c10364f240c7-kube-api-access-96kfc\") pod \"ceilometer-0\" (UID: \"801513a8-8170-4b02-b637-c10364f240c7\") " pod="openstack/ceilometer-0" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.265425 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/801513a8-8170-4b02-b637-c10364f240c7-scripts\") pod \"ceilometer-0\" (UID: \"801513a8-8170-4b02-b637-c10364f240c7\") " pod="openstack/ceilometer-0" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.396608 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.413121 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b7debcd-ccac-4b9d-9b6e-011a0f8072d9" path="/var/lib/kubelet/pods/2b7debcd-ccac-4b9d-9b6e-011a0f8072d9/volumes" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.414036 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59b18af6-5673-4b47-b936-6234d29e80db" path="/var/lib/kubelet/pods/59b18af6-5673-4b47-b936-6234d29e80db/volumes" Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.691269 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:51:59 crc kubenswrapper[4799]: I1010 16:51:59.988142 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"801513a8-8170-4b02-b637-c10364f240c7","Type":"ContainerStarted","Data":"9b0c3873252b437bce46bbe9ff5edbe8649db87ae3ce59315fa7313b7891601c"} Oct 10 16:52:02 crc kubenswrapper[4799]: I1010 16:52:02.009845 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"801513a8-8170-4b02-b637-c10364f240c7","Type":"ContainerStarted","Data":"19425ae729b6aa9f2dcc31ce95dadb0a293d3f1cccf0e8753c9e682571421a35"} Oct 10 16:52:03 crc kubenswrapper[4799]: I1010 16:52:03.023229 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"801513a8-8170-4b02-b637-c10364f240c7","Type":"ContainerStarted","Data":"d68ff2f343a0e2a95bd1d808a68d0fc0aeea4dd766882dec64ac7d56062cc7f9"} Oct 10 16:52:03 crc kubenswrapper[4799]: I1010 16:52:03.023569 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"801513a8-8170-4b02-b637-c10364f240c7","Type":"ContainerStarted","Data":"36a847df7b5ac753f61fd31406958f6f527016237d0cac4a56ff63f22adba123"} Oct 10 16:52:03 crc kubenswrapper[4799]: I1010 16:52:03.524928 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:52:05 crc kubenswrapper[4799]: I1010 16:52:05.045279 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"801513a8-8170-4b02-b637-c10364f240c7","Type":"ContainerStarted","Data":"483a2ba4431563082c07097667669b71d1ca357edfe8928b08b882c094ba9a46"} Oct 10 16:52:05 crc kubenswrapper[4799]: I1010 16:52:05.045789 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 10 16:52:05 crc kubenswrapper[4799]: I1010 16:52:05.045504 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="801513a8-8170-4b02-b637-c10364f240c7" containerName="sg-core" containerID="cri-o://d68ff2f343a0e2a95bd1d808a68d0fc0aeea4dd766882dec64ac7d56062cc7f9" gracePeriod=30 Oct 10 16:52:05 crc kubenswrapper[4799]: I1010 16:52:05.045761 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="801513a8-8170-4b02-b637-c10364f240c7" containerName="ceilometer-central-agent" containerID="cri-o://19425ae729b6aa9f2dcc31ce95dadb0a293d3f1cccf0e8753c9e682571421a35" gracePeriod=30 Oct 10 16:52:05 crc kubenswrapper[4799]: I1010 16:52:05.045505 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="801513a8-8170-4b02-b637-c10364f240c7" containerName="ceilometer-notification-agent" containerID="cri-o://36a847df7b5ac753f61fd31406958f6f527016237d0cac4a56ff63f22adba123" 
gracePeriod=30 Oct 10 16:52:05 crc kubenswrapper[4799]: I1010 16:52:05.045496 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="801513a8-8170-4b02-b637-c10364f240c7" containerName="proxy-httpd" containerID="cri-o://483a2ba4431563082c07097667669b71d1ca357edfe8928b08b882c094ba9a46" gracePeriod=30 Oct 10 16:52:06 crc kubenswrapper[4799]: I1010 16:52:06.055202 4799 generic.go:334] "Generic (PLEG): container finished" podID="801513a8-8170-4b02-b637-c10364f240c7" containerID="483a2ba4431563082c07097667669b71d1ca357edfe8928b08b882c094ba9a46" exitCode=0 Oct 10 16:52:06 crc kubenswrapper[4799]: I1010 16:52:06.055614 4799 generic.go:334] "Generic (PLEG): container finished" podID="801513a8-8170-4b02-b637-c10364f240c7" containerID="d68ff2f343a0e2a95bd1d808a68d0fc0aeea4dd766882dec64ac7d56062cc7f9" exitCode=2 Oct 10 16:52:06 crc kubenswrapper[4799]: I1010 16:52:06.055635 4799 generic.go:334] "Generic (PLEG): container finished" podID="801513a8-8170-4b02-b637-c10364f240c7" containerID="36a847df7b5ac753f61fd31406958f6f527016237d0cac4a56ff63f22adba123" exitCode=0 Oct 10 16:52:06 crc kubenswrapper[4799]: I1010 16:52:06.055304 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"801513a8-8170-4b02-b637-c10364f240c7","Type":"ContainerDied","Data":"483a2ba4431563082c07097667669b71d1ca357edfe8928b08b882c094ba9a46"} Oct 10 16:52:06 crc kubenswrapper[4799]: I1010 16:52:06.055704 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"801513a8-8170-4b02-b637-c10364f240c7","Type":"ContainerDied","Data":"d68ff2f343a0e2a95bd1d808a68d0fc0aeea4dd766882dec64ac7d56062cc7f9"} Oct 10 16:52:06 crc kubenswrapper[4799]: I1010 16:52:06.055728 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"801513a8-8170-4b02-b637-c10364f240c7","Type":"ContainerDied","Data":"36a847df7b5ac753f61fd31406958f6f527016237d0cac4a56ff63f22adba123"} Oct 10 16:52:08 crc kubenswrapper[4799]: I1010 16:52:08.915825 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.004808 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/801513a8-8170-4b02-b637-c10364f240c7-run-httpd\") pod \"801513a8-8170-4b02-b637-c10364f240c7\" (UID: \"801513a8-8170-4b02-b637-c10364f240c7\") " Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.005199 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/801513a8-8170-4b02-b637-c10364f240c7-sg-core-conf-yaml\") pod \"801513a8-8170-4b02-b637-c10364f240c7\" (UID: \"801513a8-8170-4b02-b637-c10364f240c7\") " Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.005231 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/801513a8-8170-4b02-b637-c10364f240c7-config-data\") pod \"801513a8-8170-4b02-b637-c10364f240c7\" (UID: \"801513a8-8170-4b02-b637-c10364f240c7\") " Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.005869 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/801513a8-8170-4b02-b637-c10364f240c7-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "801513a8-8170-4b02-b637-c10364f240c7" (UID: "801513a8-8170-4b02-b637-c10364f240c7"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.033683 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/801513a8-8170-4b02-b637-c10364f240c7-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "801513a8-8170-4b02-b637-c10364f240c7" (UID: "801513a8-8170-4b02-b637-c10364f240c7"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.086871 4799 generic.go:334] "Generic (PLEG): container finished" podID="801513a8-8170-4b02-b637-c10364f240c7" containerID="19425ae729b6aa9f2dcc31ce95dadb0a293d3f1cccf0e8753c9e682571421a35" exitCode=0 Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.086922 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"801513a8-8170-4b02-b637-c10364f240c7","Type":"ContainerDied","Data":"19425ae729b6aa9f2dcc31ce95dadb0a293d3f1cccf0e8753c9e682571421a35"} Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.086954 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"801513a8-8170-4b02-b637-c10364f240c7","Type":"ContainerDied","Data":"9b0c3873252b437bce46bbe9ff5edbe8649db87ae3ce59315fa7313b7891601c"} Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.086977 4799 scope.go:117] "RemoveContainer" containerID="483a2ba4431563082c07097667669b71d1ca357edfe8928b08b882c094ba9a46" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.087023 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.107984 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-96kfc\" (UniqueName: \"kubernetes.io/projected/801513a8-8170-4b02-b637-c10364f240c7-kube-api-access-96kfc\") pod \"801513a8-8170-4b02-b637-c10364f240c7\" (UID: \"801513a8-8170-4b02-b637-c10364f240c7\") " Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.108221 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/801513a8-8170-4b02-b637-c10364f240c7-scripts\") pod \"801513a8-8170-4b02-b637-c10364f240c7\" (UID: \"801513a8-8170-4b02-b637-c10364f240c7\") " Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.108384 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/801513a8-8170-4b02-b637-c10364f240c7-log-httpd\") pod \"801513a8-8170-4b02-b637-c10364f240c7\" (UID: \"801513a8-8170-4b02-b637-c10364f240c7\") " Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.108436 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/801513a8-8170-4b02-b637-c10364f240c7-combined-ca-bundle\") pod \"801513a8-8170-4b02-b637-c10364f240c7\" (UID: \"801513a8-8170-4b02-b637-c10364f240c7\") " Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.108931 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/801513a8-8170-4b02-b637-c10364f240c7-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "801513a8-8170-4b02-b637-c10364f240c7" (UID: "801513a8-8170-4b02-b637-c10364f240c7"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.109387 4799 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/801513a8-8170-4b02-b637-c10364f240c7-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.109410 4799 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/801513a8-8170-4b02-b637-c10364f240c7-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.109424 4799 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/801513a8-8170-4b02-b637-c10364f240c7-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.111134 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/801513a8-8170-4b02-b637-c10364f240c7-config-data" (OuterVolumeSpecName: "config-data") pod "801513a8-8170-4b02-b637-c10364f240c7" (UID: "801513a8-8170-4b02-b637-c10364f240c7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.111783 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/801513a8-8170-4b02-b637-c10364f240c7-scripts" (OuterVolumeSpecName: "scripts") pod "801513a8-8170-4b02-b637-c10364f240c7" (UID: "801513a8-8170-4b02-b637-c10364f240c7"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.112398 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/801513a8-8170-4b02-b637-c10364f240c7-kube-api-access-96kfc" (OuterVolumeSpecName: "kube-api-access-96kfc") pod "801513a8-8170-4b02-b637-c10364f240c7" (UID: "801513a8-8170-4b02-b637-c10364f240c7"). InnerVolumeSpecName "kube-api-access-96kfc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.179513 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/801513a8-8170-4b02-b637-c10364f240c7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "801513a8-8170-4b02-b637-c10364f240c7" (UID: "801513a8-8170-4b02-b637-c10364f240c7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.180960 4799 scope.go:117] "RemoveContainer" containerID="d68ff2f343a0e2a95bd1d808a68d0fc0aeea4dd766882dec64ac7d56062cc7f9" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.205741 4799 scope.go:117] "RemoveContainer" containerID="36a847df7b5ac753f61fd31406958f6f527016237d0cac4a56ff63f22adba123" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.211717 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/801513a8-8170-4b02-b637-c10364f240c7-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.211827 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/801513a8-8170-4b02-b637-c10364f240c7-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.211850 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/801513a8-8170-4b02-b637-c10364f240c7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.211869 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-96kfc\" (UniqueName: \"kubernetes.io/projected/801513a8-8170-4b02-b637-c10364f240c7-kube-api-access-96kfc\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.224472 4799 scope.go:117] "RemoveContainer" containerID="19425ae729b6aa9f2dcc31ce95dadb0a293d3f1cccf0e8753c9e682571421a35" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.247604 4799 scope.go:117] "RemoveContainer" containerID="483a2ba4431563082c07097667669b71d1ca357edfe8928b08b882c094ba9a46" Oct 10 16:52:09 crc kubenswrapper[4799]: E1010 16:52:09.248154 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"483a2ba4431563082c07097667669b71d1ca357edfe8928b08b882c094ba9a46\": container with ID starting with 483a2ba4431563082c07097667669b71d1ca357edfe8928b08b882c094ba9a46 not found: ID does not exist" containerID="483a2ba4431563082c07097667669b71d1ca357edfe8928b08b882c094ba9a46" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.248212 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"483a2ba4431563082c07097667669b71d1ca357edfe8928b08b882c094ba9a46"} err="failed to get container status \"483a2ba4431563082c07097667669b71d1ca357edfe8928b08b882c094ba9a46\": rpc 
error: code = NotFound desc = could not find container \"483a2ba4431563082c07097667669b71d1ca357edfe8928b08b882c094ba9a46\": container with ID starting with 483a2ba4431563082c07097667669b71d1ca357edfe8928b08b882c094ba9a46 not found: ID does not exist" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.248238 4799 scope.go:117] "RemoveContainer" containerID="d68ff2f343a0e2a95bd1d808a68d0fc0aeea4dd766882dec64ac7d56062cc7f9" Oct 10 16:52:09 crc kubenswrapper[4799]: E1010 16:52:09.248594 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d68ff2f343a0e2a95bd1d808a68d0fc0aeea4dd766882dec64ac7d56062cc7f9\": container with ID starting with d68ff2f343a0e2a95bd1d808a68d0fc0aeea4dd766882dec64ac7d56062cc7f9 not found: ID does not exist" containerID="d68ff2f343a0e2a95bd1d808a68d0fc0aeea4dd766882dec64ac7d56062cc7f9" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.248671 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d68ff2f343a0e2a95bd1d808a68d0fc0aeea4dd766882dec64ac7d56062cc7f9"} err="failed to get container status \"d68ff2f343a0e2a95bd1d808a68d0fc0aeea4dd766882dec64ac7d56062cc7f9\": rpc error: code = NotFound desc = could not find container \"d68ff2f343a0e2a95bd1d808a68d0fc0aeea4dd766882dec64ac7d56062cc7f9\": container with ID starting with d68ff2f343a0e2a95bd1d808a68d0fc0aeea4dd766882dec64ac7d56062cc7f9 not found: ID does not exist" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.248706 4799 scope.go:117] "RemoveContainer" containerID="36a847df7b5ac753f61fd31406958f6f527016237d0cac4a56ff63f22adba123" Oct 10 16:52:09 crc kubenswrapper[4799]: E1010 16:52:09.249434 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"36a847df7b5ac753f61fd31406958f6f527016237d0cac4a56ff63f22adba123\": container with ID starting with 36a847df7b5ac753f61fd31406958f6f527016237d0cac4a56ff63f22adba123 not found: ID does not exist" containerID="36a847df7b5ac753f61fd31406958f6f527016237d0cac4a56ff63f22adba123" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.249465 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36a847df7b5ac753f61fd31406958f6f527016237d0cac4a56ff63f22adba123"} err="failed to get container status \"36a847df7b5ac753f61fd31406958f6f527016237d0cac4a56ff63f22adba123\": rpc error: code = NotFound desc = could not find container \"36a847df7b5ac753f61fd31406958f6f527016237d0cac4a56ff63f22adba123\": container with ID starting with 36a847df7b5ac753f61fd31406958f6f527016237d0cac4a56ff63f22adba123 not found: ID does not exist" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.249486 4799 scope.go:117] "RemoveContainer" containerID="19425ae729b6aa9f2dcc31ce95dadb0a293d3f1cccf0e8753c9e682571421a35" Oct 10 16:52:09 crc kubenswrapper[4799]: E1010 16:52:09.249905 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19425ae729b6aa9f2dcc31ce95dadb0a293d3f1cccf0e8753c9e682571421a35\": container with ID starting with 19425ae729b6aa9f2dcc31ce95dadb0a293d3f1cccf0e8753c9e682571421a35 not found: ID does not exist" containerID="19425ae729b6aa9f2dcc31ce95dadb0a293d3f1cccf0e8753c9e682571421a35" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.249957 4799 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"19425ae729b6aa9f2dcc31ce95dadb0a293d3f1cccf0e8753c9e682571421a35"} err="failed to get container status \"19425ae729b6aa9f2dcc31ce95dadb0a293d3f1cccf0e8753c9e682571421a35\": rpc error: code = NotFound desc = could not find container \"19425ae729b6aa9f2dcc31ce95dadb0a293d3f1cccf0e8753c9e682571421a35\": container with ID starting with 19425ae729b6aa9f2dcc31ce95dadb0a293d3f1cccf0e8753c9e682571421a35 not found: ID does not exist" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.432053 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.442194 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.460026 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:52:09 crc kubenswrapper[4799]: E1010 16:52:09.460394 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="801513a8-8170-4b02-b637-c10364f240c7" containerName="sg-core" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.460404 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="801513a8-8170-4b02-b637-c10364f240c7" containerName="sg-core" Oct 10 16:52:09 crc kubenswrapper[4799]: E1010 16:52:09.460429 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="801513a8-8170-4b02-b637-c10364f240c7" containerName="proxy-httpd" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.460435 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="801513a8-8170-4b02-b637-c10364f240c7" containerName="proxy-httpd" Oct 10 16:52:09 crc kubenswrapper[4799]: E1010 16:52:09.460451 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="801513a8-8170-4b02-b637-c10364f240c7" containerName="ceilometer-notification-agent" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.460457 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="801513a8-8170-4b02-b637-c10364f240c7" containerName="ceilometer-notification-agent" Oct 10 16:52:09 crc kubenswrapper[4799]: E1010 16:52:09.460468 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="801513a8-8170-4b02-b637-c10364f240c7" containerName="ceilometer-central-agent" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.460474 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="801513a8-8170-4b02-b637-c10364f240c7" containerName="ceilometer-central-agent" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.460635 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="801513a8-8170-4b02-b637-c10364f240c7" containerName="ceilometer-central-agent" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.460659 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="801513a8-8170-4b02-b637-c10364f240c7" containerName="ceilometer-notification-agent" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.460669 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="801513a8-8170-4b02-b637-c10364f240c7" containerName="proxy-httpd" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.460678 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="801513a8-8170-4b02-b637-c10364f240c7" containerName="sg-core" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.462245 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.466960 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.496904 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.497143 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.514785 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ea58536a-f98a-46dd-b3a9-90612fe9a438-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ea58536a-f98a-46dd-b3a9-90612fe9a438\") " pod="openstack/ceilometer-0" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.514844 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7cndq\" (UniqueName: \"kubernetes.io/projected/ea58536a-f98a-46dd-b3a9-90612fe9a438-kube-api-access-7cndq\") pod \"ceilometer-0\" (UID: \"ea58536a-f98a-46dd-b3a9-90612fe9a438\") " pod="openstack/ceilometer-0" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.514871 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea58536a-f98a-46dd-b3a9-90612fe9a438-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ea58536a-f98a-46dd-b3a9-90612fe9a438\") " pod="openstack/ceilometer-0" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.514891 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea58536a-f98a-46dd-b3a9-90612fe9a438-scripts\") pod \"ceilometer-0\" (UID: \"ea58536a-f98a-46dd-b3a9-90612fe9a438\") " pod="openstack/ceilometer-0" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.514912 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea58536a-f98a-46dd-b3a9-90612fe9a438-config-data\") pod \"ceilometer-0\" (UID: \"ea58536a-f98a-46dd-b3a9-90612fe9a438\") " pod="openstack/ceilometer-0" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.514977 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ea58536a-f98a-46dd-b3a9-90612fe9a438-log-httpd\") pod \"ceilometer-0\" (UID: \"ea58536a-f98a-46dd-b3a9-90612fe9a438\") " pod="openstack/ceilometer-0" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.515012 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ea58536a-f98a-46dd-b3a9-90612fe9a438-run-httpd\") pod \"ceilometer-0\" (UID: \"ea58536a-f98a-46dd-b3a9-90612fe9a438\") " pod="openstack/ceilometer-0" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.615734 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea58536a-f98a-46dd-b3a9-90612fe9a438-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ea58536a-f98a-46dd-b3a9-90612fe9a438\") " pod="openstack/ceilometer-0" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 
16:52:09.615803 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea58536a-f98a-46dd-b3a9-90612fe9a438-scripts\") pod \"ceilometer-0\" (UID: \"ea58536a-f98a-46dd-b3a9-90612fe9a438\") " pod="openstack/ceilometer-0" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.615833 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea58536a-f98a-46dd-b3a9-90612fe9a438-config-data\") pod \"ceilometer-0\" (UID: \"ea58536a-f98a-46dd-b3a9-90612fe9a438\") " pod="openstack/ceilometer-0" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.615922 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ea58536a-f98a-46dd-b3a9-90612fe9a438-log-httpd\") pod \"ceilometer-0\" (UID: \"ea58536a-f98a-46dd-b3a9-90612fe9a438\") " pod="openstack/ceilometer-0" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.615953 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ea58536a-f98a-46dd-b3a9-90612fe9a438-run-httpd\") pod \"ceilometer-0\" (UID: \"ea58536a-f98a-46dd-b3a9-90612fe9a438\") " pod="openstack/ceilometer-0" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.615996 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ea58536a-f98a-46dd-b3a9-90612fe9a438-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ea58536a-f98a-46dd-b3a9-90612fe9a438\") " pod="openstack/ceilometer-0" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.616047 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7cndq\" (UniqueName: \"kubernetes.io/projected/ea58536a-f98a-46dd-b3a9-90612fe9a438-kube-api-access-7cndq\") pod \"ceilometer-0\" (UID: \"ea58536a-f98a-46dd-b3a9-90612fe9a438\") " pod="openstack/ceilometer-0" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.616578 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ea58536a-f98a-46dd-b3a9-90612fe9a438-run-httpd\") pod \"ceilometer-0\" (UID: \"ea58536a-f98a-46dd-b3a9-90612fe9a438\") " pod="openstack/ceilometer-0" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.616794 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ea58536a-f98a-46dd-b3a9-90612fe9a438-log-httpd\") pod \"ceilometer-0\" (UID: \"ea58536a-f98a-46dd-b3a9-90612fe9a438\") " pod="openstack/ceilometer-0" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.619437 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea58536a-f98a-46dd-b3a9-90612fe9a438-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ea58536a-f98a-46dd-b3a9-90612fe9a438\") " pod="openstack/ceilometer-0" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.620225 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ea58536a-f98a-46dd-b3a9-90612fe9a438-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ea58536a-f98a-46dd-b3a9-90612fe9a438\") " pod="openstack/ceilometer-0" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.621011 4799 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea58536a-f98a-46dd-b3a9-90612fe9a438-config-data\") pod \"ceilometer-0\" (UID: \"ea58536a-f98a-46dd-b3a9-90612fe9a438\") " pod="openstack/ceilometer-0" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.621301 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea58536a-f98a-46dd-b3a9-90612fe9a438-scripts\") pod \"ceilometer-0\" (UID: \"ea58536a-f98a-46dd-b3a9-90612fe9a438\") " pod="openstack/ceilometer-0" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.633646 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7cndq\" (UniqueName: \"kubernetes.io/projected/ea58536a-f98a-46dd-b3a9-90612fe9a438-kube-api-access-7cndq\") pod \"ceilometer-0\" (UID: \"ea58536a-f98a-46dd-b3a9-90612fe9a438\") " pod="openstack/ceilometer-0" Oct 10 16:52:09 crc kubenswrapper[4799]: I1010 16:52:09.823849 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 10 16:52:10 crc kubenswrapper[4799]: I1010 16:52:10.309273 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:52:10 crc kubenswrapper[4799]: W1010 16:52:10.314294 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podea58536a_f98a_46dd_b3a9_90612fe9a438.slice/crio-1c51338ba739155587f6d018ed5e71fd76f096520548cf1e94cd33f19707cd1b WatchSource:0}: Error finding container 1c51338ba739155587f6d018ed5e71fd76f096520548cf1e94cd33f19707cd1b: Status 404 returned error can't find the container with id 1c51338ba739155587f6d018ed5e71fd76f096520548cf1e94cd33f19707cd1b Oct 10 16:52:11 crc kubenswrapper[4799]: I1010 16:52:11.107563 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ea58536a-f98a-46dd-b3a9-90612fe9a438","Type":"ContainerStarted","Data":"382f609b14ca6577c1d275d1762255d6754d3113ac3741fae5f0bb95acfa7851"} Oct 10 16:52:11 crc kubenswrapper[4799]: I1010 16:52:11.108014 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ea58536a-f98a-46dd-b3a9-90612fe9a438","Type":"ContainerStarted","Data":"1c51338ba739155587f6d018ed5e71fd76f096520548cf1e94cd33f19707cd1b"} Oct 10 16:52:11 crc kubenswrapper[4799]: I1010 16:52:11.421857 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="801513a8-8170-4b02-b637-c10364f240c7" path="/var/lib/kubelet/pods/801513a8-8170-4b02-b637-c10364f240c7/volumes" Oct 10 16:52:11 crc kubenswrapper[4799]: I1010 16:52:11.488835 4799 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod2240606d-067a-4655-9deb-611ff6e3d5af"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod2240606d-067a-4655-9deb-611ff6e3d5af] : Timed out while waiting for systemd to remove kubepods-besteffort-pod2240606d_067a_4655_9deb_611ff6e3d5af.slice" Oct 10 16:52:12 crc kubenswrapper[4799]: I1010 16:52:12.119108 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ea58536a-f98a-46dd-b3a9-90612fe9a438","Type":"ContainerStarted","Data":"5ca541c2236fe349c764f953bc2a4d10b8a704c1db2fef0341deb5eb558cd280"} Oct 10 16:52:12 crc kubenswrapper[4799]: I1010 16:52:12.120709 4799 generic.go:334] "Generic (PLEG): container finished" podID="f2b06e0c-bd3e-4928-94f4-bdb22ae99a89" 
containerID="b23ac04dd50a4eaaa66ae053dec6bce7db1f6a62a4f25b005e5b7204ecb4bdc1" exitCode=0 Oct 10 16:52:12 crc kubenswrapper[4799]: I1010 16:52:12.120768 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-kgmbm" event={"ID":"f2b06e0c-bd3e-4928-94f4-bdb22ae99a89","Type":"ContainerDied","Data":"b23ac04dd50a4eaaa66ae053dec6bce7db1f6a62a4f25b005e5b7204ecb4bdc1"} Oct 10 16:52:13 crc kubenswrapper[4799]: I1010 16:52:13.135568 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ea58536a-f98a-46dd-b3a9-90612fe9a438","Type":"ContainerStarted","Data":"0685ce541d3855e534e7fa279df6a22b07a42590b5dd25b6669dafdf92acd8bf"} Oct 10 16:52:13 crc kubenswrapper[4799]: I1010 16:52:13.614506 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-kgmbm" Oct 10 16:52:13 crc kubenswrapper[4799]: I1010 16:52:13.793820 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2b06e0c-bd3e-4928-94f4-bdb22ae99a89-combined-ca-bundle\") pod \"f2b06e0c-bd3e-4928-94f4-bdb22ae99a89\" (UID: \"f2b06e0c-bd3e-4928-94f4-bdb22ae99a89\") " Oct 10 16:52:13 crc kubenswrapper[4799]: I1010 16:52:13.793992 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2b06e0c-bd3e-4928-94f4-bdb22ae99a89-config-data\") pod \"f2b06e0c-bd3e-4928-94f4-bdb22ae99a89\" (UID: \"f2b06e0c-bd3e-4928-94f4-bdb22ae99a89\") " Oct 10 16:52:13 crc kubenswrapper[4799]: I1010 16:52:13.794072 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2b06e0c-bd3e-4928-94f4-bdb22ae99a89-scripts\") pod \"f2b06e0c-bd3e-4928-94f4-bdb22ae99a89\" (UID: \"f2b06e0c-bd3e-4928-94f4-bdb22ae99a89\") " Oct 10 16:52:13 crc kubenswrapper[4799]: I1010 16:52:13.794171 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7f62n\" (UniqueName: \"kubernetes.io/projected/f2b06e0c-bd3e-4928-94f4-bdb22ae99a89-kube-api-access-7f62n\") pod \"f2b06e0c-bd3e-4928-94f4-bdb22ae99a89\" (UID: \"f2b06e0c-bd3e-4928-94f4-bdb22ae99a89\") " Oct 10 16:52:13 crc kubenswrapper[4799]: I1010 16:52:13.803888 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2b06e0c-bd3e-4928-94f4-bdb22ae99a89-scripts" (OuterVolumeSpecName: "scripts") pod "f2b06e0c-bd3e-4928-94f4-bdb22ae99a89" (UID: "f2b06e0c-bd3e-4928-94f4-bdb22ae99a89"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:52:13 crc kubenswrapper[4799]: I1010 16:52:13.803958 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2b06e0c-bd3e-4928-94f4-bdb22ae99a89-kube-api-access-7f62n" (OuterVolumeSpecName: "kube-api-access-7f62n") pod "f2b06e0c-bd3e-4928-94f4-bdb22ae99a89" (UID: "f2b06e0c-bd3e-4928-94f4-bdb22ae99a89"). InnerVolumeSpecName "kube-api-access-7f62n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:52:13 crc kubenswrapper[4799]: I1010 16:52:13.826903 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2b06e0c-bd3e-4928-94f4-bdb22ae99a89-config-data" (OuterVolumeSpecName: "config-data") pod "f2b06e0c-bd3e-4928-94f4-bdb22ae99a89" (UID: "f2b06e0c-bd3e-4928-94f4-bdb22ae99a89"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:52:13 crc kubenswrapper[4799]: I1010 16:52:13.844996 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2b06e0c-bd3e-4928-94f4-bdb22ae99a89-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f2b06e0c-bd3e-4928-94f4-bdb22ae99a89" (UID: "f2b06e0c-bd3e-4928-94f4-bdb22ae99a89"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:52:13 crc kubenswrapper[4799]: I1010 16:52:13.896491 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2b06e0c-bd3e-4928-94f4-bdb22ae99a89-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:13 crc kubenswrapper[4799]: I1010 16:52:13.896536 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2b06e0c-bd3e-4928-94f4-bdb22ae99a89-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:13 crc kubenswrapper[4799]: I1010 16:52:13.896550 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2b06e0c-bd3e-4928-94f4-bdb22ae99a89-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:13 crc kubenswrapper[4799]: I1010 16:52:13.896563 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7f62n\" (UniqueName: \"kubernetes.io/projected/f2b06e0c-bd3e-4928-94f4-bdb22ae99a89-kube-api-access-7f62n\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:14 crc kubenswrapper[4799]: I1010 16:52:14.149131 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ea58536a-f98a-46dd-b3a9-90612fe9a438","Type":"ContainerStarted","Data":"79de021fa6d1b97a7f1f765431e167489c1b950430a175cb7a0f6addf9e70ac2"} Oct 10 16:52:14 crc kubenswrapper[4799]: I1010 16:52:14.149461 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 10 16:52:14 crc kubenswrapper[4799]: I1010 16:52:14.151920 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-kgmbm" event={"ID":"f2b06e0c-bd3e-4928-94f4-bdb22ae99a89","Type":"ContainerDied","Data":"20b4f4cd8fbec5cf4149746b583cac95a66bb87c002239d6bd6e6b9405014d0b"} Oct 10 16:52:14 crc kubenswrapper[4799]: I1010 16:52:14.151962 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="20b4f4cd8fbec5cf4149746b583cac95a66bb87c002239d6bd6e6b9405014d0b" Oct 10 16:52:14 crc kubenswrapper[4799]: I1010 16:52:14.152020 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-kgmbm" Oct 10 16:52:14 crc kubenswrapper[4799]: I1010 16:52:14.199079 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.082767797 podStartE2EDuration="5.199061636s" podCreationTimestamp="2025-10-10 16:52:09 +0000 UTC" firstStartedPulling="2025-10-10 16:52:10.317062209 +0000 UTC m=+1223.825386324" lastFinishedPulling="2025-10-10 16:52:13.433356048 +0000 UTC m=+1226.941680163" observedRunningTime="2025-10-10 16:52:14.187029329 +0000 UTC m=+1227.695353454" watchObservedRunningTime="2025-10-10 16:52:14.199061636 +0000 UTC m=+1227.707385751" Oct 10 16:52:14 crc kubenswrapper[4799]: I1010 16:52:14.335585 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 10 16:52:14 crc kubenswrapper[4799]: E1010 16:52:14.336118 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2b06e0c-bd3e-4928-94f4-bdb22ae99a89" containerName="nova-cell0-conductor-db-sync" Oct 10 16:52:14 crc kubenswrapper[4799]: I1010 16:52:14.336142 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2b06e0c-bd3e-4928-94f4-bdb22ae99a89" containerName="nova-cell0-conductor-db-sync" Oct 10 16:52:14 crc kubenswrapper[4799]: I1010 16:52:14.336369 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2b06e0c-bd3e-4928-94f4-bdb22ae99a89" containerName="nova-cell0-conductor-db-sync" Oct 10 16:52:14 crc kubenswrapper[4799]: I1010 16:52:14.337169 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 10 16:52:14 crc kubenswrapper[4799]: I1010 16:52:14.341564 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-pg9jc" Oct 10 16:52:14 crc kubenswrapper[4799]: I1010 16:52:14.341567 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 10 16:52:14 crc kubenswrapper[4799]: I1010 16:52:14.344894 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 10 16:52:14 crc kubenswrapper[4799]: I1010 16:52:14.508578 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nsq2j\" (UniqueName: \"kubernetes.io/projected/60be0e86-f2dd-4575-b3c8-0131575b1cd8-kube-api-access-nsq2j\") pod \"nova-cell0-conductor-0\" (UID: \"60be0e86-f2dd-4575-b3c8-0131575b1cd8\") " pod="openstack/nova-cell0-conductor-0" Oct 10 16:52:14 crc kubenswrapper[4799]: I1010 16:52:14.508864 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60be0e86-f2dd-4575-b3c8-0131575b1cd8-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"60be0e86-f2dd-4575-b3c8-0131575b1cd8\") " pod="openstack/nova-cell0-conductor-0" Oct 10 16:52:14 crc kubenswrapper[4799]: I1010 16:52:14.508964 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60be0e86-f2dd-4575-b3c8-0131575b1cd8-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"60be0e86-f2dd-4575-b3c8-0131575b1cd8\") " pod="openstack/nova-cell0-conductor-0" Oct 10 16:52:14 crc kubenswrapper[4799]: I1010 16:52:14.610444 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nsq2j\" (UniqueName: 
\"kubernetes.io/projected/60be0e86-f2dd-4575-b3c8-0131575b1cd8-kube-api-access-nsq2j\") pod \"nova-cell0-conductor-0\" (UID: \"60be0e86-f2dd-4575-b3c8-0131575b1cd8\") " pod="openstack/nova-cell0-conductor-0" Oct 10 16:52:14 crc kubenswrapper[4799]: I1010 16:52:14.610507 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60be0e86-f2dd-4575-b3c8-0131575b1cd8-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"60be0e86-f2dd-4575-b3c8-0131575b1cd8\") " pod="openstack/nova-cell0-conductor-0" Oct 10 16:52:14 crc kubenswrapper[4799]: I1010 16:52:14.610583 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60be0e86-f2dd-4575-b3c8-0131575b1cd8-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"60be0e86-f2dd-4575-b3c8-0131575b1cd8\") " pod="openstack/nova-cell0-conductor-0" Oct 10 16:52:14 crc kubenswrapper[4799]: I1010 16:52:14.617404 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60be0e86-f2dd-4575-b3c8-0131575b1cd8-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"60be0e86-f2dd-4575-b3c8-0131575b1cd8\") " pod="openstack/nova-cell0-conductor-0" Oct 10 16:52:14 crc kubenswrapper[4799]: I1010 16:52:14.619401 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60be0e86-f2dd-4575-b3c8-0131575b1cd8-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"60be0e86-f2dd-4575-b3c8-0131575b1cd8\") " pod="openstack/nova-cell0-conductor-0" Oct 10 16:52:14 crc kubenswrapper[4799]: I1010 16:52:14.632537 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nsq2j\" (UniqueName: \"kubernetes.io/projected/60be0e86-f2dd-4575-b3c8-0131575b1cd8-kube-api-access-nsq2j\") pod \"nova-cell0-conductor-0\" (UID: \"60be0e86-f2dd-4575-b3c8-0131575b1cd8\") " pod="openstack/nova-cell0-conductor-0" Oct 10 16:52:14 crc kubenswrapper[4799]: I1010 16:52:14.690218 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 10 16:52:15 crc kubenswrapper[4799]: I1010 16:52:15.142883 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 10 16:52:15 crc kubenswrapper[4799]: W1010 16:52:15.145750 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod60be0e86_f2dd_4575_b3c8_0131575b1cd8.slice/crio-b8dd1b5626b63450e4276f12d6277470cef7ff847854fedeafff007b81b42d87 WatchSource:0}: Error finding container b8dd1b5626b63450e4276f12d6277470cef7ff847854fedeafff007b81b42d87: Status 404 returned error can't find the container with id b8dd1b5626b63450e4276f12d6277470cef7ff847854fedeafff007b81b42d87 Oct 10 16:52:15 crc kubenswrapper[4799]: I1010 16:52:15.161043 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"60be0e86-f2dd-4575-b3c8-0131575b1cd8","Type":"ContainerStarted","Data":"b8dd1b5626b63450e4276f12d6277470cef7ff847854fedeafff007b81b42d87"} Oct 10 16:52:16 crc kubenswrapper[4799]: I1010 16:52:16.182172 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"60be0e86-f2dd-4575-b3c8-0131575b1cd8","Type":"ContainerStarted","Data":"35d51a78c7ee3dde16f77dfec5a6f5f69c8e3d2b0eccd75b5f3e3226dc047eeb"} Oct 10 16:52:16 crc kubenswrapper[4799]: I1010 16:52:16.182571 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Oct 10 16:52:16 crc kubenswrapper[4799]: I1010 16:52:16.201241 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.201225859 podStartE2EDuration="2.201225859s" podCreationTimestamp="2025-10-10 16:52:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:52:16.198407539 +0000 UTC m=+1229.706731694" watchObservedRunningTime="2025-10-10 16:52:16.201225859 +0000 UTC m=+1229.709549974" Oct 10 16:52:24 crc kubenswrapper[4799]: I1010 16:52:24.739360 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.361337 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-phknx"] Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.362827 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-phknx" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.366165 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.367444 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.373642 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-phknx"] Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.522354 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c784371b-9184-431f-93cd-92b037921b6b-scripts\") pod \"nova-cell0-cell-mapping-phknx\" (UID: \"c784371b-9184-431f-93cd-92b037921b6b\") " pod="openstack/nova-cell0-cell-mapping-phknx" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.522432 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zr5q4\" (UniqueName: \"kubernetes.io/projected/c784371b-9184-431f-93cd-92b037921b6b-kube-api-access-zr5q4\") pod \"nova-cell0-cell-mapping-phknx\" (UID: \"c784371b-9184-431f-93cd-92b037921b6b\") " pod="openstack/nova-cell0-cell-mapping-phknx" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.522487 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c784371b-9184-431f-93cd-92b037921b6b-config-data\") pod \"nova-cell0-cell-mapping-phknx\" (UID: \"c784371b-9184-431f-93cd-92b037921b6b\") " pod="openstack/nova-cell0-cell-mapping-phknx" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.522515 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c784371b-9184-431f-93cd-92b037921b6b-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-phknx\" (UID: \"c784371b-9184-431f-93cd-92b037921b6b\") " pod="openstack/nova-cell0-cell-mapping-phknx" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.554161 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.555710 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.557292 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.561710 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.596522 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.597845 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.605205 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.611969 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.624946 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtwtl\" (UniqueName: \"kubernetes.io/projected/f0e73585-38a6-4a89-9ce7-30bd4ea7fe71-kube-api-access-jtwtl\") pod \"nova-api-0\" (UID: \"f0e73585-38a6-4a89-9ce7-30bd4ea7fe71\") " pod="openstack/nova-api-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.625024 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0e73585-38a6-4a89-9ce7-30bd4ea7fe71-config-data\") pod \"nova-api-0\" (UID: \"f0e73585-38a6-4a89-9ce7-30bd4ea7fe71\") " pod="openstack/nova-api-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.625062 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899\") " pod="openstack/nova-scheduler-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.625103 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ckdmw\" (UniqueName: \"kubernetes.io/projected/e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899-kube-api-access-ckdmw\") pod \"nova-scheduler-0\" (UID: \"e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899\") " pod="openstack/nova-scheduler-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.625134 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f0e73585-38a6-4a89-9ce7-30bd4ea7fe71-logs\") pod \"nova-api-0\" (UID: \"f0e73585-38a6-4a89-9ce7-30bd4ea7fe71\") " pod="openstack/nova-api-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.625202 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c784371b-9184-431f-93cd-92b037921b6b-scripts\") pod \"nova-cell0-cell-mapping-phknx\" (UID: \"c784371b-9184-431f-93cd-92b037921b6b\") " pod="openstack/nova-cell0-cell-mapping-phknx" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.625262 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zr5q4\" (UniqueName: \"kubernetes.io/projected/c784371b-9184-431f-93cd-92b037921b6b-kube-api-access-zr5q4\") pod \"nova-cell0-cell-mapping-phknx\" (UID: \"c784371b-9184-431f-93cd-92b037921b6b\") " pod="openstack/nova-cell0-cell-mapping-phknx" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.625288 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899-config-data\") pod \"nova-scheduler-0\" (UID: \"e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899\") " pod="openstack/nova-scheduler-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.625344 4799 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c784371b-9184-431f-93cd-92b037921b6b-config-data\") pod \"nova-cell0-cell-mapping-phknx\" (UID: \"c784371b-9184-431f-93cd-92b037921b6b\") " pod="openstack/nova-cell0-cell-mapping-phknx" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.625366 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c784371b-9184-431f-93cd-92b037921b6b-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-phknx\" (UID: \"c784371b-9184-431f-93cd-92b037921b6b\") " pod="openstack/nova-cell0-cell-mapping-phknx" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.625399 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0e73585-38a6-4a89-9ce7-30bd4ea7fe71-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f0e73585-38a6-4a89-9ce7-30bd4ea7fe71\") " pod="openstack/nova-api-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.631842 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c784371b-9184-431f-93cd-92b037921b6b-config-data\") pod \"nova-cell0-cell-mapping-phknx\" (UID: \"c784371b-9184-431f-93cd-92b037921b6b\") " pod="openstack/nova-cell0-cell-mapping-phknx" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.637449 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c784371b-9184-431f-93cd-92b037921b6b-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-phknx\" (UID: \"c784371b-9184-431f-93cd-92b037921b6b\") " pod="openstack/nova-cell0-cell-mapping-phknx" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.655584 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zr5q4\" (UniqueName: \"kubernetes.io/projected/c784371b-9184-431f-93cd-92b037921b6b-kube-api-access-zr5q4\") pod \"nova-cell0-cell-mapping-phknx\" (UID: \"c784371b-9184-431f-93cd-92b037921b6b\") " pod="openstack/nova-cell0-cell-mapping-phknx" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.660571 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c784371b-9184-431f-93cd-92b037921b6b-scripts\") pod \"nova-cell0-cell-mapping-phknx\" (UID: \"c784371b-9184-431f-93cd-92b037921b6b\") " pod="openstack/nova-cell0-cell-mapping-phknx" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.685136 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.686299 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.701731 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.703188 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.707633 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.713144 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.723045 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.735776 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e98f1eab-bf7e-4d86-ab62-5294603982ae-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e98f1eab-bf7e-4d86-ab62-5294603982ae\") " pod="openstack/nova-metadata-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.736135 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899-config-data\") pod \"nova-scheduler-0\" (UID: \"e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899\") " pod="openstack/nova-scheduler-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.736251 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54b8af05-cb42-4c0b-85af-e1cd04c3f5af-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"54b8af05-cb42-4c0b-85af-e1cd04c3f5af\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.736322 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fm6xm\" (UniqueName: \"kubernetes.io/projected/54b8af05-cb42-4c0b-85af-e1cd04c3f5af-kube-api-access-fm6xm\") pod \"nova-cell1-novncproxy-0\" (UID: \"54b8af05-cb42-4c0b-85af-e1cd04c3f5af\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.736410 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0e73585-38a6-4a89-9ce7-30bd4ea7fe71-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f0e73585-38a6-4a89-9ce7-30bd4ea7fe71\") " pod="openstack/nova-api-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.736485 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e98f1eab-bf7e-4d86-ab62-5294603982ae-logs\") pod \"nova-metadata-0\" (UID: \"e98f1eab-bf7e-4d86-ab62-5294603982ae\") " pod="openstack/nova-metadata-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.736526 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtwtl\" (UniqueName: \"kubernetes.io/projected/f0e73585-38a6-4a89-9ce7-30bd4ea7fe71-kube-api-access-jtwtl\") pod \"nova-api-0\" (UID: \"f0e73585-38a6-4a89-9ce7-30bd4ea7fe71\") " pod="openstack/nova-api-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.736639 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54b8af05-cb42-4c0b-85af-e1cd04c3f5af-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"54b8af05-cb42-4c0b-85af-e1cd04c3f5af\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.736671 4799 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0e73585-38a6-4a89-9ce7-30bd4ea7fe71-config-data\") pod \"nova-api-0\" (UID: \"f0e73585-38a6-4a89-9ce7-30bd4ea7fe71\") " pod="openstack/nova-api-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.736857 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899\") " pod="openstack/nova-scheduler-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.736924 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ckdmw\" (UniqueName: \"kubernetes.io/projected/e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899-kube-api-access-ckdmw\") pod \"nova-scheduler-0\" (UID: \"e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899\") " pod="openstack/nova-scheduler-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.737153 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f0e73585-38a6-4a89-9ce7-30bd4ea7fe71-logs\") pod \"nova-api-0\" (UID: \"f0e73585-38a6-4a89-9ce7-30bd4ea7fe71\") " pod="openstack/nova-api-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.737358 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e98f1eab-bf7e-4d86-ab62-5294603982ae-config-data\") pod \"nova-metadata-0\" (UID: \"e98f1eab-bf7e-4d86-ab62-5294603982ae\") " pod="openstack/nova-metadata-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.737418 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7dn5\" (UniqueName: \"kubernetes.io/projected/e98f1eab-bf7e-4d86-ab62-5294603982ae-kube-api-access-x7dn5\") pod \"nova-metadata-0\" (UID: \"e98f1eab-bf7e-4d86-ab62-5294603982ae\") " pod="openstack/nova-metadata-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.758665 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f0e73585-38a6-4a89-9ce7-30bd4ea7fe71-logs\") pod \"nova-api-0\" (UID: \"f0e73585-38a6-4a89-9ce7-30bd4ea7fe71\") " pod="openstack/nova-api-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.760899 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-phknx" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.761346 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0e73585-38a6-4a89-9ce7-30bd4ea7fe71-config-data\") pod \"nova-api-0\" (UID: \"f0e73585-38a6-4a89-9ce7-30bd4ea7fe71\") " pod="openstack/nova-api-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.775458 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0e73585-38a6-4a89-9ce7-30bd4ea7fe71-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f0e73585-38a6-4a89-9ce7-30bd4ea7fe71\") " pod="openstack/nova-api-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.787311 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899\") " pod="openstack/nova-scheduler-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.801327 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899-config-data\") pod \"nova-scheduler-0\" (UID: \"e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899\") " pod="openstack/nova-scheduler-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.802064 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtwtl\" (UniqueName: \"kubernetes.io/projected/f0e73585-38a6-4a89-9ce7-30bd4ea7fe71-kube-api-access-jtwtl\") pod \"nova-api-0\" (UID: \"f0e73585-38a6-4a89-9ce7-30bd4ea7fe71\") " pod="openstack/nova-api-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.826075 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.842915 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e98f1eab-bf7e-4d86-ab62-5294603982ae-logs\") pod \"nova-metadata-0\" (UID: \"e98f1eab-bf7e-4d86-ab62-5294603982ae\") " pod="openstack/nova-metadata-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.842980 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54b8af05-cb42-4c0b-85af-e1cd04c3f5af-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"54b8af05-cb42-4c0b-85af-e1cd04c3f5af\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.843046 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e98f1eab-bf7e-4d86-ab62-5294603982ae-config-data\") pod \"nova-metadata-0\" (UID: \"e98f1eab-bf7e-4d86-ab62-5294603982ae\") " pod="openstack/nova-metadata-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.843075 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7dn5\" (UniqueName: \"kubernetes.io/projected/e98f1eab-bf7e-4d86-ab62-5294603982ae-kube-api-access-x7dn5\") pod \"nova-metadata-0\" (UID: \"e98f1eab-bf7e-4d86-ab62-5294603982ae\") " pod="openstack/nova-metadata-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.843099 4799 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e98f1eab-bf7e-4d86-ab62-5294603982ae-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e98f1eab-bf7e-4d86-ab62-5294603982ae\") " pod="openstack/nova-metadata-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.843111 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ckdmw\" (UniqueName: \"kubernetes.io/projected/e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899-kube-api-access-ckdmw\") pod \"nova-scheduler-0\" (UID: \"e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899\") " pod="openstack/nova-scheduler-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.861045 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54b8af05-cb42-4c0b-85af-e1cd04c3f5af-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"54b8af05-cb42-4c0b-85af-e1cd04c3f5af\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.861089 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fm6xm\" (UniqueName: \"kubernetes.io/projected/54b8af05-cb42-4c0b-85af-e1cd04c3f5af-kube-api-access-fm6xm\") pod \"nova-cell1-novncproxy-0\" (UID: \"54b8af05-cb42-4c0b-85af-e1cd04c3f5af\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.866343 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e98f1eab-bf7e-4d86-ab62-5294603982ae-logs\") pod \"nova-metadata-0\" (UID: \"e98f1eab-bf7e-4d86-ab62-5294603982ae\") " pod="openstack/nova-metadata-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.867603 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54b8af05-cb42-4c0b-85af-e1cd04c3f5af-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"54b8af05-cb42-4c0b-85af-e1cd04c3f5af\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.870629 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e98f1eab-bf7e-4d86-ab62-5294603982ae-config-data\") pod \"nova-metadata-0\" (UID: \"e98f1eab-bf7e-4d86-ab62-5294603982ae\") " pod="openstack/nova-metadata-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.871132 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.872465 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e98f1eab-bf7e-4d86-ab62-5294603982ae-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e98f1eab-bf7e-4d86-ab62-5294603982ae\") " pod="openstack/nova-metadata-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.883000 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54b8af05-cb42-4c0b-85af-e1cd04c3f5af-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"54b8af05-cb42-4c0b-85af-e1cd04c3f5af\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.889820 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fm6xm\" (UniqueName: \"kubernetes.io/projected/54b8af05-cb42-4c0b-85af-e1cd04c3f5af-kube-api-access-fm6xm\") pod \"nova-cell1-novncproxy-0\" (UID: \"54b8af05-cb42-4c0b-85af-e1cd04c3f5af\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.904657 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7dn5\" (UniqueName: \"kubernetes.io/projected/e98f1eab-bf7e-4d86-ab62-5294603982ae-kube-api-access-x7dn5\") pod \"nova-metadata-0\" (UID: \"e98f1eab-bf7e-4d86-ab62-5294603982ae\") " pod="openstack/nova-metadata-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.923359 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-694b6b9bcc-4mgtw"] Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.923867 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.931566 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-694b6b9bcc-4mgtw" Oct 10 16:52:25 crc kubenswrapper[4799]: I1010 16:52:25.933318 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-694b6b9bcc-4mgtw"] Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.062659 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.064916 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9e4317e1-d160-41a4-adbc-8226d554bc6a-dns-swift-storage-0\") pod \"dnsmasq-dns-694b6b9bcc-4mgtw\" (UID: \"9e4317e1-d160-41a4-adbc-8226d554bc6a\") " pod="openstack/dnsmasq-dns-694b6b9bcc-4mgtw" Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.064959 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e4317e1-d160-41a4-adbc-8226d554bc6a-config\") pod \"dnsmasq-dns-694b6b9bcc-4mgtw\" (UID: \"9e4317e1-d160-41a4-adbc-8226d554bc6a\") " pod="openstack/dnsmasq-dns-694b6b9bcc-4mgtw" Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.064985 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9e4317e1-d160-41a4-adbc-8226d554bc6a-dns-svc\") pod \"dnsmasq-dns-694b6b9bcc-4mgtw\" (UID: \"9e4317e1-d160-41a4-adbc-8226d554bc6a\") " pod="openstack/dnsmasq-dns-694b6b9bcc-4mgtw" Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.065016 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4n44z\" (UniqueName: \"kubernetes.io/projected/9e4317e1-d160-41a4-adbc-8226d554bc6a-kube-api-access-4n44z\") pod \"dnsmasq-dns-694b6b9bcc-4mgtw\" (UID: \"9e4317e1-d160-41a4-adbc-8226d554bc6a\") " pod="openstack/dnsmasq-dns-694b6b9bcc-4mgtw" Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.065054 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9e4317e1-d160-41a4-adbc-8226d554bc6a-ovsdbserver-nb\") pod \"dnsmasq-dns-694b6b9bcc-4mgtw\" (UID: \"9e4317e1-d160-41a4-adbc-8226d554bc6a\") " pod="openstack/dnsmasq-dns-694b6b9bcc-4mgtw" Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.065093 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9e4317e1-d160-41a4-adbc-8226d554bc6a-ovsdbserver-sb\") pod \"dnsmasq-dns-694b6b9bcc-4mgtw\" (UID: \"9e4317e1-d160-41a4-adbc-8226d554bc6a\") " pod="openstack/dnsmasq-dns-694b6b9bcc-4mgtw" Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.167020 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9e4317e1-d160-41a4-adbc-8226d554bc6a-dns-swift-storage-0\") pod \"dnsmasq-dns-694b6b9bcc-4mgtw\" (UID: \"9e4317e1-d160-41a4-adbc-8226d554bc6a\") " pod="openstack/dnsmasq-dns-694b6b9bcc-4mgtw" Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.167095 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e4317e1-d160-41a4-adbc-8226d554bc6a-config\") pod \"dnsmasq-dns-694b6b9bcc-4mgtw\" (UID: \"9e4317e1-d160-41a4-adbc-8226d554bc6a\") " pod="openstack/dnsmasq-dns-694b6b9bcc-4mgtw" Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.167124 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9e4317e1-d160-41a4-adbc-8226d554bc6a-dns-svc\") pod 
\"dnsmasq-dns-694b6b9bcc-4mgtw\" (UID: \"9e4317e1-d160-41a4-adbc-8226d554bc6a\") " pod="openstack/dnsmasq-dns-694b6b9bcc-4mgtw" Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.168096 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9e4317e1-d160-41a4-adbc-8226d554bc6a-dns-swift-storage-0\") pod \"dnsmasq-dns-694b6b9bcc-4mgtw\" (UID: \"9e4317e1-d160-41a4-adbc-8226d554bc6a\") " pod="openstack/dnsmasq-dns-694b6b9bcc-4mgtw" Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.168909 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e4317e1-d160-41a4-adbc-8226d554bc6a-config\") pod \"dnsmasq-dns-694b6b9bcc-4mgtw\" (UID: \"9e4317e1-d160-41a4-adbc-8226d554bc6a\") " pod="openstack/dnsmasq-dns-694b6b9bcc-4mgtw" Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.170177 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9e4317e1-d160-41a4-adbc-8226d554bc6a-dns-svc\") pod \"dnsmasq-dns-694b6b9bcc-4mgtw\" (UID: \"9e4317e1-d160-41a4-adbc-8226d554bc6a\") " pod="openstack/dnsmasq-dns-694b6b9bcc-4mgtw" Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.167162 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4n44z\" (UniqueName: \"kubernetes.io/projected/9e4317e1-d160-41a4-adbc-8226d554bc6a-kube-api-access-4n44z\") pod \"dnsmasq-dns-694b6b9bcc-4mgtw\" (UID: \"9e4317e1-d160-41a4-adbc-8226d554bc6a\") " pod="openstack/dnsmasq-dns-694b6b9bcc-4mgtw" Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.170308 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9e4317e1-d160-41a4-adbc-8226d554bc6a-ovsdbserver-nb\") pod \"dnsmasq-dns-694b6b9bcc-4mgtw\" (UID: \"9e4317e1-d160-41a4-adbc-8226d554bc6a\") " pod="openstack/dnsmasq-dns-694b6b9bcc-4mgtw" Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.170383 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9e4317e1-d160-41a4-adbc-8226d554bc6a-ovsdbserver-sb\") pod \"dnsmasq-dns-694b6b9bcc-4mgtw\" (UID: \"9e4317e1-d160-41a4-adbc-8226d554bc6a\") " pod="openstack/dnsmasq-dns-694b6b9bcc-4mgtw" Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.171172 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9e4317e1-d160-41a4-adbc-8226d554bc6a-ovsdbserver-sb\") pod \"dnsmasq-dns-694b6b9bcc-4mgtw\" (UID: \"9e4317e1-d160-41a4-adbc-8226d554bc6a\") " pod="openstack/dnsmasq-dns-694b6b9bcc-4mgtw" Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.171744 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9e4317e1-d160-41a4-adbc-8226d554bc6a-ovsdbserver-nb\") pod \"dnsmasq-dns-694b6b9bcc-4mgtw\" (UID: \"9e4317e1-d160-41a4-adbc-8226d554bc6a\") " pod="openstack/dnsmasq-dns-694b6b9bcc-4mgtw" Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.185934 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4n44z\" (UniqueName: \"kubernetes.io/projected/9e4317e1-d160-41a4-adbc-8226d554bc6a-kube-api-access-4n44z\") pod \"dnsmasq-dns-694b6b9bcc-4mgtw\" (UID: \"9e4317e1-d160-41a4-adbc-8226d554bc6a\") " 
pod="openstack/dnsmasq-dns-694b6b9bcc-4mgtw" Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.189493 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.264286 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-694b6b9bcc-4mgtw" Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.349139 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-phknx"] Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.485732 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 10 16:52:26 crc kubenswrapper[4799]: W1010 16:52:26.516953 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf0e73585_38a6_4a89_9ce7_30bd4ea7fe71.slice/crio-499d9f98e05522f459aca4332340fcbc91432396c2b61bf6c1c3691904479ba2 WatchSource:0}: Error finding container 499d9f98e05522f459aca4332340fcbc91432396c2b61bf6c1c3691904479ba2: Status 404 returned error can't find the container with id 499d9f98e05522f459aca4332340fcbc91432396c2b61bf6c1c3691904479ba2 Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.526061 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.656966 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-tc47q"] Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.658105 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-tc47q" Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.660274 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.660715 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.669611 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-tc47q"] Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.727882 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.785521 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23204589-3275-4935-9f16-171e3a66fe1b-scripts\") pod \"nova-cell1-conductor-db-sync-tc47q\" (UID: \"23204589-3275-4935-9f16-171e3a66fe1b\") " pod="openstack/nova-cell1-conductor-db-sync-tc47q" Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.785584 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzw8v\" (UniqueName: \"kubernetes.io/projected/23204589-3275-4935-9f16-171e3a66fe1b-kube-api-access-gzw8v\") pod \"nova-cell1-conductor-db-sync-tc47q\" (UID: \"23204589-3275-4935-9f16-171e3a66fe1b\") " pod="openstack/nova-cell1-conductor-db-sync-tc47q" Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.785613 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/23204589-3275-4935-9f16-171e3a66fe1b-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-tc47q\" (UID: \"23204589-3275-4935-9f16-171e3a66fe1b\") " pod="openstack/nova-cell1-conductor-db-sync-tc47q" Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.785657 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23204589-3275-4935-9f16-171e3a66fe1b-config-data\") pod \"nova-cell1-conductor-db-sync-tc47q\" (UID: \"23204589-3275-4935-9f16-171e3a66fe1b\") " pod="openstack/nova-cell1-conductor-db-sync-tc47q" Oct 10 16:52:26 crc kubenswrapper[4799]: W1010 16:52:26.860886 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9e4317e1_d160_41a4_adbc_8226d554bc6a.slice/crio-621a5bc361d8186bd3c7ab2749ebab664b94192c33d120d2ce0a13720edf632c WatchSource:0}: Error finding container 621a5bc361d8186bd3c7ab2749ebab664b94192c33d120d2ce0a13720edf632c: Status 404 returned error can't find the container with id 621a5bc361d8186bd3c7ab2749ebab664b94192c33d120d2ce0a13720edf632c Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.861903 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-694b6b9bcc-4mgtw"] Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.879341 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.887950 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23204589-3275-4935-9f16-171e3a66fe1b-scripts\") pod \"nova-cell1-conductor-db-sync-tc47q\" (UID: \"23204589-3275-4935-9f16-171e3a66fe1b\") " pod="openstack/nova-cell1-conductor-db-sync-tc47q" Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.888015 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzw8v\" (UniqueName: \"kubernetes.io/projected/23204589-3275-4935-9f16-171e3a66fe1b-kube-api-access-gzw8v\") pod \"nova-cell1-conductor-db-sync-tc47q\" (UID: \"23204589-3275-4935-9f16-171e3a66fe1b\") " pod="openstack/nova-cell1-conductor-db-sync-tc47q" Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.888051 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23204589-3275-4935-9f16-171e3a66fe1b-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-tc47q\" (UID: \"23204589-3275-4935-9f16-171e3a66fe1b\") " pod="openstack/nova-cell1-conductor-db-sync-tc47q" Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.888108 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23204589-3275-4935-9f16-171e3a66fe1b-config-data\") pod \"nova-cell1-conductor-db-sync-tc47q\" (UID: \"23204589-3275-4935-9f16-171e3a66fe1b\") " pod="openstack/nova-cell1-conductor-db-sync-tc47q" Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.897324 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23204589-3275-4935-9f16-171e3a66fe1b-scripts\") pod \"nova-cell1-conductor-db-sync-tc47q\" (UID: \"23204589-3275-4935-9f16-171e3a66fe1b\") " pod="openstack/nova-cell1-conductor-db-sync-tc47q" Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.897560 4799 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23204589-3275-4935-9f16-171e3a66fe1b-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-tc47q\" (UID: \"23204589-3275-4935-9f16-171e3a66fe1b\") " pod="openstack/nova-cell1-conductor-db-sync-tc47q" Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.897952 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23204589-3275-4935-9f16-171e3a66fe1b-config-data\") pod \"nova-cell1-conductor-db-sync-tc47q\" (UID: \"23204589-3275-4935-9f16-171e3a66fe1b\") " pod="openstack/nova-cell1-conductor-db-sync-tc47q" Oct 10 16:52:26 crc kubenswrapper[4799]: I1010 16:52:26.905295 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzw8v\" (UniqueName: \"kubernetes.io/projected/23204589-3275-4935-9f16-171e3a66fe1b-kube-api-access-gzw8v\") pod \"nova-cell1-conductor-db-sync-tc47q\" (UID: \"23204589-3275-4935-9f16-171e3a66fe1b\") " pod="openstack/nova-cell1-conductor-db-sync-tc47q" Oct 10 16:52:27 crc kubenswrapper[4799]: I1010 16:52:27.012212 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-tc47q" Oct 10 16:52:27 crc kubenswrapper[4799]: I1010 16:52:27.327845 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"54b8af05-cb42-4c0b-85af-e1cd04c3f5af","Type":"ContainerStarted","Data":"dbbb78124876a5e03da04fb33480cfa2f49c164ef063758d101d2f699a90c8e9"} Oct 10 16:52:27 crc kubenswrapper[4799]: I1010 16:52:27.335544 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-phknx" event={"ID":"c784371b-9184-431f-93cd-92b037921b6b","Type":"ContainerStarted","Data":"addd6c933147fa3bde0b56efc182d2aac6eb56fac50867086fb25a7bb39e07ca"} Oct 10 16:52:27 crc kubenswrapper[4799]: I1010 16:52:27.335583 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-phknx" event={"ID":"c784371b-9184-431f-93cd-92b037921b6b","Type":"ContainerStarted","Data":"14af2e0af318f2eab4337132e88c1d3160c3ef867310c785a5fcbd3ca3724af6"} Oct 10 16:52:27 crc kubenswrapper[4799]: I1010 16:52:27.341162 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f0e73585-38a6-4a89-9ce7-30bd4ea7fe71","Type":"ContainerStarted","Data":"499d9f98e05522f459aca4332340fcbc91432396c2b61bf6c1c3691904479ba2"} Oct 10 16:52:27 crc kubenswrapper[4799]: I1010 16:52:27.343637 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899","Type":"ContainerStarted","Data":"9c32b285ed31c36d61a898fae3dbe1adf1b84f9ef12ddd55c224e63d06525bb8"} Oct 10 16:52:27 crc kubenswrapper[4799]: I1010 16:52:27.345935 4799 generic.go:334] "Generic (PLEG): container finished" podID="9e4317e1-d160-41a4-adbc-8226d554bc6a" containerID="c1cc9ab96e0b2bf01e77f55473311f1a9e3ab523200af1d7102d8a0cafd11b0e" exitCode=0 Oct 10 16:52:27 crc kubenswrapper[4799]: I1010 16:52:27.346006 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-694b6b9bcc-4mgtw" event={"ID":"9e4317e1-d160-41a4-adbc-8226d554bc6a","Type":"ContainerDied","Data":"c1cc9ab96e0b2bf01e77f55473311f1a9e3ab523200af1d7102d8a0cafd11b0e"} Oct 10 16:52:27 crc kubenswrapper[4799]: I1010 16:52:27.346033 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-694b6b9bcc-4mgtw" event={"ID":"9e4317e1-d160-41a4-adbc-8226d554bc6a","Type":"ContainerStarted","Data":"621a5bc361d8186bd3c7ab2749ebab664b94192c33d120d2ce0a13720edf632c"} Oct 10 16:52:27 crc kubenswrapper[4799]: I1010 16:52:27.350908 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e98f1eab-bf7e-4d86-ab62-5294603982ae","Type":"ContainerStarted","Data":"8f180477fba8651cc60f3dabc12a29b9d3825b0f9476af8e4f00bd21b198f53f"} Oct 10 16:52:27 crc kubenswrapper[4799]: I1010 16:52:27.382270 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-phknx" podStartSLOduration=2.382234814 podStartE2EDuration="2.382234814s" podCreationTimestamp="2025-10-10 16:52:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:52:27.361384131 +0000 UTC m=+1240.869708246" watchObservedRunningTime="2025-10-10 16:52:27.382234814 +0000 UTC m=+1240.890558929" Oct 10 16:52:27 crc kubenswrapper[4799]: I1010 16:52:27.481411 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-tc47q"] Oct 10 16:52:28 crc kubenswrapper[4799]: I1010 16:52:28.361949 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-694b6b9bcc-4mgtw" event={"ID":"9e4317e1-d160-41a4-adbc-8226d554bc6a","Type":"ContainerStarted","Data":"c9533479ea097624f2e803b15bdb79d4faabdb36fea0ba6e3f99c85cb3a21f02"} Oct 10 16:52:28 crc kubenswrapper[4799]: I1010 16:52:28.362266 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-694b6b9bcc-4mgtw" Oct 10 16:52:28 crc kubenswrapper[4799]: I1010 16:52:28.364191 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-tc47q" event={"ID":"23204589-3275-4935-9f16-171e3a66fe1b","Type":"ContainerStarted","Data":"7bf6c7d3613def6536265da4a2ef17104e40af69add4759f9d5cb3a1dff047ff"} Oct 10 16:52:28 crc kubenswrapper[4799]: I1010 16:52:28.364230 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-tc47q" event={"ID":"23204589-3275-4935-9f16-171e3a66fe1b","Type":"ContainerStarted","Data":"84a0863a2b9cd87bef025e2574973235d4f06d9c8ba44b174ab3f80b3b7d2a9a"} Oct 10 16:52:28 crc kubenswrapper[4799]: I1010 16:52:28.389055 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-694b6b9bcc-4mgtw" podStartSLOduration=3.389034045 podStartE2EDuration="3.389034045s" podCreationTimestamp="2025-10-10 16:52:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:52:28.380527476 +0000 UTC m=+1241.888851601" watchObservedRunningTime="2025-10-10 16:52:28.389034045 +0000 UTC m=+1241.897358160" Oct 10 16:52:28 crc kubenswrapper[4799]: I1010 16:52:28.409557 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-tc47q" podStartSLOduration=2.40954044 podStartE2EDuration="2.40954044s" podCreationTimestamp="2025-10-10 16:52:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:52:28.402053406 +0000 UTC m=+1241.910377531" watchObservedRunningTime="2025-10-10 16:52:28.40954044 +0000 UTC m=+1241.917864565" Oct 10 16:52:29 crc 
kubenswrapper[4799]: I1010 16:52:29.864893 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 10 16:52:29 crc kubenswrapper[4799]: I1010 16:52:29.872145 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 10 16:52:30 crc kubenswrapper[4799]: I1010 16:52:30.391288 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e98f1eab-bf7e-4d86-ab62-5294603982ae","Type":"ContainerStarted","Data":"aa85025ce5fb865ee1f7f2d10b63641cfa05cd447835f60d17888291883b56d3"} Oct 10 16:52:30 crc kubenswrapper[4799]: I1010 16:52:30.391537 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e98f1eab-bf7e-4d86-ab62-5294603982ae","Type":"ContainerStarted","Data":"7bbf71222455af1fff4ae4868d27ad31019220e3e9b1ab5e5465f8b8c1705c20"} Oct 10 16:52:30 crc kubenswrapper[4799]: I1010 16:52:30.391463 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="e98f1eab-bf7e-4d86-ab62-5294603982ae" containerName="nova-metadata-metadata" containerID="cri-o://aa85025ce5fb865ee1f7f2d10b63641cfa05cd447835f60d17888291883b56d3" gracePeriod=30 Oct 10 16:52:30 crc kubenswrapper[4799]: I1010 16:52:30.391383 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="e98f1eab-bf7e-4d86-ab62-5294603982ae" containerName="nova-metadata-log" containerID="cri-o://7bbf71222455af1fff4ae4868d27ad31019220e3e9b1ab5e5465f8b8c1705c20" gracePeriod=30 Oct 10 16:52:30 crc kubenswrapper[4799]: I1010 16:52:30.398175 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="54b8af05-cb42-4c0b-85af-e1cd04c3f5af" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://1d7442524389524b37c35f42abf2fbafa97cbf60950462f30223e7c21af358f9" gracePeriod=30 Oct 10 16:52:30 crc kubenswrapper[4799]: I1010 16:52:30.398279 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"54b8af05-cb42-4c0b-85af-e1cd04c3f5af","Type":"ContainerStarted","Data":"1d7442524389524b37c35f42abf2fbafa97cbf60950462f30223e7c21af358f9"} Oct 10 16:52:30 crc kubenswrapper[4799]: I1010 16:52:30.405857 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899","Type":"ContainerStarted","Data":"820ca220df9e5c5262ce2f09ca286e73e432d401da8a517181dfb0db455e36e4"} Oct 10 16:52:30 crc kubenswrapper[4799]: I1010 16:52:30.409692 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f0e73585-38a6-4a89-9ce7-30bd4ea7fe71","Type":"ContainerStarted","Data":"67400cc2426eb5ea4cc47a3cb3542ce051591dc896fe9dc87b8d30bbffa78d50"} Oct 10 16:52:30 crc kubenswrapper[4799]: I1010 16:52:30.409875 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f0e73585-38a6-4a89-9ce7-30bd4ea7fe71","Type":"ContainerStarted","Data":"4ce31eab40b5b3f12567f9c020fb29a29b561a6b6a7213cbae5edec02226ca49"} Oct 10 16:52:30 crc kubenswrapper[4799]: I1010 16:52:30.417696 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.7116450739999998 podStartE2EDuration="5.417678876s" podCreationTimestamp="2025-10-10 16:52:25 +0000 UTC" firstStartedPulling="2025-10-10 16:52:26.872581235 +0000 UTC 
m=+1240.380905350" lastFinishedPulling="2025-10-10 16:52:29.578615037 +0000 UTC m=+1243.086939152" observedRunningTime="2025-10-10 16:52:30.415632196 +0000 UTC m=+1243.923956331" watchObservedRunningTime="2025-10-10 16:52:30.417678876 +0000 UTC m=+1243.926002991" Oct 10 16:52:30 crc kubenswrapper[4799]: I1010 16:52:30.436314 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.40525984 podStartE2EDuration="5.436294845s" podCreationTimestamp="2025-10-10 16:52:25 +0000 UTC" firstStartedPulling="2025-10-10 16:52:26.548509855 +0000 UTC m=+1240.056833970" lastFinishedPulling="2025-10-10 16:52:29.57954483 +0000 UTC m=+1243.087868975" observedRunningTime="2025-10-10 16:52:30.430712557 +0000 UTC m=+1243.939036682" watchObservedRunningTime="2025-10-10 16:52:30.436294845 +0000 UTC m=+1243.944618960" Oct 10 16:52:30 crc kubenswrapper[4799]: I1010 16:52:30.452143 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.397179161 podStartE2EDuration="5.452129745s" podCreationTimestamp="2025-10-10 16:52:25 +0000 UTC" firstStartedPulling="2025-10-10 16:52:26.52962613 +0000 UTC m=+1240.037950245" lastFinishedPulling="2025-10-10 16:52:29.584576714 +0000 UTC m=+1243.092900829" observedRunningTime="2025-10-10 16:52:30.450462874 +0000 UTC m=+1243.958786999" watchObservedRunningTime="2025-10-10 16:52:30.452129745 +0000 UTC m=+1243.960453860" Oct 10 16:52:30 crc kubenswrapper[4799]: I1010 16:52:30.497697 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.65785604 podStartE2EDuration="5.497313977s" podCreationTimestamp="2025-10-10 16:52:25 +0000 UTC" firstStartedPulling="2025-10-10 16:52:26.738209976 +0000 UTC m=+1240.246534091" lastFinishedPulling="2025-10-10 16:52:29.577667913 +0000 UTC m=+1243.085992028" observedRunningTime="2025-10-10 16:52:30.465694379 +0000 UTC m=+1243.974018514" watchObservedRunningTime="2025-10-10 16:52:30.497313977 +0000 UTC m=+1244.005638092" Oct 10 16:52:30 crc kubenswrapper[4799]: I1010 16:52:30.924217 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 10 16:52:31 crc kubenswrapper[4799]: I1010 16:52:31.063390 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 10 16:52:31 crc kubenswrapper[4799]: I1010 16:52:31.190098 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 10 16:52:31 crc kubenswrapper[4799]: I1010 16:52:31.190172 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 10 16:52:31 crc kubenswrapper[4799]: I1010 16:52:31.429599 4799 generic.go:334] "Generic (PLEG): container finished" podID="e98f1eab-bf7e-4d86-ab62-5294603982ae" containerID="7bbf71222455af1fff4ae4868d27ad31019220e3e9b1ab5e5465f8b8c1705c20" exitCode=143 Oct 10 16:52:31 crc kubenswrapper[4799]: I1010 16:52:31.430033 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e98f1eab-bf7e-4d86-ab62-5294603982ae","Type":"ContainerDied","Data":"7bbf71222455af1fff4ae4868d27ad31019220e3e9b1ab5e5465f8b8c1705c20"} Oct 10 16:52:34 crc kubenswrapper[4799]: I1010 16:52:34.472150 4799 generic.go:334] "Generic (PLEG): container finished" podID="c784371b-9184-431f-93cd-92b037921b6b" 
containerID="addd6c933147fa3bde0b56efc182d2aac6eb56fac50867086fb25a7bb39e07ca" exitCode=0 Oct 10 16:52:34 crc kubenswrapper[4799]: I1010 16:52:34.472272 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-phknx" event={"ID":"c784371b-9184-431f-93cd-92b037921b6b","Type":"ContainerDied","Data":"addd6c933147fa3bde0b56efc182d2aac6eb56fac50867086fb25a7bb39e07ca"} Oct 10 16:52:35 crc kubenswrapper[4799]: I1010 16:52:35.872443 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 10 16:52:35 crc kubenswrapper[4799]: I1010 16:52:35.872730 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 10 16:52:35 crc kubenswrapper[4799]: I1010 16:52:35.924251 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 10 16:52:35 crc kubenswrapper[4799]: I1010 16:52:35.958551 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-phknx" Oct 10 16:52:35 crc kubenswrapper[4799]: I1010 16:52:35.976229 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 10 16:52:36 crc kubenswrapper[4799]: I1010 16:52:36.082319 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c784371b-9184-431f-93cd-92b037921b6b-combined-ca-bundle\") pod \"c784371b-9184-431f-93cd-92b037921b6b\" (UID: \"c784371b-9184-431f-93cd-92b037921b6b\") " Oct 10 16:52:36 crc kubenswrapper[4799]: I1010 16:52:36.082411 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c784371b-9184-431f-93cd-92b037921b6b-scripts\") pod \"c784371b-9184-431f-93cd-92b037921b6b\" (UID: \"c784371b-9184-431f-93cd-92b037921b6b\") " Oct 10 16:52:36 crc kubenswrapper[4799]: I1010 16:52:36.082621 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zr5q4\" (UniqueName: \"kubernetes.io/projected/c784371b-9184-431f-93cd-92b037921b6b-kube-api-access-zr5q4\") pod \"c784371b-9184-431f-93cd-92b037921b6b\" (UID: \"c784371b-9184-431f-93cd-92b037921b6b\") " Oct 10 16:52:36 crc kubenswrapper[4799]: I1010 16:52:36.082732 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c784371b-9184-431f-93cd-92b037921b6b-config-data\") pod \"c784371b-9184-431f-93cd-92b037921b6b\" (UID: \"c784371b-9184-431f-93cd-92b037921b6b\") " Oct 10 16:52:36 crc kubenswrapper[4799]: I1010 16:52:36.087729 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c784371b-9184-431f-93cd-92b037921b6b-scripts" (OuterVolumeSpecName: "scripts") pod "c784371b-9184-431f-93cd-92b037921b6b" (UID: "c784371b-9184-431f-93cd-92b037921b6b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:52:36 crc kubenswrapper[4799]: I1010 16:52:36.089397 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c784371b-9184-431f-93cd-92b037921b6b-kube-api-access-zr5q4" (OuterVolumeSpecName: "kube-api-access-zr5q4") pod "c784371b-9184-431f-93cd-92b037921b6b" (UID: "c784371b-9184-431f-93cd-92b037921b6b"). InnerVolumeSpecName "kube-api-access-zr5q4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:52:36 crc kubenswrapper[4799]: I1010 16:52:36.119941 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c784371b-9184-431f-93cd-92b037921b6b-config-data" (OuterVolumeSpecName: "config-data") pod "c784371b-9184-431f-93cd-92b037921b6b" (UID: "c784371b-9184-431f-93cd-92b037921b6b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:52:36 crc kubenswrapper[4799]: I1010 16:52:36.124502 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c784371b-9184-431f-93cd-92b037921b6b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c784371b-9184-431f-93cd-92b037921b6b" (UID: "c784371b-9184-431f-93cd-92b037921b6b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:52:36 crc kubenswrapper[4799]: I1010 16:52:36.185952 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c784371b-9184-431f-93cd-92b037921b6b-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:36 crc kubenswrapper[4799]: I1010 16:52:36.185985 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c784371b-9184-431f-93cd-92b037921b6b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:36 crc kubenswrapper[4799]: I1010 16:52:36.185998 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c784371b-9184-431f-93cd-92b037921b6b-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:36 crc kubenswrapper[4799]: I1010 16:52:36.186009 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zr5q4\" (UniqueName: \"kubernetes.io/projected/c784371b-9184-431f-93cd-92b037921b6b-kube-api-access-zr5q4\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:36 crc kubenswrapper[4799]: I1010 16:52:36.265923 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-694b6b9bcc-4mgtw" Oct 10 16:52:36 crc kubenswrapper[4799]: I1010 16:52:36.341694 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f74b5f5cc-54sss"] Oct 10 16:52:36 crc kubenswrapper[4799]: I1010 16:52:36.342023 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5f74b5f5cc-54sss" podUID="e6a761e9-d08f-4c77-8441-b7fe50f5ffdd" containerName="dnsmasq-dns" containerID="cri-o://fd59d61fc5fd52a7907ba716a92a3b1b087699ca7d1f1416c6fcc3348300f073" gracePeriod=10 Oct 10 16:52:36 crc kubenswrapper[4799]: I1010 16:52:36.496077 4799 generic.go:334] "Generic (PLEG): container finished" podID="23204589-3275-4935-9f16-171e3a66fe1b" containerID="7bf6c7d3613def6536265da4a2ef17104e40af69add4759f9d5cb3a1dff047ff" exitCode=0 Oct 10 16:52:36 crc kubenswrapper[4799]: I1010 16:52:36.496161 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-tc47q" event={"ID":"23204589-3275-4935-9f16-171e3a66fe1b","Type":"ContainerDied","Data":"7bf6c7d3613def6536265da4a2ef17104e40af69add4759f9d5cb3a1dff047ff"} Oct 10 16:52:36 crc kubenswrapper[4799]: I1010 16:52:36.498640 4799 generic.go:334] "Generic (PLEG): container finished" podID="e6a761e9-d08f-4c77-8441-b7fe50f5ffdd" containerID="fd59d61fc5fd52a7907ba716a92a3b1b087699ca7d1f1416c6fcc3348300f073" exitCode=0 Oct 10 16:52:36 crc 
kubenswrapper[4799]: I1010 16:52:36.498802 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f74b5f5cc-54sss" event={"ID":"e6a761e9-d08f-4c77-8441-b7fe50f5ffdd","Type":"ContainerDied","Data":"fd59d61fc5fd52a7907ba716a92a3b1b087699ca7d1f1416c6fcc3348300f073"} Oct 10 16:52:36 crc kubenswrapper[4799]: I1010 16:52:36.500990 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-phknx" Oct 10 16:52:36 crc kubenswrapper[4799]: I1010 16:52:36.501982 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-phknx" event={"ID":"c784371b-9184-431f-93cd-92b037921b6b","Type":"ContainerDied","Data":"14af2e0af318f2eab4337132e88c1d3160c3ef867310c785a5fcbd3ca3724af6"} Oct 10 16:52:36 crc kubenswrapper[4799]: I1010 16:52:36.502018 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="14af2e0af318f2eab4337132e88c1d3160c3ef867310c785a5fcbd3ca3724af6" Oct 10 16:52:36 crc kubenswrapper[4799]: I1010 16:52:36.553732 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 10 16:52:36 crc kubenswrapper[4799]: I1010 16:52:36.669420 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 10 16:52:36 crc kubenswrapper[4799]: I1010 16:52:36.669903 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="f0e73585-38a6-4a89-9ce7-30bd4ea7fe71" containerName="nova-api-log" containerID="cri-o://4ce31eab40b5b3f12567f9c020fb29a29b561a6b6a7213cbae5edec02226ca49" gracePeriod=30 Oct 10 16:52:36 crc kubenswrapper[4799]: I1010 16:52:36.670352 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="f0e73585-38a6-4a89-9ce7-30bd4ea7fe71" containerName="nova-api-api" containerID="cri-o://67400cc2426eb5ea4cc47a3cb3542ce051591dc896fe9dc87b8d30bbffa78d50" gracePeriod=30 Oct 10 16:52:36 crc kubenswrapper[4799]: I1010 16:52:36.676672 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="f0e73585-38a6-4a89-9ce7-30bd4ea7fe71" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.190:8774/\": EOF" Oct 10 16:52:36 crc kubenswrapper[4799]: I1010 16:52:36.676831 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="f0e73585-38a6-4a89-9ce7-30bd4ea7fe71" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.190:8774/\": EOF" Oct 10 16:52:36 crc kubenswrapper[4799]: I1010 16:52:36.849097 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f74b5f5cc-54sss" Oct 10 16:52:36 crc kubenswrapper[4799]: I1010 16:52:36.906518 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-ovsdbserver-nb\") pod \"e6a761e9-d08f-4c77-8441-b7fe50f5ffdd\" (UID: \"e6a761e9-d08f-4c77-8441-b7fe50f5ffdd\") " Oct 10 16:52:36 crc kubenswrapper[4799]: I1010 16:52:36.906579 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-config\") pod \"e6a761e9-d08f-4c77-8441-b7fe50f5ffdd\" (UID: \"e6a761e9-d08f-4c77-8441-b7fe50f5ffdd\") " Oct 10 16:52:36 crc kubenswrapper[4799]: I1010 16:52:36.906597 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-dns-svc\") pod \"e6a761e9-d08f-4c77-8441-b7fe50f5ffdd\" (UID: \"e6a761e9-d08f-4c77-8441-b7fe50f5ffdd\") " Oct 10 16:52:36 crc kubenswrapper[4799]: I1010 16:52:36.906643 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j7q7n\" (UniqueName: \"kubernetes.io/projected/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-kube-api-access-j7q7n\") pod \"e6a761e9-d08f-4c77-8441-b7fe50f5ffdd\" (UID: \"e6a761e9-d08f-4c77-8441-b7fe50f5ffdd\") " Oct 10 16:52:36 crc kubenswrapper[4799]: I1010 16:52:36.906725 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-ovsdbserver-sb\") pod \"e6a761e9-d08f-4c77-8441-b7fe50f5ffdd\" (UID: \"e6a761e9-d08f-4c77-8441-b7fe50f5ffdd\") " Oct 10 16:52:36 crc kubenswrapper[4799]: I1010 16:52:36.906809 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-dns-swift-storage-0\") pod \"e6a761e9-d08f-4c77-8441-b7fe50f5ffdd\" (UID: \"e6a761e9-d08f-4c77-8441-b7fe50f5ffdd\") " Oct 10 16:52:36 crc kubenswrapper[4799]: I1010 16:52:36.934192 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-kube-api-access-j7q7n" (OuterVolumeSpecName: "kube-api-access-j7q7n") pod "e6a761e9-d08f-4c77-8441-b7fe50f5ffdd" (UID: "e6a761e9-d08f-4c77-8441-b7fe50f5ffdd"). InnerVolumeSpecName "kube-api-access-j7q7n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:52:37 crc kubenswrapper[4799]: I1010 16:52:37.002737 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e6a761e9-d08f-4c77-8441-b7fe50f5ffdd" (UID: "e6a761e9-d08f-4c77-8441-b7fe50f5ffdd"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:52:37 crc kubenswrapper[4799]: I1010 16:52:37.010117 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j7q7n\" (UniqueName: \"kubernetes.io/projected/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-kube-api-access-j7q7n\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:37 crc kubenswrapper[4799]: I1010 16:52:37.010148 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:37 crc kubenswrapper[4799]: I1010 16:52:37.011570 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "e6a761e9-d08f-4c77-8441-b7fe50f5ffdd" (UID: "e6a761e9-d08f-4c77-8441-b7fe50f5ffdd"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:52:37 crc kubenswrapper[4799]: I1010 16:52:37.011582 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e6a761e9-d08f-4c77-8441-b7fe50f5ffdd" (UID: "e6a761e9-d08f-4c77-8441-b7fe50f5ffdd"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:52:37 crc kubenswrapper[4799]: I1010 16:52:37.012126 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e6a761e9-d08f-4c77-8441-b7fe50f5ffdd" (UID: "e6a761e9-d08f-4c77-8441-b7fe50f5ffdd"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:52:37 crc kubenswrapper[4799]: I1010 16:52:37.026857 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-config" (OuterVolumeSpecName: "config") pod "e6a761e9-d08f-4c77-8441-b7fe50f5ffdd" (UID: "e6a761e9-d08f-4c77-8441-b7fe50f5ffdd"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:52:37 crc kubenswrapper[4799]: I1010 16:52:37.034436 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 10 16:52:37 crc kubenswrapper[4799]: I1010 16:52:37.111799 4799 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:37 crc kubenswrapper[4799]: I1010 16:52:37.111849 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:37 crc kubenswrapper[4799]: I1010 16:52:37.111862 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:37 crc kubenswrapper[4799]: I1010 16:52:37.111874 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:37 crc kubenswrapper[4799]: I1010 16:52:37.511596 4799 generic.go:334] "Generic (PLEG): container finished" podID="f0e73585-38a6-4a89-9ce7-30bd4ea7fe71" containerID="4ce31eab40b5b3f12567f9c020fb29a29b561a6b6a7213cbae5edec02226ca49" exitCode=143 Oct 10 16:52:37 crc kubenswrapper[4799]: I1010 16:52:37.511718 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f0e73585-38a6-4a89-9ce7-30bd4ea7fe71","Type":"ContainerDied","Data":"4ce31eab40b5b3f12567f9c020fb29a29b561a6b6a7213cbae5edec02226ca49"} Oct 10 16:52:37 crc kubenswrapper[4799]: I1010 16:52:37.515067 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f74b5f5cc-54sss" Oct 10 16:52:37 crc kubenswrapper[4799]: I1010 16:52:37.515291 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f74b5f5cc-54sss" event={"ID":"e6a761e9-d08f-4c77-8441-b7fe50f5ffdd","Type":"ContainerDied","Data":"621e41439dd364e0c198678859d4a8895a6d9019fe6bb4f62b7314c489568d96"} Oct 10 16:52:37 crc kubenswrapper[4799]: I1010 16:52:37.515388 4799 scope.go:117] "RemoveContainer" containerID="fd59d61fc5fd52a7907ba716a92a3b1b087699ca7d1f1416c6fcc3348300f073" Oct 10 16:52:37 crc kubenswrapper[4799]: I1010 16:52:37.546787 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f74b5f5cc-54sss"] Oct 10 16:52:37 crc kubenswrapper[4799]: I1010 16:52:37.554757 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5f74b5f5cc-54sss"] Oct 10 16:52:37 crc kubenswrapper[4799]: I1010 16:52:37.558053 4799 scope.go:117] "RemoveContainer" containerID="a2769586af832ea36b0234b06dc38e503d5edf31198c982d5c9ca14e24e55699" Oct 10 16:52:37 crc kubenswrapper[4799]: I1010 16:52:37.800184 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-tc47q" Oct 10 16:52:37 crc kubenswrapper[4799]: I1010 16:52:37.954437 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23204589-3275-4935-9f16-171e3a66fe1b-scripts\") pod \"23204589-3275-4935-9f16-171e3a66fe1b\" (UID: \"23204589-3275-4935-9f16-171e3a66fe1b\") " Oct 10 16:52:37 crc kubenswrapper[4799]: I1010 16:52:37.954551 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gzw8v\" (UniqueName: \"kubernetes.io/projected/23204589-3275-4935-9f16-171e3a66fe1b-kube-api-access-gzw8v\") pod \"23204589-3275-4935-9f16-171e3a66fe1b\" (UID: \"23204589-3275-4935-9f16-171e3a66fe1b\") " Oct 10 16:52:37 crc kubenswrapper[4799]: I1010 16:52:37.954607 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23204589-3275-4935-9f16-171e3a66fe1b-config-data\") pod \"23204589-3275-4935-9f16-171e3a66fe1b\" (UID: \"23204589-3275-4935-9f16-171e3a66fe1b\") " Oct 10 16:52:37 crc kubenswrapper[4799]: I1010 16:52:37.954705 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23204589-3275-4935-9f16-171e3a66fe1b-combined-ca-bundle\") pod \"23204589-3275-4935-9f16-171e3a66fe1b\" (UID: \"23204589-3275-4935-9f16-171e3a66fe1b\") " Oct 10 16:52:37 crc kubenswrapper[4799]: I1010 16:52:37.959281 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23204589-3275-4935-9f16-171e3a66fe1b-scripts" (OuterVolumeSpecName: "scripts") pod "23204589-3275-4935-9f16-171e3a66fe1b" (UID: "23204589-3275-4935-9f16-171e3a66fe1b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:52:37 crc kubenswrapper[4799]: I1010 16:52:37.960965 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23204589-3275-4935-9f16-171e3a66fe1b-kube-api-access-gzw8v" (OuterVolumeSpecName: "kube-api-access-gzw8v") pod "23204589-3275-4935-9f16-171e3a66fe1b" (UID: "23204589-3275-4935-9f16-171e3a66fe1b"). InnerVolumeSpecName "kube-api-access-gzw8v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:52:37 crc kubenswrapper[4799]: I1010 16:52:37.988460 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23204589-3275-4935-9f16-171e3a66fe1b-config-data" (OuterVolumeSpecName: "config-data") pod "23204589-3275-4935-9f16-171e3a66fe1b" (UID: "23204589-3275-4935-9f16-171e3a66fe1b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:52:37 crc kubenswrapper[4799]: I1010 16:52:37.988993 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23204589-3275-4935-9f16-171e3a66fe1b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "23204589-3275-4935-9f16-171e3a66fe1b" (UID: "23204589-3275-4935-9f16-171e3a66fe1b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:52:38 crc kubenswrapper[4799]: I1010 16:52:38.057501 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23204589-3275-4935-9f16-171e3a66fe1b-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:38 crc kubenswrapper[4799]: I1010 16:52:38.057542 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gzw8v\" (UniqueName: \"kubernetes.io/projected/23204589-3275-4935-9f16-171e3a66fe1b-kube-api-access-gzw8v\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:38 crc kubenswrapper[4799]: I1010 16:52:38.057554 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23204589-3275-4935-9f16-171e3a66fe1b-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:38 crc kubenswrapper[4799]: I1010 16:52:38.057566 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23204589-3275-4935-9f16-171e3a66fe1b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:38 crc kubenswrapper[4799]: I1010 16:52:38.529498 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899" containerName="nova-scheduler-scheduler" containerID="cri-o://820ca220df9e5c5262ce2f09ca286e73e432d401da8a517181dfb0db455e36e4" gracePeriod=30 Oct 10 16:52:38 crc kubenswrapper[4799]: I1010 16:52:38.530031 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-tc47q" Oct 10 16:52:38 crc kubenswrapper[4799]: I1010 16:52:38.544911 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-tc47q" event={"ID":"23204589-3275-4935-9f16-171e3a66fe1b","Type":"ContainerDied","Data":"84a0863a2b9cd87bef025e2574973235d4f06d9c8ba44b174ab3f80b3b7d2a9a"} Oct 10 16:52:38 crc kubenswrapper[4799]: I1010 16:52:38.544972 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="84a0863a2b9cd87bef025e2574973235d4f06d9c8ba44b174ab3f80b3b7d2a9a" Oct 10 16:52:38 crc kubenswrapper[4799]: I1010 16:52:38.605735 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 10 16:52:38 crc kubenswrapper[4799]: E1010 16:52:38.606148 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6a761e9-d08f-4c77-8441-b7fe50f5ffdd" containerName="dnsmasq-dns" Oct 10 16:52:38 crc kubenswrapper[4799]: I1010 16:52:38.606170 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6a761e9-d08f-4c77-8441-b7fe50f5ffdd" containerName="dnsmasq-dns" Oct 10 16:52:38 crc kubenswrapper[4799]: E1010 16:52:38.606189 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c784371b-9184-431f-93cd-92b037921b6b" containerName="nova-manage" Oct 10 16:52:38 crc kubenswrapper[4799]: I1010 16:52:38.606196 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="c784371b-9184-431f-93cd-92b037921b6b" containerName="nova-manage" Oct 10 16:52:38 crc kubenswrapper[4799]: E1010 16:52:38.606215 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23204589-3275-4935-9f16-171e3a66fe1b" containerName="nova-cell1-conductor-db-sync" Oct 10 16:52:38 crc kubenswrapper[4799]: I1010 16:52:38.606222 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="23204589-3275-4935-9f16-171e3a66fe1b" 
containerName="nova-cell1-conductor-db-sync" Oct 10 16:52:38 crc kubenswrapper[4799]: E1010 16:52:38.606246 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6a761e9-d08f-4c77-8441-b7fe50f5ffdd" containerName="init" Oct 10 16:52:38 crc kubenswrapper[4799]: I1010 16:52:38.606251 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6a761e9-d08f-4c77-8441-b7fe50f5ffdd" containerName="init" Oct 10 16:52:38 crc kubenswrapper[4799]: I1010 16:52:38.606432 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6a761e9-d08f-4c77-8441-b7fe50f5ffdd" containerName="dnsmasq-dns" Oct 10 16:52:38 crc kubenswrapper[4799]: I1010 16:52:38.606442 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="c784371b-9184-431f-93cd-92b037921b6b" containerName="nova-manage" Oct 10 16:52:38 crc kubenswrapper[4799]: I1010 16:52:38.606456 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="23204589-3275-4935-9f16-171e3a66fe1b" containerName="nova-cell1-conductor-db-sync" Oct 10 16:52:38 crc kubenswrapper[4799]: I1010 16:52:38.607128 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 10 16:52:38 crc kubenswrapper[4799]: I1010 16:52:38.610385 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 10 16:52:38 crc kubenswrapper[4799]: I1010 16:52:38.641939 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 10 16:52:38 crc kubenswrapper[4799]: I1010 16:52:38.666270 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7f2f6\" (UniqueName: \"kubernetes.io/projected/971dd170-cc55-481f-b76d-820102f811cd-kube-api-access-7f2f6\") pod \"nova-cell1-conductor-0\" (UID: \"971dd170-cc55-481f-b76d-820102f811cd\") " pod="openstack/nova-cell1-conductor-0" Oct 10 16:52:38 crc kubenswrapper[4799]: I1010 16:52:38.666330 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/971dd170-cc55-481f-b76d-820102f811cd-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"971dd170-cc55-481f-b76d-820102f811cd\") " pod="openstack/nova-cell1-conductor-0" Oct 10 16:52:38 crc kubenswrapper[4799]: I1010 16:52:38.666373 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/971dd170-cc55-481f-b76d-820102f811cd-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"971dd170-cc55-481f-b76d-820102f811cd\") " pod="openstack/nova-cell1-conductor-0" Oct 10 16:52:38 crc kubenswrapper[4799]: I1010 16:52:38.767769 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7f2f6\" (UniqueName: \"kubernetes.io/projected/971dd170-cc55-481f-b76d-820102f811cd-kube-api-access-7f2f6\") pod \"nova-cell1-conductor-0\" (UID: \"971dd170-cc55-481f-b76d-820102f811cd\") " pod="openstack/nova-cell1-conductor-0" Oct 10 16:52:38 crc kubenswrapper[4799]: I1010 16:52:38.767822 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/971dd170-cc55-481f-b76d-820102f811cd-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"971dd170-cc55-481f-b76d-820102f811cd\") " pod="openstack/nova-cell1-conductor-0" Oct 10 16:52:38 crc 
kubenswrapper[4799]: I1010 16:52:38.767862 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/971dd170-cc55-481f-b76d-820102f811cd-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"971dd170-cc55-481f-b76d-820102f811cd\") " pod="openstack/nova-cell1-conductor-0" Oct 10 16:52:38 crc kubenswrapper[4799]: I1010 16:52:38.772400 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/971dd170-cc55-481f-b76d-820102f811cd-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"971dd170-cc55-481f-b76d-820102f811cd\") " pod="openstack/nova-cell1-conductor-0" Oct 10 16:52:38 crc kubenswrapper[4799]: I1010 16:52:38.772879 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/971dd170-cc55-481f-b76d-820102f811cd-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"971dd170-cc55-481f-b76d-820102f811cd\") " pod="openstack/nova-cell1-conductor-0" Oct 10 16:52:38 crc kubenswrapper[4799]: I1010 16:52:38.785347 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7f2f6\" (UniqueName: \"kubernetes.io/projected/971dd170-cc55-481f-b76d-820102f811cd-kube-api-access-7f2f6\") pod \"nova-cell1-conductor-0\" (UID: \"971dd170-cc55-481f-b76d-820102f811cd\") " pod="openstack/nova-cell1-conductor-0" Oct 10 16:52:38 crc kubenswrapper[4799]: I1010 16:52:38.924248 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 10 16:52:39 crc kubenswrapper[4799]: I1010 16:52:39.372266 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 10 16:52:39 crc kubenswrapper[4799]: W1010 16:52:39.379172 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod971dd170_cc55_481f_b76d_820102f811cd.slice/crio-4c5f15e3c16097cc6eeac97326e01d5fd2c7aa95ecbb95452de322a6b6a25a4d WatchSource:0}: Error finding container 4c5f15e3c16097cc6eeac97326e01d5fd2c7aa95ecbb95452de322a6b6a25a4d: Status 404 returned error can't find the container with id 4c5f15e3c16097cc6eeac97326e01d5fd2c7aa95ecbb95452de322a6b6a25a4d Oct 10 16:52:39 crc kubenswrapper[4799]: I1010 16:52:39.424199 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6a761e9-d08f-4c77-8441-b7fe50f5ffdd" path="/var/lib/kubelet/pods/e6a761e9-d08f-4c77-8441-b7fe50f5ffdd/volumes" Oct 10 16:52:39 crc kubenswrapper[4799]: I1010 16:52:39.539911 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"971dd170-cc55-481f-b76d-820102f811cd","Type":"ContainerStarted","Data":"4c5f15e3c16097cc6eeac97326e01d5fd2c7aa95ecbb95452de322a6b6a25a4d"} Oct 10 16:52:39 crc kubenswrapper[4799]: I1010 16:52:39.830887 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 10 16:52:40 crc kubenswrapper[4799]: I1010 16:52:40.552632 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"971dd170-cc55-481f-b76d-820102f811cd","Type":"ContainerStarted","Data":"ff0b33623ee2e909045d84098d1c8b4f4ee31b12318171307a8ee09a9499c92d"} Oct 10 16:52:40 crc kubenswrapper[4799]: I1010 16:52:40.554068 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Oct 10 
16:52:40 crc kubenswrapper[4799]: I1010 16:52:40.580261 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.580235202 podStartE2EDuration="2.580235202s" podCreationTimestamp="2025-10-10 16:52:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:52:40.570932484 +0000 UTC m=+1254.079256639" watchObservedRunningTime="2025-10-10 16:52:40.580235202 +0000 UTC m=+1254.088559327" Oct 10 16:52:40 crc kubenswrapper[4799]: E1010 16:52:40.928078 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="820ca220df9e5c5262ce2f09ca286e73e432d401da8a517181dfb0db455e36e4" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 10 16:52:40 crc kubenswrapper[4799]: E1010 16:52:40.930588 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="820ca220df9e5c5262ce2f09ca286e73e432d401da8a517181dfb0db455e36e4" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 10 16:52:40 crc kubenswrapper[4799]: E1010 16:52:40.932619 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="820ca220df9e5c5262ce2f09ca286e73e432d401da8a517181dfb0db455e36e4" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 10 16:52:40 crc kubenswrapper[4799]: E1010 16:52:40.932689 4799 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899" containerName="nova-scheduler-scheduler" Oct 10 16:52:41 crc kubenswrapper[4799]: I1010 16:52:41.567940 4799 generic.go:334] "Generic (PLEG): container finished" podID="e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899" containerID="820ca220df9e5c5262ce2f09ca286e73e432d401da8a517181dfb0db455e36e4" exitCode=0 Oct 10 16:52:41 crc kubenswrapper[4799]: I1010 16:52:41.568004 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899","Type":"ContainerDied","Data":"820ca220df9e5c5262ce2f09ca286e73e432d401da8a517181dfb0db455e36e4"} Oct 10 16:52:41 crc kubenswrapper[4799]: I1010 16:52:41.681977 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5f74b5f5cc-54sss" podUID="e6a761e9-d08f-4c77-8441-b7fe50f5ffdd" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.171:5353: i/o timeout" Oct 10 16:52:41 crc kubenswrapper[4799]: I1010 16:52:41.950066 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.032941 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899-combined-ca-bundle\") pod \"e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899\" (UID: \"e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899\") " Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.033103 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899-config-data\") pod \"e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899\" (UID: \"e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899\") " Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.033248 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ckdmw\" (UniqueName: \"kubernetes.io/projected/e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899-kube-api-access-ckdmw\") pod \"e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899\" (UID: \"e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899\") " Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.045402 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899-kube-api-access-ckdmw" (OuterVolumeSpecName: "kube-api-access-ckdmw") pod "e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899" (UID: "e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899"). InnerVolumeSpecName "kube-api-access-ckdmw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.066869 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899-config-data" (OuterVolumeSpecName: "config-data") pod "e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899" (UID: "e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.099203 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899" (UID: "e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.135092 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ckdmw\" (UniqueName: \"kubernetes.io/projected/e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899-kube-api-access-ckdmw\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.135124 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.135134 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.479064 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.542306 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0e73585-38a6-4a89-9ce7-30bd4ea7fe71-combined-ca-bundle\") pod \"f0e73585-38a6-4a89-9ce7-30bd4ea7fe71\" (UID: \"f0e73585-38a6-4a89-9ce7-30bd4ea7fe71\") " Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.542376 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f0e73585-38a6-4a89-9ce7-30bd4ea7fe71-logs\") pod \"f0e73585-38a6-4a89-9ce7-30bd4ea7fe71\" (UID: \"f0e73585-38a6-4a89-9ce7-30bd4ea7fe71\") " Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.542428 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jtwtl\" (UniqueName: \"kubernetes.io/projected/f0e73585-38a6-4a89-9ce7-30bd4ea7fe71-kube-api-access-jtwtl\") pod \"f0e73585-38a6-4a89-9ce7-30bd4ea7fe71\" (UID: \"f0e73585-38a6-4a89-9ce7-30bd4ea7fe71\") " Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.542569 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0e73585-38a6-4a89-9ce7-30bd4ea7fe71-config-data\") pod \"f0e73585-38a6-4a89-9ce7-30bd4ea7fe71\" (UID: \"f0e73585-38a6-4a89-9ce7-30bd4ea7fe71\") " Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.543350 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f0e73585-38a6-4a89-9ce7-30bd4ea7fe71-logs" (OuterVolumeSpecName: "logs") pod "f0e73585-38a6-4a89-9ce7-30bd4ea7fe71" (UID: "f0e73585-38a6-4a89-9ce7-30bd4ea7fe71"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.551049 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0e73585-38a6-4a89-9ce7-30bd4ea7fe71-kube-api-access-jtwtl" (OuterVolumeSpecName: "kube-api-access-jtwtl") pod "f0e73585-38a6-4a89-9ce7-30bd4ea7fe71" (UID: "f0e73585-38a6-4a89-9ce7-30bd4ea7fe71"). InnerVolumeSpecName "kube-api-access-jtwtl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.575842 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0e73585-38a6-4a89-9ce7-30bd4ea7fe71-config-data" (OuterVolumeSpecName: "config-data") pod "f0e73585-38a6-4a89-9ce7-30bd4ea7fe71" (UID: "f0e73585-38a6-4a89-9ce7-30bd4ea7fe71"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.584097 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0e73585-38a6-4a89-9ce7-30bd4ea7fe71-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f0e73585-38a6-4a89-9ce7-30bd4ea7fe71" (UID: "f0e73585-38a6-4a89-9ce7-30bd4ea7fe71"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.588810 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.588811 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899","Type":"ContainerDied","Data":"9c32b285ed31c36d61a898fae3dbe1adf1b84f9ef12ddd55c224e63d06525bb8"} Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.588888 4799 scope.go:117] "RemoveContainer" containerID="820ca220df9e5c5262ce2f09ca286e73e432d401da8a517181dfb0db455e36e4" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.602741 4799 generic.go:334] "Generic (PLEG): container finished" podID="f0e73585-38a6-4a89-9ce7-30bd4ea7fe71" containerID="67400cc2426eb5ea4cc47a3cb3542ce051591dc896fe9dc87b8d30bbffa78d50" exitCode=0 Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.602836 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.602864 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f0e73585-38a6-4a89-9ce7-30bd4ea7fe71","Type":"ContainerDied","Data":"67400cc2426eb5ea4cc47a3cb3542ce051591dc896fe9dc87b8d30bbffa78d50"} Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.602918 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f0e73585-38a6-4a89-9ce7-30bd4ea7fe71","Type":"ContainerDied","Data":"499d9f98e05522f459aca4332340fcbc91432396c2b61bf6c1c3691904479ba2"} Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.645390 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0e73585-38a6-4a89-9ce7-30bd4ea7fe71-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.645427 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f0e73585-38a6-4a89-9ce7-30bd4ea7fe71-logs\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.645440 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jtwtl\" (UniqueName: \"kubernetes.io/projected/f0e73585-38a6-4a89-9ce7-30bd4ea7fe71-kube-api-access-jtwtl\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.645533 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0e73585-38a6-4a89-9ce7-30bd4ea7fe71-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.694969 4799 scope.go:117] "RemoveContainer" containerID="67400cc2426eb5ea4cc47a3cb3542ce051591dc896fe9dc87b8d30bbffa78d50" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.698137 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.707319 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.714343 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.720798 4799 scope.go:117] "RemoveContainer" containerID="4ce31eab40b5b3f12567f9c020fb29a29b561a6b6a7213cbae5edec02226ca49" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.722123 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/nova-scheduler-0"] Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.732666 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 10 16:52:42 crc kubenswrapper[4799]: E1010 16:52:42.733211 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899" containerName="nova-scheduler-scheduler" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.733236 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899" containerName="nova-scheduler-scheduler" Oct 10 16:52:42 crc kubenswrapper[4799]: E1010 16:52:42.733268 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0e73585-38a6-4a89-9ce7-30bd4ea7fe71" containerName="nova-api-log" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.733278 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0e73585-38a6-4a89-9ce7-30bd4ea7fe71" containerName="nova-api-log" Oct 10 16:52:42 crc kubenswrapper[4799]: E1010 16:52:42.733304 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0e73585-38a6-4a89-9ce7-30bd4ea7fe71" containerName="nova-api-api" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.733314 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0e73585-38a6-4a89-9ce7-30bd4ea7fe71" containerName="nova-api-api" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.733530 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0e73585-38a6-4a89-9ce7-30bd4ea7fe71" containerName="nova-api-api" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.733545 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899" containerName="nova-scheduler-scheduler" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.733578 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0e73585-38a6-4a89-9ce7-30bd4ea7fe71" containerName="nova-api-log" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.734917 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.738646 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.744103 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.755806 4799 scope.go:117] "RemoveContainer" containerID="67400cc2426eb5ea4cc47a3cb3542ce051591dc896fe9dc87b8d30bbffa78d50" Oct 10 16:52:42 crc kubenswrapper[4799]: E1010 16:52:42.756391 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67400cc2426eb5ea4cc47a3cb3542ce051591dc896fe9dc87b8d30bbffa78d50\": container with ID starting with 67400cc2426eb5ea4cc47a3cb3542ce051591dc896fe9dc87b8d30bbffa78d50 not found: ID does not exist" containerID="67400cc2426eb5ea4cc47a3cb3542ce051591dc896fe9dc87b8d30bbffa78d50" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.756436 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67400cc2426eb5ea4cc47a3cb3542ce051591dc896fe9dc87b8d30bbffa78d50"} err="failed to get container status \"67400cc2426eb5ea4cc47a3cb3542ce051591dc896fe9dc87b8d30bbffa78d50\": rpc error: code = NotFound desc = could not find container \"67400cc2426eb5ea4cc47a3cb3542ce051591dc896fe9dc87b8d30bbffa78d50\": container with ID starting with 67400cc2426eb5ea4cc47a3cb3542ce051591dc896fe9dc87b8d30bbffa78d50 not found: ID does not exist" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.756465 4799 scope.go:117] "RemoveContainer" containerID="4ce31eab40b5b3f12567f9c020fb29a29b561a6b6a7213cbae5edec02226ca49" Oct 10 16:52:42 crc kubenswrapper[4799]: E1010 16:52:42.756834 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ce31eab40b5b3f12567f9c020fb29a29b561a6b6a7213cbae5edec02226ca49\": container with ID starting with 4ce31eab40b5b3f12567f9c020fb29a29b561a6b6a7213cbae5edec02226ca49 not found: ID does not exist" containerID="4ce31eab40b5b3f12567f9c020fb29a29b561a6b6a7213cbae5edec02226ca49" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.756863 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ce31eab40b5b3f12567f9c020fb29a29b561a6b6a7213cbae5edec02226ca49"} err="failed to get container status \"4ce31eab40b5b3f12567f9c020fb29a29b561a6b6a7213cbae5edec02226ca49\": rpc error: code = NotFound desc = could not find container \"4ce31eab40b5b3f12567f9c020fb29a29b561a6b6a7213cbae5edec02226ca49\": container with ID starting with 4ce31eab40b5b3f12567f9c020fb29a29b561a6b6a7213cbae5edec02226ca49 not found: ID does not exist" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.767318 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.768736 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.775489 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.776871 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.862328 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c70468e-4a71-405f-9171-9246f62aec11-config-data\") pod \"nova-api-0\" (UID: \"2c70468e-4a71-405f-9171-9246f62aec11\") " pod="openstack/nova-api-0" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.862370 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w6gkt\" (UniqueName: \"kubernetes.io/projected/2c70468e-4a71-405f-9171-9246f62aec11-kube-api-access-w6gkt\") pod \"nova-api-0\" (UID: \"2c70468e-4a71-405f-9171-9246f62aec11\") " pod="openstack/nova-api-0" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.863376 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b86c2c2a-e776-4223-acd0-3a4832b67cb0-config-data\") pod \"nova-scheduler-0\" (UID: \"b86c2c2a-e776-4223-acd0-3a4832b67cb0\") " pod="openstack/nova-scheduler-0" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.863776 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqcj5\" (UniqueName: \"kubernetes.io/projected/b86c2c2a-e776-4223-acd0-3a4832b67cb0-kube-api-access-dqcj5\") pod \"nova-scheduler-0\" (UID: \"b86c2c2a-e776-4223-acd0-3a4832b67cb0\") " pod="openstack/nova-scheduler-0" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.863903 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2c70468e-4a71-405f-9171-9246f62aec11-logs\") pod \"nova-api-0\" (UID: \"2c70468e-4a71-405f-9171-9246f62aec11\") " pod="openstack/nova-api-0" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.863943 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b86c2c2a-e776-4223-acd0-3a4832b67cb0-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b86c2c2a-e776-4223-acd0-3a4832b67cb0\") " pod="openstack/nova-scheduler-0" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.864067 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c70468e-4a71-405f-9171-9246f62aec11-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2c70468e-4a71-405f-9171-9246f62aec11\") " pod="openstack/nova-api-0" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.966007 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c70468e-4a71-405f-9171-9246f62aec11-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2c70468e-4a71-405f-9171-9246f62aec11\") " pod="openstack/nova-api-0" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.966062 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/2c70468e-4a71-405f-9171-9246f62aec11-config-data\") pod \"nova-api-0\" (UID: \"2c70468e-4a71-405f-9171-9246f62aec11\") " pod="openstack/nova-api-0" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.966079 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w6gkt\" (UniqueName: \"kubernetes.io/projected/2c70468e-4a71-405f-9171-9246f62aec11-kube-api-access-w6gkt\") pod \"nova-api-0\" (UID: \"2c70468e-4a71-405f-9171-9246f62aec11\") " pod="openstack/nova-api-0" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.966131 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b86c2c2a-e776-4223-acd0-3a4832b67cb0-config-data\") pod \"nova-scheduler-0\" (UID: \"b86c2c2a-e776-4223-acd0-3a4832b67cb0\") " pod="openstack/nova-scheduler-0" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.966243 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqcj5\" (UniqueName: \"kubernetes.io/projected/b86c2c2a-e776-4223-acd0-3a4832b67cb0-kube-api-access-dqcj5\") pod \"nova-scheduler-0\" (UID: \"b86c2c2a-e776-4223-acd0-3a4832b67cb0\") " pod="openstack/nova-scheduler-0" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.966288 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2c70468e-4a71-405f-9171-9246f62aec11-logs\") pod \"nova-api-0\" (UID: \"2c70468e-4a71-405f-9171-9246f62aec11\") " pod="openstack/nova-api-0" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.966788 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b86c2c2a-e776-4223-acd0-3a4832b67cb0-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b86c2c2a-e776-4223-acd0-3a4832b67cb0\") " pod="openstack/nova-scheduler-0" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.966808 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2c70468e-4a71-405f-9171-9246f62aec11-logs\") pod \"nova-api-0\" (UID: \"2c70468e-4a71-405f-9171-9246f62aec11\") " pod="openstack/nova-api-0" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.969738 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c70468e-4a71-405f-9171-9246f62aec11-config-data\") pod \"nova-api-0\" (UID: \"2c70468e-4a71-405f-9171-9246f62aec11\") " pod="openstack/nova-api-0" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.970795 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b86c2c2a-e776-4223-acd0-3a4832b67cb0-config-data\") pod \"nova-scheduler-0\" (UID: \"b86c2c2a-e776-4223-acd0-3a4832b67cb0\") " pod="openstack/nova-scheduler-0" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.978426 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b86c2c2a-e776-4223-acd0-3a4832b67cb0-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b86c2c2a-e776-4223-acd0-3a4832b67cb0\") " pod="openstack/nova-scheduler-0" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.981008 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/2c70468e-4a71-405f-9171-9246f62aec11-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2c70468e-4a71-405f-9171-9246f62aec11\") " pod="openstack/nova-api-0" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.984407 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w6gkt\" (UniqueName: \"kubernetes.io/projected/2c70468e-4a71-405f-9171-9246f62aec11-kube-api-access-w6gkt\") pod \"nova-api-0\" (UID: \"2c70468e-4a71-405f-9171-9246f62aec11\") " pod="openstack/nova-api-0" Oct 10 16:52:42 crc kubenswrapper[4799]: I1010 16:52:42.988243 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqcj5\" (UniqueName: \"kubernetes.io/projected/b86c2c2a-e776-4223-acd0-3a4832b67cb0-kube-api-access-dqcj5\") pod \"nova-scheduler-0\" (UID: \"b86c2c2a-e776-4223-acd0-3a4832b67cb0\") " pod="openstack/nova-scheduler-0" Oct 10 16:52:43 crc kubenswrapper[4799]: I1010 16:52:43.065624 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 10 16:52:43 crc kubenswrapper[4799]: I1010 16:52:43.089636 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 10 16:52:43 crc kubenswrapper[4799]: I1010 16:52:43.412938 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899" path="/var/lib/kubelet/pods/e7e4c0b7-9b70-46eb-8c7f-9ec17e0ec899/volumes" Oct 10 16:52:43 crc kubenswrapper[4799]: I1010 16:52:43.413905 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0e73585-38a6-4a89-9ce7-30bd4ea7fe71" path="/var/lib/kubelet/pods/f0e73585-38a6-4a89-9ce7-30bd4ea7fe71/volumes" Oct 10 16:52:43 crc kubenswrapper[4799]: I1010 16:52:43.556242 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 10 16:52:43 crc kubenswrapper[4799]: I1010 16:52:43.586621 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 10 16:52:43 crc kubenswrapper[4799]: I1010 16:52:43.620413 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 10 16:52:43 crc kubenswrapper[4799]: I1010 16:52:43.620915 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="55f598f2-d46d-4810-9b39-315e6d90221a" containerName="kube-state-metrics" containerID="cri-o://7eb5912860b51cfc7a126e352bd00277971d2df409a2d5e241478f27fcf39ec0" gracePeriod=30 Oct 10 16:52:43 crc kubenswrapper[4799]: I1010 16:52:43.685455 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b86c2c2a-e776-4223-acd0-3a4832b67cb0","Type":"ContainerStarted","Data":"0d258efde3054d8a9370f4fa8a9f0fb32c81919613506cd1232443da82e4aeb5"} Oct 10 16:52:43 crc kubenswrapper[4799]: I1010 16:52:43.686705 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2c70468e-4a71-405f-9171-9246f62aec11","Type":"ContainerStarted","Data":"fe52afb72d64bfe0d045c17b174ae9447ef50652e6ec84c2cf50412b096de6d1"} Oct 10 16:52:44 crc kubenswrapper[4799]: I1010 16:52:44.053146 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 10 16:52:44 crc kubenswrapper[4799]: I1010 16:52:44.193051 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qmzzr\" (UniqueName: \"kubernetes.io/projected/55f598f2-d46d-4810-9b39-315e6d90221a-kube-api-access-qmzzr\") pod \"55f598f2-d46d-4810-9b39-315e6d90221a\" (UID: \"55f598f2-d46d-4810-9b39-315e6d90221a\") " Oct 10 16:52:44 crc kubenswrapper[4799]: I1010 16:52:44.196542 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55f598f2-d46d-4810-9b39-315e6d90221a-kube-api-access-qmzzr" (OuterVolumeSpecName: "kube-api-access-qmzzr") pod "55f598f2-d46d-4810-9b39-315e6d90221a" (UID: "55f598f2-d46d-4810-9b39-315e6d90221a"). InnerVolumeSpecName "kube-api-access-qmzzr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:52:44 crc kubenswrapper[4799]: I1010 16:52:44.295252 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qmzzr\" (UniqueName: \"kubernetes.io/projected/55f598f2-d46d-4810-9b39-315e6d90221a-kube-api-access-qmzzr\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:44 crc kubenswrapper[4799]: I1010 16:52:44.703563 4799 generic.go:334] "Generic (PLEG): container finished" podID="55f598f2-d46d-4810-9b39-315e6d90221a" containerID="7eb5912860b51cfc7a126e352bd00277971d2df409a2d5e241478f27fcf39ec0" exitCode=2 Oct 10 16:52:44 crc kubenswrapper[4799]: I1010 16:52:44.703705 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 10 16:52:44 crc kubenswrapper[4799]: I1010 16:52:44.703706 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"55f598f2-d46d-4810-9b39-315e6d90221a","Type":"ContainerDied","Data":"7eb5912860b51cfc7a126e352bd00277971d2df409a2d5e241478f27fcf39ec0"} Oct 10 16:52:44 crc kubenswrapper[4799]: I1010 16:52:44.703795 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"55f598f2-d46d-4810-9b39-315e6d90221a","Type":"ContainerDied","Data":"8d23b1caa185848314227f8c56ea77f53784e096cee997d6152c271a1935c0ed"} Oct 10 16:52:44 crc kubenswrapper[4799]: I1010 16:52:44.703816 4799 scope.go:117] "RemoveContainer" containerID="7eb5912860b51cfc7a126e352bd00277971d2df409a2d5e241478f27fcf39ec0" Oct 10 16:52:44 crc kubenswrapper[4799]: I1010 16:52:44.711352 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b86c2c2a-e776-4223-acd0-3a4832b67cb0","Type":"ContainerStarted","Data":"683c1d1b9da069d04a5ff9242785ac2d1ff7be01587f4c72dd9da5b4555f4926"} Oct 10 16:52:44 crc kubenswrapper[4799]: I1010 16:52:44.716643 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2c70468e-4a71-405f-9171-9246f62aec11","Type":"ContainerStarted","Data":"c3b98dfa47145b9b453ce0521311875ad5f48cae96b9f8f78a53d856bf09adce"} Oct 10 16:52:44 crc kubenswrapper[4799]: I1010 16:52:44.716677 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2c70468e-4a71-405f-9171-9246f62aec11","Type":"ContainerStarted","Data":"566a37796a21dfea1dedc2df7d9a7b47ffce31cd1bf00e70fbadeaea806938ca"} Oct 10 16:52:44 crc kubenswrapper[4799]: I1010 16:52:44.732061 4799 scope.go:117] "RemoveContainer" containerID="7eb5912860b51cfc7a126e352bd00277971d2df409a2d5e241478f27fcf39ec0" Oct 10 16:52:44 crc kubenswrapper[4799]: E1010 16:52:44.732717 4799 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7eb5912860b51cfc7a126e352bd00277971d2df409a2d5e241478f27fcf39ec0\": container with ID starting with 7eb5912860b51cfc7a126e352bd00277971d2df409a2d5e241478f27fcf39ec0 not found: ID does not exist" containerID="7eb5912860b51cfc7a126e352bd00277971d2df409a2d5e241478f27fcf39ec0" Oct 10 16:52:44 crc kubenswrapper[4799]: I1010 16:52:44.732787 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7eb5912860b51cfc7a126e352bd00277971d2df409a2d5e241478f27fcf39ec0"} err="failed to get container status \"7eb5912860b51cfc7a126e352bd00277971d2df409a2d5e241478f27fcf39ec0\": rpc error: code = NotFound desc = could not find container \"7eb5912860b51cfc7a126e352bd00277971d2df409a2d5e241478f27fcf39ec0\": container with ID starting with 7eb5912860b51cfc7a126e352bd00277971d2df409a2d5e241478f27fcf39ec0 not found: ID does not exist" Oct 10 16:52:44 crc kubenswrapper[4799]: I1010 16:52:44.733349 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.733338724 podStartE2EDuration="2.733338724s" podCreationTimestamp="2025-10-10 16:52:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:52:44.732512273 +0000 UTC m=+1258.240836408" watchObservedRunningTime="2025-10-10 16:52:44.733338724 +0000 UTC m=+1258.241662839" Oct 10 16:52:44 crc kubenswrapper[4799]: I1010 16:52:44.753790 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 10 16:52:44 crc kubenswrapper[4799]: I1010 16:52:44.763985 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 10 16:52:44 crc kubenswrapper[4799]: I1010 16:52:44.776240 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 10 16:52:44 crc kubenswrapper[4799]: E1010 16:52:44.776707 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55f598f2-d46d-4810-9b39-315e6d90221a" containerName="kube-state-metrics" Oct 10 16:52:44 crc kubenswrapper[4799]: I1010 16:52:44.776731 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="55f598f2-d46d-4810-9b39-315e6d90221a" containerName="kube-state-metrics" Oct 10 16:52:44 crc kubenswrapper[4799]: I1010 16:52:44.777016 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="55f598f2-d46d-4810-9b39-315e6d90221a" containerName="kube-state-metrics" Oct 10 16:52:44 crc kubenswrapper[4799]: I1010 16:52:44.777807 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 10 16:52:44 crc kubenswrapper[4799]: I1010 16:52:44.779305 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Oct 10 16:52:44 crc kubenswrapper[4799]: I1010 16:52:44.783530 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Oct 10 16:52:44 crc kubenswrapper[4799]: I1010 16:52:44.785408 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.785391355 podStartE2EDuration="2.785391355s" podCreationTimestamp="2025-10-10 16:52:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:52:44.769418132 +0000 UTC m=+1258.277742247" watchObservedRunningTime="2025-10-10 16:52:44.785391355 +0000 UTC m=+1258.293715470" Oct 10 16:52:44 crc kubenswrapper[4799]: I1010 16:52:44.805178 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 10 16:52:44 crc kubenswrapper[4799]: I1010 16:52:44.909145 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/69aa641a-13ff-4f65-b2ea-7fee3ad42134-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"69aa641a-13ff-4f65-b2ea-7fee3ad42134\") " pod="openstack/kube-state-metrics-0" Oct 10 16:52:44 crc kubenswrapper[4799]: I1010 16:52:44.909265 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7wqb9\" (UniqueName: \"kubernetes.io/projected/69aa641a-13ff-4f65-b2ea-7fee3ad42134-kube-api-access-7wqb9\") pod \"kube-state-metrics-0\" (UID: \"69aa641a-13ff-4f65-b2ea-7fee3ad42134\") " pod="openstack/kube-state-metrics-0" Oct 10 16:52:44 crc kubenswrapper[4799]: I1010 16:52:44.909359 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/69aa641a-13ff-4f65-b2ea-7fee3ad42134-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"69aa641a-13ff-4f65-b2ea-7fee3ad42134\") " pod="openstack/kube-state-metrics-0" Oct 10 16:52:44 crc kubenswrapper[4799]: I1010 16:52:44.909457 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69aa641a-13ff-4f65-b2ea-7fee3ad42134-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"69aa641a-13ff-4f65-b2ea-7fee3ad42134\") " pod="openstack/kube-state-metrics-0" Oct 10 16:52:45 crc kubenswrapper[4799]: I1010 16:52:45.010855 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/69aa641a-13ff-4f65-b2ea-7fee3ad42134-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"69aa641a-13ff-4f65-b2ea-7fee3ad42134\") " pod="openstack/kube-state-metrics-0" Oct 10 16:52:45 crc kubenswrapper[4799]: I1010 16:52:45.011008 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69aa641a-13ff-4f65-b2ea-7fee3ad42134-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"69aa641a-13ff-4f65-b2ea-7fee3ad42134\") " pod="openstack/kube-state-metrics-0" Oct 10 16:52:45 crc 
kubenswrapper[4799]: I1010 16:52:45.011077 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/69aa641a-13ff-4f65-b2ea-7fee3ad42134-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"69aa641a-13ff-4f65-b2ea-7fee3ad42134\") " pod="openstack/kube-state-metrics-0" Oct 10 16:52:45 crc kubenswrapper[4799]: I1010 16:52:45.011193 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7wqb9\" (UniqueName: \"kubernetes.io/projected/69aa641a-13ff-4f65-b2ea-7fee3ad42134-kube-api-access-7wqb9\") pod \"kube-state-metrics-0\" (UID: \"69aa641a-13ff-4f65-b2ea-7fee3ad42134\") " pod="openstack/kube-state-metrics-0" Oct 10 16:52:45 crc kubenswrapper[4799]: I1010 16:52:45.016458 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/69aa641a-13ff-4f65-b2ea-7fee3ad42134-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"69aa641a-13ff-4f65-b2ea-7fee3ad42134\") " pod="openstack/kube-state-metrics-0" Oct 10 16:52:45 crc kubenswrapper[4799]: I1010 16:52:45.018180 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/69aa641a-13ff-4f65-b2ea-7fee3ad42134-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"69aa641a-13ff-4f65-b2ea-7fee3ad42134\") " pod="openstack/kube-state-metrics-0" Oct 10 16:52:45 crc kubenswrapper[4799]: I1010 16:52:45.026518 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69aa641a-13ff-4f65-b2ea-7fee3ad42134-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"69aa641a-13ff-4f65-b2ea-7fee3ad42134\") " pod="openstack/kube-state-metrics-0" Oct 10 16:52:45 crc kubenswrapper[4799]: I1010 16:52:45.033700 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7wqb9\" (UniqueName: \"kubernetes.io/projected/69aa641a-13ff-4f65-b2ea-7fee3ad42134-kube-api-access-7wqb9\") pod \"kube-state-metrics-0\" (UID: \"69aa641a-13ff-4f65-b2ea-7fee3ad42134\") " pod="openstack/kube-state-metrics-0" Oct 10 16:52:45 crc kubenswrapper[4799]: I1010 16:52:45.094151 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 10 16:52:45 crc kubenswrapper[4799]: I1010 16:52:45.416594 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55f598f2-d46d-4810-9b39-315e6d90221a" path="/var/lib/kubelet/pods/55f598f2-d46d-4810-9b39-315e6d90221a/volumes" Oct 10 16:52:45 crc kubenswrapper[4799]: I1010 16:52:45.504290 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:52:45 crc kubenswrapper[4799]: I1010 16:52:45.504543 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ea58536a-f98a-46dd-b3a9-90612fe9a438" containerName="ceilometer-central-agent" containerID="cri-o://382f609b14ca6577c1d275d1762255d6754d3113ac3741fae5f0bb95acfa7851" gracePeriod=30 Oct 10 16:52:45 crc kubenswrapper[4799]: I1010 16:52:45.504595 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ea58536a-f98a-46dd-b3a9-90612fe9a438" containerName="sg-core" containerID="cri-o://0685ce541d3855e534e7fa279df6a22b07a42590b5dd25b6669dafdf92acd8bf" gracePeriod=30 Oct 10 16:52:45 crc kubenswrapper[4799]: I1010 16:52:45.504670 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ea58536a-f98a-46dd-b3a9-90612fe9a438" containerName="proxy-httpd" containerID="cri-o://79de021fa6d1b97a7f1f765431e167489c1b950430a175cb7a0f6addf9e70ac2" gracePeriod=30 Oct 10 16:52:45 crc kubenswrapper[4799]: I1010 16:52:45.504715 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ea58536a-f98a-46dd-b3a9-90612fe9a438" containerName="ceilometer-notification-agent" containerID="cri-o://5ca541c2236fe349c764f953bc2a4d10b8a704c1db2fef0341deb5eb558cd280" gracePeriod=30 Oct 10 16:52:45 crc kubenswrapper[4799]: I1010 16:52:45.562021 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 10 16:52:45 crc kubenswrapper[4799]: I1010 16:52:45.741095 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"69aa641a-13ff-4f65-b2ea-7fee3ad42134","Type":"ContainerStarted","Data":"9c17349ea4964f7a71e4a61b06276dec0451a6ac1845317abbf909498f2ec2d6"} Oct 10 16:52:45 crc kubenswrapper[4799]: I1010 16:52:45.745475 4799 generic.go:334] "Generic (PLEG): container finished" podID="ea58536a-f98a-46dd-b3a9-90612fe9a438" containerID="79de021fa6d1b97a7f1f765431e167489c1b950430a175cb7a0f6addf9e70ac2" exitCode=0 Oct 10 16:52:45 crc kubenswrapper[4799]: I1010 16:52:45.745503 4799 generic.go:334] "Generic (PLEG): container finished" podID="ea58536a-f98a-46dd-b3a9-90612fe9a438" containerID="0685ce541d3855e534e7fa279df6a22b07a42590b5dd25b6669dafdf92acd8bf" exitCode=2 Oct 10 16:52:45 crc kubenswrapper[4799]: I1010 16:52:45.745549 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ea58536a-f98a-46dd-b3a9-90612fe9a438","Type":"ContainerDied","Data":"79de021fa6d1b97a7f1f765431e167489c1b950430a175cb7a0f6addf9e70ac2"} Oct 10 16:52:45 crc kubenswrapper[4799]: I1010 16:52:45.745578 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ea58536a-f98a-46dd-b3a9-90612fe9a438","Type":"ContainerDied","Data":"0685ce541d3855e534e7fa279df6a22b07a42590b5dd25b6669dafdf92acd8bf"} Oct 10 16:52:46 crc kubenswrapper[4799]: I1010 16:52:46.760554 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/kube-state-metrics-0" event={"ID":"69aa641a-13ff-4f65-b2ea-7fee3ad42134","Type":"ContainerStarted","Data":"9a73e41efcf012c81cfb3fdb00ec877a3a4f57b043b7fb464fdebcd73d9d80d1"} Oct 10 16:52:46 crc kubenswrapper[4799]: I1010 16:52:46.761108 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Oct 10 16:52:46 crc kubenswrapper[4799]: I1010 16:52:46.764104 4799 generic.go:334] "Generic (PLEG): container finished" podID="ea58536a-f98a-46dd-b3a9-90612fe9a438" containerID="382f609b14ca6577c1d275d1762255d6754d3113ac3741fae5f0bb95acfa7851" exitCode=0 Oct 10 16:52:46 crc kubenswrapper[4799]: I1010 16:52:46.764144 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ea58536a-f98a-46dd-b3a9-90612fe9a438","Type":"ContainerDied","Data":"382f609b14ca6577c1d275d1762255d6754d3113ac3741fae5f0bb95acfa7851"} Oct 10 16:52:46 crc kubenswrapper[4799]: I1010 16:52:46.782079 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.39579305 podStartE2EDuration="2.782063531s" podCreationTimestamp="2025-10-10 16:52:44 +0000 UTC" firstStartedPulling="2025-10-10 16:52:45.555936819 +0000 UTC m=+1259.064260944" lastFinishedPulling="2025-10-10 16:52:45.94220731 +0000 UTC m=+1259.450531425" observedRunningTime="2025-10-10 16:52:46.780532603 +0000 UTC m=+1260.288856718" watchObservedRunningTime="2025-10-10 16:52:46.782063531 +0000 UTC m=+1260.290387646" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.090542 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.439180 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.576534 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea58536a-f98a-46dd-b3a9-90612fe9a438-combined-ca-bundle\") pod \"ea58536a-f98a-46dd-b3a9-90612fe9a438\" (UID: \"ea58536a-f98a-46dd-b3a9-90612fe9a438\") " Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.576607 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea58536a-f98a-46dd-b3a9-90612fe9a438-config-data\") pod \"ea58536a-f98a-46dd-b3a9-90612fe9a438\" (UID: \"ea58536a-f98a-46dd-b3a9-90612fe9a438\") " Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.576779 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ea58536a-f98a-46dd-b3a9-90612fe9a438-run-httpd\") pod \"ea58536a-f98a-46dd-b3a9-90612fe9a438\" (UID: \"ea58536a-f98a-46dd-b3a9-90612fe9a438\") " Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.576835 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ea58536a-f98a-46dd-b3a9-90612fe9a438-log-httpd\") pod \"ea58536a-f98a-46dd-b3a9-90612fe9a438\" (UID: \"ea58536a-f98a-46dd-b3a9-90612fe9a438\") " Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.576873 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ea58536a-f98a-46dd-b3a9-90612fe9a438-sg-core-conf-yaml\") pod \"ea58536a-f98a-46dd-b3a9-90612fe9a438\" (UID: \"ea58536a-f98a-46dd-b3a9-90612fe9a438\") " Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.576895 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea58536a-f98a-46dd-b3a9-90612fe9a438-scripts\") pod \"ea58536a-f98a-46dd-b3a9-90612fe9a438\" (UID: \"ea58536a-f98a-46dd-b3a9-90612fe9a438\") " Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.576942 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7cndq\" (UniqueName: \"kubernetes.io/projected/ea58536a-f98a-46dd-b3a9-90612fe9a438-kube-api-access-7cndq\") pod \"ea58536a-f98a-46dd-b3a9-90612fe9a438\" (UID: \"ea58536a-f98a-46dd-b3a9-90612fe9a438\") " Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.577817 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea58536a-f98a-46dd-b3a9-90612fe9a438-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ea58536a-f98a-46dd-b3a9-90612fe9a438" (UID: "ea58536a-f98a-46dd-b3a9-90612fe9a438"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.579111 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea58536a-f98a-46dd-b3a9-90612fe9a438-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ea58536a-f98a-46dd-b3a9-90612fe9a438" (UID: "ea58536a-f98a-46dd-b3a9-90612fe9a438"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.584157 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea58536a-f98a-46dd-b3a9-90612fe9a438-scripts" (OuterVolumeSpecName: "scripts") pod "ea58536a-f98a-46dd-b3a9-90612fe9a438" (UID: "ea58536a-f98a-46dd-b3a9-90612fe9a438"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.586720 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea58536a-f98a-46dd-b3a9-90612fe9a438-kube-api-access-7cndq" (OuterVolumeSpecName: "kube-api-access-7cndq") pod "ea58536a-f98a-46dd-b3a9-90612fe9a438" (UID: "ea58536a-f98a-46dd-b3a9-90612fe9a438"). InnerVolumeSpecName "kube-api-access-7cndq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.608728 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea58536a-f98a-46dd-b3a9-90612fe9a438-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ea58536a-f98a-46dd-b3a9-90612fe9a438" (UID: "ea58536a-f98a-46dd-b3a9-90612fe9a438"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.675530 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea58536a-f98a-46dd-b3a9-90612fe9a438-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ea58536a-f98a-46dd-b3a9-90612fe9a438" (UID: "ea58536a-f98a-46dd-b3a9-90612fe9a438"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.678868 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea58536a-f98a-46dd-b3a9-90612fe9a438-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.678897 4799 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ea58536a-f98a-46dd-b3a9-90612fe9a438-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.678910 4799 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ea58536a-f98a-46dd-b3a9-90612fe9a438-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.678922 4799 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ea58536a-f98a-46dd-b3a9-90612fe9a438-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.678935 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea58536a-f98a-46dd-b3a9-90612fe9a438-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.678946 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7cndq\" (UniqueName: \"kubernetes.io/projected/ea58536a-f98a-46dd-b3a9-90612fe9a438-kube-api-access-7cndq\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.695897 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/secret/ea58536a-f98a-46dd-b3a9-90612fe9a438-config-data" (OuterVolumeSpecName: "config-data") pod "ea58536a-f98a-46dd-b3a9-90612fe9a438" (UID: "ea58536a-f98a-46dd-b3a9-90612fe9a438"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.780164 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea58536a-f98a-46dd-b3a9-90612fe9a438-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.784788 4799 generic.go:334] "Generic (PLEG): container finished" podID="ea58536a-f98a-46dd-b3a9-90612fe9a438" containerID="5ca541c2236fe349c764f953bc2a4d10b8a704c1db2fef0341deb5eb558cd280" exitCode=0 Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.784829 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ea58536a-f98a-46dd-b3a9-90612fe9a438","Type":"ContainerDied","Data":"5ca541c2236fe349c764f953bc2a4d10b8a704c1db2fef0341deb5eb558cd280"} Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.784855 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ea58536a-f98a-46dd-b3a9-90612fe9a438","Type":"ContainerDied","Data":"1c51338ba739155587f6d018ed5e71fd76f096520548cf1e94cd33f19707cd1b"} Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.784872 4799 scope.go:117] "RemoveContainer" containerID="79de021fa6d1b97a7f1f765431e167489c1b950430a175cb7a0f6addf9e70ac2" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.785203 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.807671 4799 scope.go:117] "RemoveContainer" containerID="0685ce541d3855e534e7fa279df6a22b07a42590b5dd25b6669dafdf92acd8bf" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.827328 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.837799 4799 scope.go:117] "RemoveContainer" containerID="5ca541c2236fe349c764f953bc2a4d10b8a704c1db2fef0341deb5eb558cd280" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.841051 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.849425 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:52:48 crc kubenswrapper[4799]: E1010 16:52:48.849822 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea58536a-f98a-46dd-b3a9-90612fe9a438" containerName="proxy-httpd" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.849840 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea58536a-f98a-46dd-b3a9-90612fe9a438" containerName="proxy-httpd" Oct 10 16:52:48 crc kubenswrapper[4799]: E1010 16:52:48.849853 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea58536a-f98a-46dd-b3a9-90612fe9a438" containerName="ceilometer-central-agent" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.849860 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea58536a-f98a-46dd-b3a9-90612fe9a438" containerName="ceilometer-central-agent" Oct 10 16:52:48 crc kubenswrapper[4799]: E1010 16:52:48.849878 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea58536a-f98a-46dd-b3a9-90612fe9a438" containerName="sg-core" 
Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.849885 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea58536a-f98a-46dd-b3a9-90612fe9a438" containerName="sg-core" Oct 10 16:52:48 crc kubenswrapper[4799]: E1010 16:52:48.849910 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea58536a-f98a-46dd-b3a9-90612fe9a438" containerName="ceilometer-notification-agent" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.849918 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea58536a-f98a-46dd-b3a9-90612fe9a438" containerName="ceilometer-notification-agent" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.850113 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea58536a-f98a-46dd-b3a9-90612fe9a438" containerName="ceilometer-central-agent" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.850128 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea58536a-f98a-46dd-b3a9-90612fe9a438" containerName="proxy-httpd" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.850153 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea58536a-f98a-46dd-b3a9-90612fe9a438" containerName="sg-core" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.850163 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea58536a-f98a-46dd-b3a9-90612fe9a438" containerName="ceilometer-notification-agent" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.852098 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.854312 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.861533 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.876984 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.889571 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.896171 4799 scope.go:117] "RemoveContainer" containerID="382f609b14ca6577c1d275d1762255d6754d3113ac3741fae5f0bb95acfa7851" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.919416 4799 scope.go:117] "RemoveContainer" containerID="79de021fa6d1b97a7f1f765431e167489c1b950430a175cb7a0f6addf9e70ac2" Oct 10 16:52:48 crc kubenswrapper[4799]: E1010 16:52:48.919828 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79de021fa6d1b97a7f1f765431e167489c1b950430a175cb7a0f6addf9e70ac2\": container with ID starting with 79de021fa6d1b97a7f1f765431e167489c1b950430a175cb7a0f6addf9e70ac2 not found: ID does not exist" containerID="79de021fa6d1b97a7f1f765431e167489c1b950430a175cb7a0f6addf9e70ac2" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.919866 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79de021fa6d1b97a7f1f765431e167489c1b950430a175cb7a0f6addf9e70ac2"} err="failed to get container status \"79de021fa6d1b97a7f1f765431e167489c1b950430a175cb7a0f6addf9e70ac2\": rpc error: code = NotFound desc = could not find container \"79de021fa6d1b97a7f1f765431e167489c1b950430a175cb7a0f6addf9e70ac2\": container with 
ID starting with 79de021fa6d1b97a7f1f765431e167489c1b950430a175cb7a0f6addf9e70ac2 not found: ID does not exist" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.919891 4799 scope.go:117] "RemoveContainer" containerID="0685ce541d3855e534e7fa279df6a22b07a42590b5dd25b6669dafdf92acd8bf" Oct 10 16:52:48 crc kubenswrapper[4799]: E1010 16:52:48.920149 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0685ce541d3855e534e7fa279df6a22b07a42590b5dd25b6669dafdf92acd8bf\": container with ID starting with 0685ce541d3855e534e7fa279df6a22b07a42590b5dd25b6669dafdf92acd8bf not found: ID does not exist" containerID="0685ce541d3855e534e7fa279df6a22b07a42590b5dd25b6669dafdf92acd8bf" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.920176 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0685ce541d3855e534e7fa279df6a22b07a42590b5dd25b6669dafdf92acd8bf"} err="failed to get container status \"0685ce541d3855e534e7fa279df6a22b07a42590b5dd25b6669dafdf92acd8bf\": rpc error: code = NotFound desc = could not find container \"0685ce541d3855e534e7fa279df6a22b07a42590b5dd25b6669dafdf92acd8bf\": container with ID starting with 0685ce541d3855e534e7fa279df6a22b07a42590b5dd25b6669dafdf92acd8bf not found: ID does not exist" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.920192 4799 scope.go:117] "RemoveContainer" containerID="5ca541c2236fe349c764f953bc2a4d10b8a704c1db2fef0341deb5eb558cd280" Oct 10 16:52:48 crc kubenswrapper[4799]: E1010 16:52:48.920494 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ca541c2236fe349c764f953bc2a4d10b8a704c1db2fef0341deb5eb558cd280\": container with ID starting with 5ca541c2236fe349c764f953bc2a4d10b8a704c1db2fef0341deb5eb558cd280 not found: ID does not exist" containerID="5ca541c2236fe349c764f953bc2a4d10b8a704c1db2fef0341deb5eb558cd280" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.920525 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ca541c2236fe349c764f953bc2a4d10b8a704c1db2fef0341deb5eb558cd280"} err="failed to get container status \"5ca541c2236fe349c764f953bc2a4d10b8a704c1db2fef0341deb5eb558cd280\": rpc error: code = NotFound desc = could not find container \"5ca541c2236fe349c764f953bc2a4d10b8a704c1db2fef0341deb5eb558cd280\": container with ID starting with 5ca541c2236fe349c764f953bc2a4d10b8a704c1db2fef0341deb5eb558cd280 not found: ID does not exist" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.920550 4799 scope.go:117] "RemoveContainer" containerID="382f609b14ca6577c1d275d1762255d6754d3113ac3741fae5f0bb95acfa7851" Oct 10 16:52:48 crc kubenswrapper[4799]: E1010 16:52:48.920831 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"382f609b14ca6577c1d275d1762255d6754d3113ac3741fae5f0bb95acfa7851\": container with ID starting with 382f609b14ca6577c1d275d1762255d6754d3113ac3741fae5f0bb95acfa7851 not found: ID does not exist" containerID="382f609b14ca6577c1d275d1762255d6754d3113ac3741fae5f0bb95acfa7851" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.920858 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"382f609b14ca6577c1d275d1762255d6754d3113ac3741fae5f0bb95acfa7851"} err="failed to get container status 
\"382f609b14ca6577c1d275d1762255d6754d3113ac3741fae5f0bb95acfa7851\": rpc error: code = NotFound desc = could not find container \"382f609b14ca6577c1d275d1762255d6754d3113ac3741fae5f0bb95acfa7851\": container with ID starting with 382f609b14ca6577c1d275d1762255d6754d3113ac3741fae5f0bb95acfa7851 not found: ID does not exist" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.951950 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.985789 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/83d0f03e-1c6a-4559-940c-312be653c7c3-scripts\") pod \"ceilometer-0\" (UID: \"83d0f03e-1c6a-4559-940c-312be653c7c3\") " pod="openstack/ceilometer-0" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.985922 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/83d0f03e-1c6a-4559-940c-312be653c7c3-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"83d0f03e-1c6a-4559-940c-312be653c7c3\") " pod="openstack/ceilometer-0" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.986032 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-flfxw\" (UniqueName: \"kubernetes.io/projected/83d0f03e-1c6a-4559-940c-312be653c7c3-kube-api-access-flfxw\") pod \"ceilometer-0\" (UID: \"83d0f03e-1c6a-4559-940c-312be653c7c3\") " pod="openstack/ceilometer-0" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.986080 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/83d0f03e-1c6a-4559-940c-312be653c7c3-log-httpd\") pod \"ceilometer-0\" (UID: \"83d0f03e-1c6a-4559-940c-312be653c7c3\") " pod="openstack/ceilometer-0" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.986122 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/83d0f03e-1c6a-4559-940c-312be653c7c3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"83d0f03e-1c6a-4559-940c-312be653c7c3\") " pod="openstack/ceilometer-0" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.986149 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83d0f03e-1c6a-4559-940c-312be653c7c3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"83d0f03e-1c6a-4559-940c-312be653c7c3\") " pod="openstack/ceilometer-0" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.986207 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/83d0f03e-1c6a-4559-940c-312be653c7c3-run-httpd\") pod \"ceilometer-0\" (UID: \"83d0f03e-1c6a-4559-940c-312be653c7c3\") " pod="openstack/ceilometer-0" Oct 10 16:52:48 crc kubenswrapper[4799]: I1010 16:52:48.986233 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83d0f03e-1c6a-4559-940c-312be653c7c3-config-data\") pod \"ceilometer-0\" (UID: \"83d0f03e-1c6a-4559-940c-312be653c7c3\") " pod="openstack/ceilometer-0" Oct 10 16:52:49 crc kubenswrapper[4799]: I1010 
16:52:49.088364 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/83d0f03e-1c6a-4559-940c-312be653c7c3-run-httpd\") pod \"ceilometer-0\" (UID: \"83d0f03e-1c6a-4559-940c-312be653c7c3\") " pod="openstack/ceilometer-0" Oct 10 16:52:49 crc kubenswrapper[4799]: I1010 16:52:49.088431 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83d0f03e-1c6a-4559-940c-312be653c7c3-config-data\") pod \"ceilometer-0\" (UID: \"83d0f03e-1c6a-4559-940c-312be653c7c3\") " pod="openstack/ceilometer-0" Oct 10 16:52:49 crc kubenswrapper[4799]: I1010 16:52:49.088459 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/83d0f03e-1c6a-4559-940c-312be653c7c3-scripts\") pod \"ceilometer-0\" (UID: \"83d0f03e-1c6a-4559-940c-312be653c7c3\") " pod="openstack/ceilometer-0" Oct 10 16:52:49 crc kubenswrapper[4799]: I1010 16:52:49.088634 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/83d0f03e-1c6a-4559-940c-312be653c7c3-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"83d0f03e-1c6a-4559-940c-312be653c7c3\") " pod="openstack/ceilometer-0" Oct 10 16:52:49 crc kubenswrapper[4799]: I1010 16:52:49.089044 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/83d0f03e-1c6a-4559-940c-312be653c7c3-run-httpd\") pod \"ceilometer-0\" (UID: \"83d0f03e-1c6a-4559-940c-312be653c7c3\") " pod="openstack/ceilometer-0" Oct 10 16:52:49 crc kubenswrapper[4799]: I1010 16:52:49.089240 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-flfxw\" (UniqueName: \"kubernetes.io/projected/83d0f03e-1c6a-4559-940c-312be653c7c3-kube-api-access-flfxw\") pod \"ceilometer-0\" (UID: \"83d0f03e-1c6a-4559-940c-312be653c7c3\") " pod="openstack/ceilometer-0" Oct 10 16:52:49 crc kubenswrapper[4799]: I1010 16:52:49.089347 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/83d0f03e-1c6a-4559-940c-312be653c7c3-log-httpd\") pod \"ceilometer-0\" (UID: \"83d0f03e-1c6a-4559-940c-312be653c7c3\") " pod="openstack/ceilometer-0" Oct 10 16:52:49 crc kubenswrapper[4799]: I1010 16:52:49.089423 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/83d0f03e-1c6a-4559-940c-312be653c7c3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"83d0f03e-1c6a-4559-940c-312be653c7c3\") " pod="openstack/ceilometer-0" Oct 10 16:52:49 crc kubenswrapper[4799]: I1010 16:52:49.089472 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83d0f03e-1c6a-4559-940c-312be653c7c3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"83d0f03e-1c6a-4559-940c-312be653c7c3\") " pod="openstack/ceilometer-0" Oct 10 16:52:49 crc kubenswrapper[4799]: I1010 16:52:49.089548 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/83d0f03e-1c6a-4559-940c-312be653c7c3-log-httpd\") pod \"ceilometer-0\" (UID: \"83d0f03e-1c6a-4559-940c-312be653c7c3\") " pod="openstack/ceilometer-0" Oct 10 16:52:49 crc kubenswrapper[4799]: I1010 16:52:49.092106 4799 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/83d0f03e-1c6a-4559-940c-312be653c7c3-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"83d0f03e-1c6a-4559-940c-312be653c7c3\") " pod="openstack/ceilometer-0" Oct 10 16:52:49 crc kubenswrapper[4799]: I1010 16:52:49.092264 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/83d0f03e-1c6a-4559-940c-312be653c7c3-scripts\") pod \"ceilometer-0\" (UID: \"83d0f03e-1c6a-4559-940c-312be653c7c3\") " pod="openstack/ceilometer-0" Oct 10 16:52:49 crc kubenswrapper[4799]: I1010 16:52:49.092274 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83d0f03e-1c6a-4559-940c-312be653c7c3-config-data\") pod \"ceilometer-0\" (UID: \"83d0f03e-1c6a-4559-940c-312be653c7c3\") " pod="openstack/ceilometer-0" Oct 10 16:52:49 crc kubenswrapper[4799]: I1010 16:52:49.092937 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83d0f03e-1c6a-4559-940c-312be653c7c3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"83d0f03e-1c6a-4559-940c-312be653c7c3\") " pod="openstack/ceilometer-0" Oct 10 16:52:49 crc kubenswrapper[4799]: I1010 16:52:49.093291 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/83d0f03e-1c6a-4559-940c-312be653c7c3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"83d0f03e-1c6a-4559-940c-312be653c7c3\") " pod="openstack/ceilometer-0" Oct 10 16:52:49 crc kubenswrapper[4799]: I1010 16:52:49.109118 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-flfxw\" (UniqueName: \"kubernetes.io/projected/83d0f03e-1c6a-4559-940c-312be653c7c3-kube-api-access-flfxw\") pod \"ceilometer-0\" (UID: \"83d0f03e-1c6a-4559-940c-312be653c7c3\") " pod="openstack/ceilometer-0" Oct 10 16:52:49 crc kubenswrapper[4799]: I1010 16:52:49.186040 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 10 16:52:49 crc kubenswrapper[4799]: I1010 16:52:49.421498 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea58536a-f98a-46dd-b3a9-90612fe9a438" path="/var/lib/kubelet/pods/ea58536a-f98a-46dd-b3a9-90612fe9a438/volumes" Oct 10 16:52:49 crc kubenswrapper[4799]: W1010 16:52:49.631542 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod83d0f03e_1c6a_4559_940c_312be653c7c3.slice/crio-c2d6ee29905488b8223a554e503cf14d8fe6ff5ab2b583ef9189466fb7ff8989 WatchSource:0}: Error finding container c2d6ee29905488b8223a554e503cf14d8fe6ff5ab2b583ef9189466fb7ff8989: Status 404 returned error can't find the container with id c2d6ee29905488b8223a554e503cf14d8fe6ff5ab2b583ef9189466fb7ff8989 Oct 10 16:52:49 crc kubenswrapper[4799]: I1010 16:52:49.635872 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:52:49 crc kubenswrapper[4799]: I1010 16:52:49.796179 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"83d0f03e-1c6a-4559-940c-312be653c7c3","Type":"ContainerStarted","Data":"c2d6ee29905488b8223a554e503cf14d8fe6ff5ab2b583ef9189466fb7ff8989"} Oct 10 16:52:50 crc kubenswrapper[4799]: I1010 16:52:50.808078 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"83d0f03e-1c6a-4559-940c-312be653c7c3","Type":"ContainerStarted","Data":"225460eb0d91d90328909fe4e27056c7632675d24481ae318706175ae217a850"} Oct 10 16:52:51 crc kubenswrapper[4799]: I1010 16:52:51.819226 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"83d0f03e-1c6a-4559-940c-312be653c7c3","Type":"ContainerStarted","Data":"d0e485d6bf2ba97948b186f7d0f4cb2887563e953b6f33f2edcd98f85503f73a"} Oct 10 16:52:52 crc kubenswrapper[4799]: I1010 16:52:52.832053 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"83d0f03e-1c6a-4559-940c-312be653c7c3","Type":"ContainerStarted","Data":"9b5f6a40528602c70f049ed8e70815b14bf3cfc52939bf60939bb675f80adcd0"} Oct 10 16:52:53 crc kubenswrapper[4799]: I1010 16:52:53.066582 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 10 16:52:53 crc kubenswrapper[4799]: I1010 16:52:53.066646 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 10 16:52:53 crc kubenswrapper[4799]: I1010 16:52:53.090647 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 10 16:52:53 crc kubenswrapper[4799]: I1010 16:52:53.118814 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 10 16:52:53 crc kubenswrapper[4799]: I1010 16:52:53.859515 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"83d0f03e-1c6a-4559-940c-312be653c7c3","Type":"ContainerStarted","Data":"853ab9f6c0dab10928372d18487047719af14f07aeaaba927bd1569a2b81c7bd"} Oct 10 16:52:53 crc kubenswrapper[4799]: I1010 16:52:53.863167 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 10 16:52:53 crc kubenswrapper[4799]: I1010 16:52:53.907517 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 10 16:52:53 crc kubenswrapper[4799]: I1010 
16:52:53.925301 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.374102719 podStartE2EDuration="5.9252738s" podCreationTimestamp="2025-10-10 16:52:48 +0000 UTC" firstStartedPulling="2025-10-10 16:52:49.6338037 +0000 UTC m=+1263.142127825" lastFinishedPulling="2025-10-10 16:52:53.184974791 +0000 UTC m=+1266.693298906" observedRunningTime="2025-10-10 16:52:53.894316368 +0000 UTC m=+1267.402640493" watchObservedRunningTime="2025-10-10 16:52:53.9252738 +0000 UTC m=+1267.433597915" Oct 10 16:52:54 crc kubenswrapper[4799]: I1010 16:52:54.149130 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="2c70468e-4a71-405f-9171-9246f62aec11" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.197:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 10 16:52:54 crc kubenswrapper[4799]: I1010 16:52:54.149168 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="2c70468e-4a71-405f-9171-9246f62aec11" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.197:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 10 16:52:55 crc kubenswrapper[4799]: I1010 16:52:55.107537 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Oct 10 16:53:00 crc kubenswrapper[4799]: I1010 16:53:00.918276 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 10 16:53:00 crc kubenswrapper[4799]: I1010 16:53:00.927668 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 10 16:53:00 crc kubenswrapper[4799]: I1010 16:53:00.942466 4799 generic.go:334] "Generic (PLEG): container finished" podID="54b8af05-cb42-4c0b-85af-e1cd04c3f5af" containerID="1d7442524389524b37c35f42abf2fbafa97cbf60950462f30223e7c21af358f9" exitCode=137 Oct 10 16:53:00 crc kubenswrapper[4799]: I1010 16:53:00.942530 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"54b8af05-cb42-4c0b-85af-e1cd04c3f5af","Type":"ContainerDied","Data":"1d7442524389524b37c35f42abf2fbafa97cbf60950462f30223e7c21af358f9"} Oct 10 16:53:00 crc kubenswrapper[4799]: I1010 16:53:00.942557 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"54b8af05-cb42-4c0b-85af-e1cd04c3f5af","Type":"ContainerDied","Data":"dbbb78124876a5e03da04fb33480cfa2f49c164ef063758d101d2f699a90c8e9"} Oct 10 16:53:00 crc kubenswrapper[4799]: I1010 16:53:00.942576 4799 scope.go:117] "RemoveContainer" containerID="1d7442524389524b37c35f42abf2fbafa97cbf60950462f30223e7c21af358f9" Oct 10 16:53:00 crc kubenswrapper[4799]: I1010 16:53:00.942702 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 10 16:53:00 crc kubenswrapper[4799]: I1010 16:53:00.972134 4799 generic.go:334] "Generic (PLEG): container finished" podID="e98f1eab-bf7e-4d86-ab62-5294603982ae" containerID="aa85025ce5fb865ee1f7f2d10b63641cfa05cd447835f60d17888291883b56d3" exitCode=137 Oct 10 16:53:00 crc kubenswrapper[4799]: I1010 16:53:00.972179 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e98f1eab-bf7e-4d86-ab62-5294603982ae","Type":"ContainerDied","Data":"aa85025ce5fb865ee1f7f2d10b63641cfa05cd447835f60d17888291883b56d3"} Oct 10 16:53:00 crc kubenswrapper[4799]: I1010 16:53:00.972211 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e98f1eab-bf7e-4d86-ab62-5294603982ae","Type":"ContainerDied","Data":"8f180477fba8651cc60f3dabc12a29b9d3825b0f9476af8e4f00bd21b198f53f"} Oct 10 16:53:00 crc kubenswrapper[4799]: I1010 16:53:00.972283 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.002823 4799 scope.go:117] "RemoveContainer" containerID="1d7442524389524b37c35f42abf2fbafa97cbf60950462f30223e7c21af358f9" Oct 10 16:53:01 crc kubenswrapper[4799]: E1010 16:53:01.003330 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d7442524389524b37c35f42abf2fbafa97cbf60950462f30223e7c21af358f9\": container with ID starting with 1d7442524389524b37c35f42abf2fbafa97cbf60950462f30223e7c21af358f9 not found: ID does not exist" containerID="1d7442524389524b37c35f42abf2fbafa97cbf60950462f30223e7c21af358f9" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.003358 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d7442524389524b37c35f42abf2fbafa97cbf60950462f30223e7c21af358f9"} err="failed to get container status \"1d7442524389524b37c35f42abf2fbafa97cbf60950462f30223e7c21af358f9\": rpc error: code = NotFound desc = could not find container \"1d7442524389524b37c35f42abf2fbafa97cbf60950462f30223e7c21af358f9\": container with ID starting with 1d7442524389524b37c35f42abf2fbafa97cbf60950462f30223e7c21af358f9 not found: ID does not exist" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.003380 4799 scope.go:117] "RemoveContainer" containerID="aa85025ce5fb865ee1f7f2d10b63641cfa05cd447835f60d17888291883b56d3" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.022965 4799 scope.go:117] "RemoveContainer" containerID="7bbf71222455af1fff4ae4868d27ad31019220e3e9b1ab5e5465f8b8c1705c20" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.042899 4799 scope.go:117] "RemoveContainer" containerID="aa85025ce5fb865ee1f7f2d10b63641cfa05cd447835f60d17888291883b56d3" Oct 10 16:53:01 crc kubenswrapper[4799]: E1010 16:53:01.043438 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa85025ce5fb865ee1f7f2d10b63641cfa05cd447835f60d17888291883b56d3\": container with ID starting with aa85025ce5fb865ee1f7f2d10b63641cfa05cd447835f60d17888291883b56d3 not found: ID does not exist" containerID="aa85025ce5fb865ee1f7f2d10b63641cfa05cd447835f60d17888291883b56d3" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.043473 4799 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"aa85025ce5fb865ee1f7f2d10b63641cfa05cd447835f60d17888291883b56d3"} err="failed to get container status \"aa85025ce5fb865ee1f7f2d10b63641cfa05cd447835f60d17888291883b56d3\": rpc error: code = NotFound desc = could not find container \"aa85025ce5fb865ee1f7f2d10b63641cfa05cd447835f60d17888291883b56d3\": container with ID starting with aa85025ce5fb865ee1f7f2d10b63641cfa05cd447835f60d17888291883b56d3 not found: ID does not exist" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.043499 4799 scope.go:117] "RemoveContainer" containerID="7bbf71222455af1fff4ae4868d27ad31019220e3e9b1ab5e5465f8b8c1705c20" Oct 10 16:53:01 crc kubenswrapper[4799]: E1010 16:53:01.043770 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7bbf71222455af1fff4ae4868d27ad31019220e3e9b1ab5e5465f8b8c1705c20\": container with ID starting with 7bbf71222455af1fff4ae4868d27ad31019220e3e9b1ab5e5465f8b8c1705c20 not found: ID does not exist" containerID="7bbf71222455af1fff4ae4868d27ad31019220e3e9b1ab5e5465f8b8c1705c20" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.043797 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7bbf71222455af1fff4ae4868d27ad31019220e3e9b1ab5e5465f8b8c1705c20"} err="failed to get container status \"7bbf71222455af1fff4ae4868d27ad31019220e3e9b1ab5e5465f8b8c1705c20\": rpc error: code = NotFound desc = could not find container \"7bbf71222455af1fff4ae4868d27ad31019220e3e9b1ab5e5465f8b8c1705c20\": container with ID starting with 7bbf71222455af1fff4ae4868d27ad31019220e3e9b1ab5e5465f8b8c1705c20 not found: ID does not exist" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.073383 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e98f1eab-bf7e-4d86-ab62-5294603982ae-combined-ca-bundle\") pod \"e98f1eab-bf7e-4d86-ab62-5294603982ae\" (UID: \"e98f1eab-bf7e-4d86-ab62-5294603982ae\") " Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.073490 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e98f1eab-bf7e-4d86-ab62-5294603982ae-logs\") pod \"e98f1eab-bf7e-4d86-ab62-5294603982ae\" (UID: \"e98f1eab-bf7e-4d86-ab62-5294603982ae\") " Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.073540 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7dn5\" (UniqueName: \"kubernetes.io/projected/e98f1eab-bf7e-4d86-ab62-5294603982ae-kube-api-access-x7dn5\") pod \"e98f1eab-bf7e-4d86-ab62-5294603982ae\" (UID: \"e98f1eab-bf7e-4d86-ab62-5294603982ae\") " Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.073583 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e98f1eab-bf7e-4d86-ab62-5294603982ae-config-data\") pod \"e98f1eab-bf7e-4d86-ab62-5294603982ae\" (UID: \"e98f1eab-bf7e-4d86-ab62-5294603982ae\") " Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.073652 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54b8af05-cb42-4c0b-85af-e1cd04c3f5af-combined-ca-bundle\") pod \"54b8af05-cb42-4c0b-85af-e1cd04c3f5af\" (UID: \"54b8af05-cb42-4c0b-85af-e1cd04c3f5af\") " Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.074173 4799 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e98f1eab-bf7e-4d86-ab62-5294603982ae-logs" (OuterVolumeSpecName: "logs") pod "e98f1eab-bf7e-4d86-ab62-5294603982ae" (UID: "e98f1eab-bf7e-4d86-ab62-5294603982ae"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.074718 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fm6xm\" (UniqueName: \"kubernetes.io/projected/54b8af05-cb42-4c0b-85af-e1cd04c3f5af-kube-api-access-fm6xm\") pod \"54b8af05-cb42-4c0b-85af-e1cd04c3f5af\" (UID: \"54b8af05-cb42-4c0b-85af-e1cd04c3f5af\") " Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.074826 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54b8af05-cb42-4c0b-85af-e1cd04c3f5af-config-data\") pod \"54b8af05-cb42-4c0b-85af-e1cd04c3f5af\" (UID: \"54b8af05-cb42-4c0b-85af-e1cd04c3f5af\") " Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.075325 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e98f1eab-bf7e-4d86-ab62-5294603982ae-logs\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.080354 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e98f1eab-bf7e-4d86-ab62-5294603982ae-kube-api-access-x7dn5" (OuterVolumeSpecName: "kube-api-access-x7dn5") pod "e98f1eab-bf7e-4d86-ab62-5294603982ae" (UID: "e98f1eab-bf7e-4d86-ab62-5294603982ae"). InnerVolumeSpecName "kube-api-access-x7dn5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.080511 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54b8af05-cb42-4c0b-85af-e1cd04c3f5af-kube-api-access-fm6xm" (OuterVolumeSpecName: "kube-api-access-fm6xm") pod "54b8af05-cb42-4c0b-85af-e1cd04c3f5af" (UID: "54b8af05-cb42-4c0b-85af-e1cd04c3f5af"). InnerVolumeSpecName "kube-api-access-fm6xm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.100310 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e98f1eab-bf7e-4d86-ab62-5294603982ae-config-data" (OuterVolumeSpecName: "config-data") pod "e98f1eab-bf7e-4d86-ab62-5294603982ae" (UID: "e98f1eab-bf7e-4d86-ab62-5294603982ae"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.107191 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54b8af05-cb42-4c0b-85af-e1cd04c3f5af-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "54b8af05-cb42-4c0b-85af-e1cd04c3f5af" (UID: "54b8af05-cb42-4c0b-85af-e1cd04c3f5af"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.109496 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54b8af05-cb42-4c0b-85af-e1cd04c3f5af-config-data" (OuterVolumeSpecName: "config-data") pod "54b8af05-cb42-4c0b-85af-e1cd04c3f5af" (UID: "54b8af05-cb42-4c0b-85af-e1cd04c3f5af"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.113674 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e98f1eab-bf7e-4d86-ab62-5294603982ae-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e98f1eab-bf7e-4d86-ab62-5294603982ae" (UID: "e98f1eab-bf7e-4d86-ab62-5294603982ae"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.178364 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fm6xm\" (UniqueName: \"kubernetes.io/projected/54b8af05-cb42-4c0b-85af-e1cd04c3f5af-kube-api-access-fm6xm\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.178411 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54b8af05-cb42-4c0b-85af-e1cd04c3f5af-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.178431 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e98f1eab-bf7e-4d86-ab62-5294603982ae-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.178447 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7dn5\" (UniqueName: \"kubernetes.io/projected/e98f1eab-bf7e-4d86-ab62-5294603982ae-kube-api-access-x7dn5\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.178464 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e98f1eab-bf7e-4d86-ab62-5294603982ae-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.178480 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54b8af05-cb42-4c0b-85af-e1cd04c3f5af-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.278355 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.325098 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.354370 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 10 16:53:01 crc kubenswrapper[4799]: E1010 16:53:01.354872 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e98f1eab-bf7e-4d86-ab62-5294603982ae" containerName="nova-metadata-log" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.354892 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="e98f1eab-bf7e-4d86-ab62-5294603982ae" containerName="nova-metadata-log" Oct 10 16:53:01 crc kubenswrapper[4799]: E1010 16:53:01.354912 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e98f1eab-bf7e-4d86-ab62-5294603982ae" containerName="nova-metadata-metadata" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.354921 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="e98f1eab-bf7e-4d86-ab62-5294603982ae" containerName="nova-metadata-metadata" Oct 10 16:53:01 crc kubenswrapper[4799]: E1010 16:53:01.354946 4799 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="54b8af05-cb42-4c0b-85af-e1cd04c3f5af" containerName="nova-cell1-novncproxy-novncproxy" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.354958 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="54b8af05-cb42-4c0b-85af-e1cd04c3f5af" containerName="nova-cell1-novncproxy-novncproxy" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.355189 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="e98f1eab-bf7e-4d86-ab62-5294603982ae" containerName="nova-metadata-log" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.355208 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="e98f1eab-bf7e-4d86-ab62-5294603982ae" containerName="nova-metadata-metadata" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.355217 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="54b8af05-cb42-4c0b-85af-e1cd04c3f5af" containerName="nova-cell1-novncproxy-novncproxy" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.356163 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.363865 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.364153 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.364334 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.391816 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.415213 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54b8af05-cb42-4c0b-85af-e1cd04c3f5af" path="/var/lib/kubelet/pods/54b8af05-cb42-4c0b-85af-e1cd04c3f5af/volumes" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.416057 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.416097 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.423107 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.425369 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.427302 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.428649 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.441183 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.484387 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zlx6g\" (UniqueName: \"kubernetes.io/projected/785f8ce9-5280-44fe-891c-8162f2fdcd7a-kube-api-access-zlx6g\") pod \"nova-cell1-novncproxy-0\" (UID: \"785f8ce9-5280-44fe-891c-8162f2fdcd7a\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.486314 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/785f8ce9-5280-44fe-891c-8162f2fdcd7a-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"785f8ce9-5280-44fe-891c-8162f2fdcd7a\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.486453 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/785f8ce9-5280-44fe-891c-8162f2fdcd7a-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"785f8ce9-5280-44fe-891c-8162f2fdcd7a\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.486536 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/785f8ce9-5280-44fe-891c-8162f2fdcd7a-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"785f8ce9-5280-44fe-891c-8162f2fdcd7a\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.486601 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/785f8ce9-5280-44fe-891c-8162f2fdcd7a-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"785f8ce9-5280-44fe-891c-8162f2fdcd7a\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.588517 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zlx6g\" (UniqueName: \"kubernetes.io/projected/785f8ce9-5280-44fe-891c-8162f2fdcd7a-kube-api-access-zlx6g\") pod \"nova-cell1-novncproxy-0\" (UID: \"785f8ce9-5280-44fe-891c-8162f2fdcd7a\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.588610 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7r6sp\" (UniqueName: \"kubernetes.io/projected/dc550ba1-d76f-494e-b725-337877360fa7-kube-api-access-7r6sp\") pod \"nova-metadata-0\" (UID: \"dc550ba1-d76f-494e-b725-337877360fa7\") " pod="openstack/nova-metadata-0" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.588741 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/785f8ce9-5280-44fe-891c-8162f2fdcd7a-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"785f8ce9-5280-44fe-891c-8162f2fdcd7a\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.588791 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/785f8ce9-5280-44fe-891c-8162f2fdcd7a-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"785f8ce9-5280-44fe-891c-8162f2fdcd7a\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.588818 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/785f8ce9-5280-44fe-891c-8162f2fdcd7a-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"785f8ce9-5280-44fe-891c-8162f2fdcd7a\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.588841 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc550ba1-d76f-494e-b725-337877360fa7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"dc550ba1-d76f-494e-b725-337877360fa7\") " pod="openstack/nova-metadata-0" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.588859 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/785f8ce9-5280-44fe-891c-8162f2fdcd7a-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"785f8ce9-5280-44fe-891c-8162f2fdcd7a\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.588889 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc550ba1-d76f-494e-b725-337877360fa7-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"dc550ba1-d76f-494e-b725-337877360fa7\") " pod="openstack/nova-metadata-0" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.589468 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc550ba1-d76f-494e-b725-337877360fa7-logs\") pod \"nova-metadata-0\" (UID: \"dc550ba1-d76f-494e-b725-337877360fa7\") " pod="openstack/nova-metadata-0" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.589507 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc550ba1-d76f-494e-b725-337877360fa7-config-data\") pod \"nova-metadata-0\" (UID: \"dc550ba1-d76f-494e-b725-337877360fa7\") " pod="openstack/nova-metadata-0" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.594267 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/785f8ce9-5280-44fe-891c-8162f2fdcd7a-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"785f8ce9-5280-44fe-891c-8162f2fdcd7a\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.594481 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/785f8ce9-5280-44fe-891c-8162f2fdcd7a-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: 
\"785f8ce9-5280-44fe-891c-8162f2fdcd7a\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.596707 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/785f8ce9-5280-44fe-891c-8162f2fdcd7a-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"785f8ce9-5280-44fe-891c-8162f2fdcd7a\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.598152 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/785f8ce9-5280-44fe-891c-8162f2fdcd7a-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"785f8ce9-5280-44fe-891c-8162f2fdcd7a\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.617909 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zlx6g\" (UniqueName: \"kubernetes.io/projected/785f8ce9-5280-44fe-891c-8162f2fdcd7a-kube-api-access-zlx6g\") pod \"nova-cell1-novncproxy-0\" (UID: \"785f8ce9-5280-44fe-891c-8162f2fdcd7a\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.675438 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.690317 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7r6sp\" (UniqueName: \"kubernetes.io/projected/dc550ba1-d76f-494e-b725-337877360fa7-kube-api-access-7r6sp\") pod \"nova-metadata-0\" (UID: \"dc550ba1-d76f-494e-b725-337877360fa7\") " pod="openstack/nova-metadata-0" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.690460 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc550ba1-d76f-494e-b725-337877360fa7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"dc550ba1-d76f-494e-b725-337877360fa7\") " pod="openstack/nova-metadata-0" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.690502 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc550ba1-d76f-494e-b725-337877360fa7-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"dc550ba1-d76f-494e-b725-337877360fa7\") " pod="openstack/nova-metadata-0" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.690538 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc550ba1-d76f-494e-b725-337877360fa7-logs\") pod \"nova-metadata-0\" (UID: \"dc550ba1-d76f-494e-b725-337877360fa7\") " pod="openstack/nova-metadata-0" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.690579 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc550ba1-d76f-494e-b725-337877360fa7-config-data\") pod \"nova-metadata-0\" (UID: \"dc550ba1-d76f-494e-b725-337877360fa7\") " pod="openstack/nova-metadata-0" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.692092 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc550ba1-d76f-494e-b725-337877360fa7-logs\") pod \"nova-metadata-0\" (UID: \"dc550ba1-d76f-494e-b725-337877360fa7\") " pod="openstack/nova-metadata-0" 
Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.694260 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc550ba1-d76f-494e-b725-337877360fa7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"dc550ba1-d76f-494e-b725-337877360fa7\") " pod="openstack/nova-metadata-0" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.695818 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc550ba1-d76f-494e-b725-337877360fa7-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"dc550ba1-d76f-494e-b725-337877360fa7\") " pod="openstack/nova-metadata-0" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.701470 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc550ba1-d76f-494e-b725-337877360fa7-config-data\") pod \"nova-metadata-0\" (UID: \"dc550ba1-d76f-494e-b725-337877360fa7\") " pod="openstack/nova-metadata-0" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.710169 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7r6sp\" (UniqueName: \"kubernetes.io/projected/dc550ba1-d76f-494e-b725-337877360fa7-kube-api-access-7r6sp\") pod \"nova-metadata-0\" (UID: \"dc550ba1-d76f-494e-b725-337877360fa7\") " pod="openstack/nova-metadata-0" Oct 10 16:53:01 crc kubenswrapper[4799]: I1010 16:53:01.746457 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 10 16:53:02 crc kubenswrapper[4799]: W1010 16:53:02.157741 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod785f8ce9_5280_44fe_891c_8162f2fdcd7a.slice/crio-aed2b40ab19d669e9d5375e3eca065a85cc8e7869b61fca099967cea718e2b8a WatchSource:0}: Error finding container aed2b40ab19d669e9d5375e3eca065a85cc8e7869b61fca099967cea718e2b8a: Status 404 returned error can't find the container with id aed2b40ab19d669e9d5375e3eca065a85cc8e7869b61fca099967cea718e2b8a Oct 10 16:53:02 crc kubenswrapper[4799]: I1010 16:53:02.159844 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 10 16:53:02 crc kubenswrapper[4799]: W1010 16:53:02.282938 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddc550ba1_d76f_494e_b725_337877360fa7.slice/crio-75b5a84abfadf06b61f75623f1b303c1e58267647afaa101b6f645ff95c9333b WatchSource:0}: Error finding container 75b5a84abfadf06b61f75623f1b303c1e58267647afaa101b6f645ff95c9333b: Status 404 returned error can't find the container with id 75b5a84abfadf06b61f75623f1b303c1e58267647afaa101b6f645ff95c9333b Oct 10 16:53:02 crc kubenswrapper[4799]: I1010 16:53:02.290610 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 10 16:53:02 crc kubenswrapper[4799]: I1010 16:53:02.997256 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"785f8ce9-5280-44fe-891c-8162f2fdcd7a","Type":"ContainerStarted","Data":"e41896910a44ed236459449a6ff81d15407ef8ae9a64ddb6a35d2d9e2aa86ba0"} Oct 10 16:53:02 crc kubenswrapper[4799]: I1010 16:53:02.997841 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" 
event={"ID":"785f8ce9-5280-44fe-891c-8162f2fdcd7a","Type":"ContainerStarted","Data":"aed2b40ab19d669e9d5375e3eca065a85cc8e7869b61fca099967cea718e2b8a"} Oct 10 16:53:03 crc kubenswrapper[4799]: I1010 16:53:03.000551 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"dc550ba1-d76f-494e-b725-337877360fa7","Type":"ContainerStarted","Data":"709ca06e7bc32ac924ab6a7082972570df73a5b5a092a31b57885d573ca50d30"} Oct 10 16:53:03 crc kubenswrapper[4799]: I1010 16:53:03.000687 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"dc550ba1-d76f-494e-b725-337877360fa7","Type":"ContainerStarted","Data":"2ed4d65b4875269def7a21a90dee04ee82b85598fc7409dabceea69f593953d6"} Oct 10 16:53:03 crc kubenswrapper[4799]: I1010 16:53:03.000815 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"dc550ba1-d76f-494e-b725-337877360fa7","Type":"ContainerStarted","Data":"75b5a84abfadf06b61f75623f1b303c1e58267647afaa101b6f645ff95c9333b"} Oct 10 16:53:03 crc kubenswrapper[4799]: I1010 16:53:03.020487 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.020472073 podStartE2EDuration="2.020472073s" podCreationTimestamp="2025-10-10 16:53:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:53:03.018634918 +0000 UTC m=+1276.526959033" watchObservedRunningTime="2025-10-10 16:53:03.020472073 +0000 UTC m=+1276.528796198" Oct 10 16:53:03 crc kubenswrapper[4799]: I1010 16:53:03.050501 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.050474952 podStartE2EDuration="2.050474952s" podCreationTimestamp="2025-10-10 16:53:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:53:03.043963991 +0000 UTC m=+1276.552288186" watchObservedRunningTime="2025-10-10 16:53:03.050474952 +0000 UTC m=+1276.558799087" Oct 10 16:53:03 crc kubenswrapper[4799]: I1010 16:53:03.072047 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 10 16:53:03 crc kubenswrapper[4799]: I1010 16:53:03.072143 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 10 16:53:03 crc kubenswrapper[4799]: I1010 16:53:03.072484 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 10 16:53:03 crc kubenswrapper[4799]: I1010 16:53:03.072516 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 10 16:53:03 crc kubenswrapper[4799]: I1010 16:53:03.077159 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 10 16:53:03 crc kubenswrapper[4799]: I1010 16:53:03.078361 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 10 16:53:03 crc kubenswrapper[4799]: I1010 16:53:03.264591 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-64986d45b9-khcqw"] Oct 10 16:53:03 crc kubenswrapper[4799]: I1010 16:53:03.267032 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-64986d45b9-khcqw" Oct 10 16:53:03 crc kubenswrapper[4799]: I1010 16:53:03.305725 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-64986d45b9-khcqw"] Oct 10 16:53:03 crc kubenswrapper[4799]: I1010 16:53:03.417324 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e98f1eab-bf7e-4d86-ab62-5294603982ae" path="/var/lib/kubelet/pods/e98f1eab-bf7e-4d86-ab62-5294603982ae/volumes" Oct 10 16:53:03 crc kubenswrapper[4799]: I1010 16:53:03.440479 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/361ecbc5-676b-42af-9eb3-fb761f842265-dns-svc\") pod \"dnsmasq-dns-64986d45b9-khcqw\" (UID: \"361ecbc5-676b-42af-9eb3-fb761f842265\") " pod="openstack/dnsmasq-dns-64986d45b9-khcqw" Oct 10 16:53:03 crc kubenswrapper[4799]: I1010 16:53:03.440554 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/361ecbc5-676b-42af-9eb3-fb761f842265-ovsdbserver-sb\") pod \"dnsmasq-dns-64986d45b9-khcqw\" (UID: \"361ecbc5-676b-42af-9eb3-fb761f842265\") " pod="openstack/dnsmasq-dns-64986d45b9-khcqw" Oct 10 16:53:03 crc kubenswrapper[4799]: I1010 16:53:03.440623 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/361ecbc5-676b-42af-9eb3-fb761f842265-ovsdbserver-nb\") pod \"dnsmasq-dns-64986d45b9-khcqw\" (UID: \"361ecbc5-676b-42af-9eb3-fb761f842265\") " pod="openstack/dnsmasq-dns-64986d45b9-khcqw" Oct 10 16:53:03 crc kubenswrapper[4799]: I1010 16:53:03.440695 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/361ecbc5-676b-42af-9eb3-fb761f842265-config\") pod \"dnsmasq-dns-64986d45b9-khcqw\" (UID: \"361ecbc5-676b-42af-9eb3-fb761f842265\") " pod="openstack/dnsmasq-dns-64986d45b9-khcqw" Oct 10 16:53:03 crc kubenswrapper[4799]: I1010 16:53:03.440831 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2qsx\" (UniqueName: \"kubernetes.io/projected/361ecbc5-676b-42af-9eb3-fb761f842265-kube-api-access-b2qsx\") pod \"dnsmasq-dns-64986d45b9-khcqw\" (UID: \"361ecbc5-676b-42af-9eb3-fb761f842265\") " pod="openstack/dnsmasq-dns-64986d45b9-khcqw" Oct 10 16:53:03 crc kubenswrapper[4799]: I1010 16:53:03.440889 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/361ecbc5-676b-42af-9eb3-fb761f842265-dns-swift-storage-0\") pod \"dnsmasq-dns-64986d45b9-khcqw\" (UID: \"361ecbc5-676b-42af-9eb3-fb761f842265\") " pod="openstack/dnsmasq-dns-64986d45b9-khcqw" Oct 10 16:53:03 crc kubenswrapper[4799]: I1010 16:53:03.544069 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/361ecbc5-676b-42af-9eb3-fb761f842265-dns-svc\") pod \"dnsmasq-dns-64986d45b9-khcqw\" (UID: \"361ecbc5-676b-42af-9eb3-fb761f842265\") " pod="openstack/dnsmasq-dns-64986d45b9-khcqw" Oct 10 16:53:03 crc kubenswrapper[4799]: I1010 16:53:03.544126 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/361ecbc5-676b-42af-9eb3-fb761f842265-ovsdbserver-sb\") pod \"dnsmasq-dns-64986d45b9-khcqw\" (UID: \"361ecbc5-676b-42af-9eb3-fb761f842265\") " pod="openstack/dnsmasq-dns-64986d45b9-khcqw" Oct 10 16:53:03 crc kubenswrapper[4799]: I1010 16:53:03.544179 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/361ecbc5-676b-42af-9eb3-fb761f842265-ovsdbserver-nb\") pod \"dnsmasq-dns-64986d45b9-khcqw\" (UID: \"361ecbc5-676b-42af-9eb3-fb761f842265\") " pod="openstack/dnsmasq-dns-64986d45b9-khcqw" Oct 10 16:53:03 crc kubenswrapper[4799]: I1010 16:53:03.544216 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/361ecbc5-676b-42af-9eb3-fb761f842265-config\") pod \"dnsmasq-dns-64986d45b9-khcqw\" (UID: \"361ecbc5-676b-42af-9eb3-fb761f842265\") " pod="openstack/dnsmasq-dns-64986d45b9-khcqw" Oct 10 16:53:03 crc kubenswrapper[4799]: I1010 16:53:03.544270 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2qsx\" (UniqueName: \"kubernetes.io/projected/361ecbc5-676b-42af-9eb3-fb761f842265-kube-api-access-b2qsx\") pod \"dnsmasq-dns-64986d45b9-khcqw\" (UID: \"361ecbc5-676b-42af-9eb3-fb761f842265\") " pod="openstack/dnsmasq-dns-64986d45b9-khcqw" Oct 10 16:53:03 crc kubenswrapper[4799]: I1010 16:53:03.544304 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/361ecbc5-676b-42af-9eb3-fb761f842265-dns-swift-storage-0\") pod \"dnsmasq-dns-64986d45b9-khcqw\" (UID: \"361ecbc5-676b-42af-9eb3-fb761f842265\") " pod="openstack/dnsmasq-dns-64986d45b9-khcqw" Oct 10 16:53:03 crc kubenswrapper[4799]: I1010 16:53:03.547617 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/361ecbc5-676b-42af-9eb3-fb761f842265-ovsdbserver-nb\") pod \"dnsmasq-dns-64986d45b9-khcqw\" (UID: \"361ecbc5-676b-42af-9eb3-fb761f842265\") " pod="openstack/dnsmasq-dns-64986d45b9-khcqw" Oct 10 16:53:03 crc kubenswrapper[4799]: I1010 16:53:03.548094 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/361ecbc5-676b-42af-9eb3-fb761f842265-ovsdbserver-sb\") pod \"dnsmasq-dns-64986d45b9-khcqw\" (UID: \"361ecbc5-676b-42af-9eb3-fb761f842265\") " pod="openstack/dnsmasq-dns-64986d45b9-khcqw" Oct 10 16:53:03 crc kubenswrapper[4799]: I1010 16:53:03.548895 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/361ecbc5-676b-42af-9eb3-fb761f842265-dns-swift-storage-0\") pod \"dnsmasq-dns-64986d45b9-khcqw\" (UID: \"361ecbc5-676b-42af-9eb3-fb761f842265\") " pod="openstack/dnsmasq-dns-64986d45b9-khcqw" Oct 10 16:53:03 crc kubenswrapper[4799]: I1010 16:53:03.548991 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/361ecbc5-676b-42af-9eb3-fb761f842265-config\") pod \"dnsmasq-dns-64986d45b9-khcqw\" (UID: \"361ecbc5-676b-42af-9eb3-fb761f842265\") " pod="openstack/dnsmasq-dns-64986d45b9-khcqw" Oct 10 16:53:03 crc kubenswrapper[4799]: I1010 16:53:03.549293 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/361ecbc5-676b-42af-9eb3-fb761f842265-dns-svc\") pod 
\"dnsmasq-dns-64986d45b9-khcqw\" (UID: \"361ecbc5-676b-42af-9eb3-fb761f842265\") " pod="openstack/dnsmasq-dns-64986d45b9-khcqw" Oct 10 16:53:03 crc kubenswrapper[4799]: I1010 16:53:03.568785 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2qsx\" (UniqueName: \"kubernetes.io/projected/361ecbc5-676b-42af-9eb3-fb761f842265-kube-api-access-b2qsx\") pod \"dnsmasq-dns-64986d45b9-khcqw\" (UID: \"361ecbc5-676b-42af-9eb3-fb761f842265\") " pod="openstack/dnsmasq-dns-64986d45b9-khcqw" Oct 10 16:53:03 crc kubenswrapper[4799]: I1010 16:53:03.592238 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-64986d45b9-khcqw" Oct 10 16:53:04 crc kubenswrapper[4799]: I1010 16:53:04.075807 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-64986d45b9-khcqw"] Oct 10 16:53:05 crc kubenswrapper[4799]: I1010 16:53:05.022563 4799 generic.go:334] "Generic (PLEG): container finished" podID="361ecbc5-676b-42af-9eb3-fb761f842265" containerID="2a76b9a7b4fe69e531aad8d796bff209d24a3b141da26b70b841b3b26b793ad9" exitCode=0 Oct 10 16:53:05 crc kubenswrapper[4799]: I1010 16:53:05.024153 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-64986d45b9-khcqw" event={"ID":"361ecbc5-676b-42af-9eb3-fb761f842265","Type":"ContainerDied","Data":"2a76b9a7b4fe69e531aad8d796bff209d24a3b141da26b70b841b3b26b793ad9"} Oct 10 16:53:05 crc kubenswrapper[4799]: I1010 16:53:05.024197 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-64986d45b9-khcqw" event={"ID":"361ecbc5-676b-42af-9eb3-fb761f842265","Type":"ContainerStarted","Data":"886979bf64ce0ec140fc36d3a375aff98b6c302ad49a10258df82e63682e6dc8"} Oct 10 16:53:05 crc kubenswrapper[4799]: I1010 16:53:05.046915 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:53:05 crc kubenswrapper[4799]: I1010 16:53:05.047177 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="83d0f03e-1c6a-4559-940c-312be653c7c3" containerName="ceilometer-central-agent" containerID="cri-o://225460eb0d91d90328909fe4e27056c7632675d24481ae318706175ae217a850" gracePeriod=30 Oct 10 16:53:05 crc kubenswrapper[4799]: I1010 16:53:05.047716 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="83d0f03e-1c6a-4559-940c-312be653c7c3" containerName="proxy-httpd" containerID="cri-o://853ab9f6c0dab10928372d18487047719af14f07aeaaba927bd1569a2b81c7bd" gracePeriod=30 Oct 10 16:53:05 crc kubenswrapper[4799]: I1010 16:53:05.049088 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="83d0f03e-1c6a-4559-940c-312be653c7c3" containerName="sg-core" containerID="cri-o://9b5f6a40528602c70f049ed8e70815b14bf3cfc52939bf60939bb675f80adcd0" gracePeriod=30 Oct 10 16:53:05 crc kubenswrapper[4799]: I1010 16:53:05.049256 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="83d0f03e-1c6a-4559-940c-312be653c7c3" containerName="ceilometer-notification-agent" containerID="cri-o://d0e485d6bf2ba97948b186f7d0f4cb2887563e953b6f33f2edcd98f85503f73a" gracePeriod=30 Oct 10 16:53:05 crc kubenswrapper[4799]: I1010 16:53:05.079667 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="83d0f03e-1c6a-4559-940c-312be653c7c3" containerName="proxy-httpd" probeResult="failure" 
output="HTTP probe failed with statuscode: 502" Oct 10 16:53:06 crc kubenswrapper[4799]: I1010 16:53:06.034322 4799 generic.go:334] "Generic (PLEG): container finished" podID="83d0f03e-1c6a-4559-940c-312be653c7c3" containerID="853ab9f6c0dab10928372d18487047719af14f07aeaaba927bd1569a2b81c7bd" exitCode=0 Oct 10 16:53:06 crc kubenswrapper[4799]: I1010 16:53:06.034857 4799 generic.go:334] "Generic (PLEG): container finished" podID="83d0f03e-1c6a-4559-940c-312be653c7c3" containerID="9b5f6a40528602c70f049ed8e70815b14bf3cfc52939bf60939bb675f80adcd0" exitCode=2 Oct 10 16:53:06 crc kubenswrapper[4799]: I1010 16:53:06.034420 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"83d0f03e-1c6a-4559-940c-312be653c7c3","Type":"ContainerDied","Data":"853ab9f6c0dab10928372d18487047719af14f07aeaaba927bd1569a2b81c7bd"} Oct 10 16:53:06 crc kubenswrapper[4799]: I1010 16:53:06.034904 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"83d0f03e-1c6a-4559-940c-312be653c7c3","Type":"ContainerDied","Data":"9b5f6a40528602c70f049ed8e70815b14bf3cfc52939bf60939bb675f80adcd0"} Oct 10 16:53:06 crc kubenswrapper[4799]: I1010 16:53:06.034918 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"83d0f03e-1c6a-4559-940c-312be653c7c3","Type":"ContainerDied","Data":"225460eb0d91d90328909fe4e27056c7632675d24481ae318706175ae217a850"} Oct 10 16:53:06 crc kubenswrapper[4799]: I1010 16:53:06.034869 4799 generic.go:334] "Generic (PLEG): container finished" podID="83d0f03e-1c6a-4559-940c-312be653c7c3" containerID="225460eb0d91d90328909fe4e27056c7632675d24481ae318706175ae217a850" exitCode=0 Oct 10 16:53:06 crc kubenswrapper[4799]: I1010 16:53:06.036881 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-64986d45b9-khcqw" event={"ID":"361ecbc5-676b-42af-9eb3-fb761f842265","Type":"ContainerStarted","Data":"7e43def494bc7bc9587325e681735d75ef482a0d36aa17642f1a5fcfdc38318f"} Oct 10 16:53:06 crc kubenswrapper[4799]: I1010 16:53:06.037028 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-64986d45b9-khcqw" Oct 10 16:53:06 crc kubenswrapper[4799]: I1010 16:53:06.058732 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-64986d45b9-khcqw" podStartSLOduration=3.058715684 podStartE2EDuration="3.058715684s" podCreationTimestamp="2025-10-10 16:53:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:53:06.056036908 +0000 UTC m=+1279.564361023" watchObservedRunningTime="2025-10-10 16:53:06.058715684 +0000 UTC m=+1279.567039789" Oct 10 16:53:06 crc kubenswrapper[4799]: I1010 16:53:06.405449 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 10 16:53:06 crc kubenswrapper[4799]: I1010 16:53:06.405712 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="2c70468e-4a71-405f-9171-9246f62aec11" containerName="nova-api-log" containerID="cri-o://566a37796a21dfea1dedc2df7d9a7b47ffce31cd1bf00e70fbadeaea806938ca" gracePeriod=30 Oct 10 16:53:06 crc kubenswrapper[4799]: I1010 16:53:06.405739 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="2c70468e-4a71-405f-9171-9246f62aec11" containerName="nova-api-api" 
containerID="cri-o://c3b98dfa47145b9b453ce0521311875ad5f48cae96b9f8f78a53d856bf09adce" gracePeriod=30 Oct 10 16:53:06 crc kubenswrapper[4799]: I1010 16:53:06.677060 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 10 16:53:06 crc kubenswrapper[4799]: I1010 16:53:06.746749 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 10 16:53:06 crc kubenswrapper[4799]: I1010 16:53:06.747081 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 10 16:53:07 crc kubenswrapper[4799]: I1010 16:53:07.056466 4799 generic.go:334] "Generic (PLEG): container finished" podID="83d0f03e-1c6a-4559-940c-312be653c7c3" containerID="d0e485d6bf2ba97948b186f7d0f4cb2887563e953b6f33f2edcd98f85503f73a" exitCode=0 Oct 10 16:53:07 crc kubenswrapper[4799]: I1010 16:53:07.056915 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"83d0f03e-1c6a-4559-940c-312be653c7c3","Type":"ContainerDied","Data":"d0e485d6bf2ba97948b186f7d0f4cb2887563e953b6f33f2edcd98f85503f73a"} Oct 10 16:53:07 crc kubenswrapper[4799]: I1010 16:53:07.062583 4799 generic.go:334] "Generic (PLEG): container finished" podID="2c70468e-4a71-405f-9171-9246f62aec11" containerID="566a37796a21dfea1dedc2df7d9a7b47ffce31cd1bf00e70fbadeaea806938ca" exitCode=143 Oct 10 16:53:07 crc kubenswrapper[4799]: I1010 16:53:07.063921 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2c70468e-4a71-405f-9171-9246f62aec11","Type":"ContainerDied","Data":"566a37796a21dfea1dedc2df7d9a7b47ffce31cd1bf00e70fbadeaea806938ca"} Oct 10 16:53:07 crc kubenswrapper[4799]: I1010 16:53:07.469071 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 10 16:53:07 crc kubenswrapper[4799]: I1010 16:53:07.633539 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/83d0f03e-1c6a-4559-940c-312be653c7c3-log-httpd\") pod \"83d0f03e-1c6a-4559-940c-312be653c7c3\" (UID: \"83d0f03e-1c6a-4559-940c-312be653c7c3\") " Oct 10 16:53:07 crc kubenswrapper[4799]: I1010 16:53:07.633595 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/83d0f03e-1c6a-4559-940c-312be653c7c3-scripts\") pod \"83d0f03e-1c6a-4559-940c-312be653c7c3\" (UID: \"83d0f03e-1c6a-4559-940c-312be653c7c3\") " Oct 10 16:53:07 crc kubenswrapper[4799]: I1010 16:53:07.633673 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83d0f03e-1c6a-4559-940c-312be653c7c3-combined-ca-bundle\") pod \"83d0f03e-1c6a-4559-940c-312be653c7c3\" (UID: \"83d0f03e-1c6a-4559-940c-312be653c7c3\") " Oct 10 16:53:07 crc kubenswrapper[4799]: I1010 16:53:07.633817 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-flfxw\" (UniqueName: \"kubernetes.io/projected/83d0f03e-1c6a-4559-940c-312be653c7c3-kube-api-access-flfxw\") pod \"83d0f03e-1c6a-4559-940c-312be653c7c3\" (UID: \"83d0f03e-1c6a-4559-940c-312be653c7c3\") " Oct 10 16:53:07 crc kubenswrapper[4799]: I1010 16:53:07.633900 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/83d0f03e-1c6a-4559-940c-312be653c7c3-sg-core-conf-yaml\") pod \"83d0f03e-1c6a-4559-940c-312be653c7c3\" (UID: \"83d0f03e-1c6a-4559-940c-312be653c7c3\") " Oct 10 16:53:07 crc kubenswrapper[4799]: I1010 16:53:07.633929 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/83d0f03e-1c6a-4559-940c-312be653c7c3-run-httpd\") pod \"83d0f03e-1c6a-4559-940c-312be653c7c3\" (UID: \"83d0f03e-1c6a-4559-940c-312be653c7c3\") " Oct 10 16:53:07 crc kubenswrapper[4799]: I1010 16:53:07.633962 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83d0f03e-1c6a-4559-940c-312be653c7c3-config-data\") pod \"83d0f03e-1c6a-4559-940c-312be653c7c3\" (UID: \"83d0f03e-1c6a-4559-940c-312be653c7c3\") " Oct 10 16:53:07 crc kubenswrapper[4799]: I1010 16:53:07.633998 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/83d0f03e-1c6a-4559-940c-312be653c7c3-ceilometer-tls-certs\") pod \"83d0f03e-1c6a-4559-940c-312be653c7c3\" (UID: \"83d0f03e-1c6a-4559-940c-312be653c7c3\") " Oct 10 16:53:07 crc kubenswrapper[4799]: I1010 16:53:07.634645 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83d0f03e-1c6a-4559-940c-312be653c7c3-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "83d0f03e-1c6a-4559-940c-312be653c7c3" (UID: "83d0f03e-1c6a-4559-940c-312be653c7c3"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:53:07 crc kubenswrapper[4799]: I1010 16:53:07.635155 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83d0f03e-1c6a-4559-940c-312be653c7c3-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "83d0f03e-1c6a-4559-940c-312be653c7c3" (UID: "83d0f03e-1c6a-4559-940c-312be653c7c3"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:53:07 crc kubenswrapper[4799]: I1010 16:53:07.647599 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83d0f03e-1c6a-4559-940c-312be653c7c3-kube-api-access-flfxw" (OuterVolumeSpecName: "kube-api-access-flfxw") pod "83d0f03e-1c6a-4559-940c-312be653c7c3" (UID: "83d0f03e-1c6a-4559-940c-312be653c7c3"). InnerVolumeSpecName "kube-api-access-flfxw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:53:07 crc kubenswrapper[4799]: I1010 16:53:07.651515 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83d0f03e-1c6a-4559-940c-312be653c7c3-scripts" (OuterVolumeSpecName: "scripts") pod "83d0f03e-1c6a-4559-940c-312be653c7c3" (UID: "83d0f03e-1c6a-4559-940c-312be653c7c3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:53:07 crc kubenswrapper[4799]: I1010 16:53:07.693336 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83d0f03e-1c6a-4559-940c-312be653c7c3-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "83d0f03e-1c6a-4559-940c-312be653c7c3" (UID: "83d0f03e-1c6a-4559-940c-312be653c7c3"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:53:07 crc kubenswrapper[4799]: I1010 16:53:07.709512 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83d0f03e-1c6a-4559-940c-312be653c7c3-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "83d0f03e-1c6a-4559-940c-312be653c7c3" (UID: "83d0f03e-1c6a-4559-940c-312be653c7c3"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:53:07 crc kubenswrapper[4799]: I1010 16:53:07.737176 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-flfxw\" (UniqueName: \"kubernetes.io/projected/83d0f03e-1c6a-4559-940c-312be653c7c3-kube-api-access-flfxw\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:07 crc kubenswrapper[4799]: I1010 16:53:07.737212 4799 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/83d0f03e-1c6a-4559-940c-312be653c7c3-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:07 crc kubenswrapper[4799]: I1010 16:53:07.737226 4799 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/83d0f03e-1c6a-4559-940c-312be653c7c3-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:07 crc kubenswrapper[4799]: I1010 16:53:07.737238 4799 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/83d0f03e-1c6a-4559-940c-312be653c7c3-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:07 crc kubenswrapper[4799]: I1010 16:53:07.737249 4799 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/83d0f03e-1c6a-4559-940c-312be653c7c3-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:07 crc kubenswrapper[4799]: I1010 16:53:07.737258 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/83d0f03e-1c6a-4559-940c-312be653c7c3-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:07 crc kubenswrapper[4799]: I1010 16:53:07.749243 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83d0f03e-1c6a-4559-940c-312be653c7c3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "83d0f03e-1c6a-4559-940c-312be653c7c3" (UID: "83d0f03e-1c6a-4559-940c-312be653c7c3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:53:07 crc kubenswrapper[4799]: I1010 16:53:07.776555 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83d0f03e-1c6a-4559-940c-312be653c7c3-config-data" (OuterVolumeSpecName: "config-data") pod "83d0f03e-1c6a-4559-940c-312be653c7c3" (UID: "83d0f03e-1c6a-4559-940c-312be653c7c3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:53:07 crc kubenswrapper[4799]: I1010 16:53:07.845209 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83d0f03e-1c6a-4559-940c-312be653c7c3-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:07 crc kubenswrapper[4799]: I1010 16:53:07.845245 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83d0f03e-1c6a-4559-940c-312be653c7c3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.076671 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"83d0f03e-1c6a-4559-940c-312be653c7c3","Type":"ContainerDied","Data":"c2d6ee29905488b8223a554e503cf14d8fe6ff5ab2b583ef9189466fb7ff8989"} Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.076743 4799 scope.go:117] "RemoveContainer" containerID="853ab9f6c0dab10928372d18487047719af14f07aeaaba927bd1569a2b81c7bd" Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.076951 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.117377 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.121665 4799 scope.go:117] "RemoveContainer" containerID="9b5f6a40528602c70f049ed8e70815b14bf3cfc52939bf60939bb675f80adcd0" Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.125737 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.155133 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:53:08 crc kubenswrapper[4799]: E1010 16:53:08.155667 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83d0f03e-1c6a-4559-940c-312be653c7c3" containerName="proxy-httpd" Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.155691 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="83d0f03e-1c6a-4559-940c-312be653c7c3" containerName="proxy-httpd" Oct 10 16:53:08 crc kubenswrapper[4799]: E1010 16:53:08.155706 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83d0f03e-1c6a-4559-940c-312be653c7c3" containerName="sg-core" Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.155715 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="83d0f03e-1c6a-4559-940c-312be653c7c3" containerName="sg-core" Oct 10 16:53:08 crc kubenswrapper[4799]: E1010 16:53:08.155745 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83d0f03e-1c6a-4559-940c-312be653c7c3" containerName="ceilometer-central-agent" Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.155776 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="83d0f03e-1c6a-4559-940c-312be653c7c3" containerName="ceilometer-central-agent" Oct 10 16:53:08 crc kubenswrapper[4799]: E1010 16:53:08.155810 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83d0f03e-1c6a-4559-940c-312be653c7c3" containerName="ceilometer-notification-agent" Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.155818 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="83d0f03e-1c6a-4559-940c-312be653c7c3" containerName="ceilometer-notification-agent" Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.155829 4799 scope.go:117] 
"RemoveContainer" containerID="d0e485d6bf2ba97948b186f7d0f4cb2887563e953b6f33f2edcd98f85503f73a" Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.156078 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="83d0f03e-1c6a-4559-940c-312be653c7c3" containerName="sg-core" Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.156096 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="83d0f03e-1c6a-4559-940c-312be653c7c3" containerName="proxy-httpd" Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.156116 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="83d0f03e-1c6a-4559-940c-312be653c7c3" containerName="ceilometer-central-agent" Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.156134 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="83d0f03e-1c6a-4559-940c-312be653c7c3" containerName="ceilometer-notification-agent" Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.158249 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.161060 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.161087 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.161035 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.187450 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.193094 4799 scope.go:117] "RemoveContainer" containerID="225460eb0d91d90328909fe4e27056c7632675d24481ae318706175ae217a850" Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.254727 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/820f4f16-f7a9-4be4-ac71-ba919b532696-config-data\") pod \"ceilometer-0\" (UID: \"820f4f16-f7a9-4be4-ac71-ba919b532696\") " pod="openstack/ceilometer-0" Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.254822 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/820f4f16-f7a9-4be4-ac71-ba919b532696-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"820f4f16-f7a9-4be4-ac71-ba919b532696\") " pod="openstack/ceilometer-0" Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.254883 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/820f4f16-f7a9-4be4-ac71-ba919b532696-run-httpd\") pod \"ceilometer-0\" (UID: \"820f4f16-f7a9-4be4-ac71-ba919b532696\") " pod="openstack/ceilometer-0" Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.254909 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/820f4f16-f7a9-4be4-ac71-ba919b532696-log-httpd\") pod \"ceilometer-0\" (UID: \"820f4f16-f7a9-4be4-ac71-ba919b532696\") " pod="openstack/ceilometer-0" Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.254943 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"scripts\" (UniqueName: \"kubernetes.io/secret/820f4f16-f7a9-4be4-ac71-ba919b532696-scripts\") pod \"ceilometer-0\" (UID: \"820f4f16-f7a9-4be4-ac71-ba919b532696\") " pod="openstack/ceilometer-0" Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.254989 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q6lrz\" (UniqueName: \"kubernetes.io/projected/820f4f16-f7a9-4be4-ac71-ba919b532696-kube-api-access-q6lrz\") pod \"ceilometer-0\" (UID: \"820f4f16-f7a9-4be4-ac71-ba919b532696\") " pod="openstack/ceilometer-0" Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.255031 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/820f4f16-f7a9-4be4-ac71-ba919b532696-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"820f4f16-f7a9-4be4-ac71-ba919b532696\") " pod="openstack/ceilometer-0" Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.255060 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/820f4f16-f7a9-4be4-ac71-ba919b532696-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"820f4f16-f7a9-4be4-ac71-ba919b532696\") " pod="openstack/ceilometer-0" Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.356473 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/820f4f16-f7a9-4be4-ac71-ba919b532696-run-httpd\") pod \"ceilometer-0\" (UID: \"820f4f16-f7a9-4be4-ac71-ba919b532696\") " pod="openstack/ceilometer-0" Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.356567 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/820f4f16-f7a9-4be4-ac71-ba919b532696-log-httpd\") pod \"ceilometer-0\" (UID: \"820f4f16-f7a9-4be4-ac71-ba919b532696\") " pod="openstack/ceilometer-0" Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.356624 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/820f4f16-f7a9-4be4-ac71-ba919b532696-scripts\") pod \"ceilometer-0\" (UID: \"820f4f16-f7a9-4be4-ac71-ba919b532696\") " pod="openstack/ceilometer-0" Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.356700 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q6lrz\" (UniqueName: \"kubernetes.io/projected/820f4f16-f7a9-4be4-ac71-ba919b532696-kube-api-access-q6lrz\") pod \"ceilometer-0\" (UID: \"820f4f16-f7a9-4be4-ac71-ba919b532696\") " pod="openstack/ceilometer-0" Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.356787 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/820f4f16-f7a9-4be4-ac71-ba919b532696-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"820f4f16-f7a9-4be4-ac71-ba919b532696\") " pod="openstack/ceilometer-0" Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.356823 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/820f4f16-f7a9-4be4-ac71-ba919b532696-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"820f4f16-f7a9-4be4-ac71-ba919b532696\") " pod="openstack/ceilometer-0" Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.356969 
4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/820f4f16-f7a9-4be4-ac71-ba919b532696-config-data\") pod \"ceilometer-0\" (UID: \"820f4f16-f7a9-4be4-ac71-ba919b532696\") " pod="openstack/ceilometer-0"
Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.357014 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/820f4f16-f7a9-4be4-ac71-ba919b532696-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"820f4f16-f7a9-4be4-ac71-ba919b532696\") " pod="openstack/ceilometer-0"
Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.357259 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/820f4f16-f7a9-4be4-ac71-ba919b532696-log-httpd\") pod \"ceilometer-0\" (UID: \"820f4f16-f7a9-4be4-ac71-ba919b532696\") " pod="openstack/ceilometer-0"
Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.357356 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/820f4f16-f7a9-4be4-ac71-ba919b532696-run-httpd\") pod \"ceilometer-0\" (UID: \"820f4f16-f7a9-4be4-ac71-ba919b532696\") " pod="openstack/ceilometer-0"
Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.362641 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/820f4f16-f7a9-4be4-ac71-ba919b532696-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"820f4f16-f7a9-4be4-ac71-ba919b532696\") " pod="openstack/ceilometer-0"
Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.364331 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/820f4f16-f7a9-4be4-ac71-ba919b532696-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"820f4f16-f7a9-4be4-ac71-ba919b532696\") " pod="openstack/ceilometer-0"
Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.372617 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/820f4f16-f7a9-4be4-ac71-ba919b532696-config-data\") pod \"ceilometer-0\" (UID: \"820f4f16-f7a9-4be4-ac71-ba919b532696\") " pod="openstack/ceilometer-0"
Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.373877 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/820f4f16-f7a9-4be4-ac71-ba919b532696-scripts\") pod \"ceilometer-0\" (UID: \"820f4f16-f7a9-4be4-ac71-ba919b532696\") " pod="openstack/ceilometer-0"
Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.374699 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/820f4f16-f7a9-4be4-ac71-ba919b532696-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"820f4f16-f7a9-4be4-ac71-ba919b532696\") " pod="openstack/ceilometer-0"
Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.376912 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q6lrz\" (UniqueName: \"kubernetes.io/projected/820f4f16-f7a9-4be4-ac71-ba919b532696-kube-api-access-q6lrz\") pod \"ceilometer-0\" (UID: \"820f4f16-f7a9-4be4-ac71-ba919b532696\") " pod="openstack/ceilometer-0"
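The block of MountVolume lines above completes the attach/mount cycle for the recreated ceilometer-0 pod: each volume is first verified as attached (reconciler_common.go:245), then a mount operation is started (reconciler_common.go:218), then SetUp succeeds (operation_generator.go:637), always in that order. A minimal, self-contained Go sketch of that one-step-at-a-time ordering follows; it is my own illustration of the invariant, not kubelet source.

package main

import "fmt"

type state int

const (
	unattached state = iota
	attachVerified // "VerifyControllerAttachedVolume started" logged
	mountStarted   // "operationExecutor.MountVolume started" logged
	setUpDone      // "MountVolume.SetUp succeeded" logged
)

// step advances a volume by exactly one state and rejects skipped
// transitions, which is why the three log lines always appear in order.
func step(cur, next state) (state, error) {
	if next != cur+1 {
		return cur, fmt.Errorf("illegal transition %d -> %d", cur, next)
	}
	return next, nil
}

func main() {
	s := unattached
	for _, n := range []state{attachVerified, mountStarted, setUpDone} {
		var err error
		if s, err = step(s, n); err != nil {
			panic(err)
		}
		fmt.Printf("kube-api-access-q6lrz: state %d reached\n", s)
	}
}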
Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.482273 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.843517 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 10 16:53:08 crc kubenswrapper[4799]: I1010 16:53:08.957959 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 10 16:53:09 crc kubenswrapper[4799]: I1010 16:53:09.090542 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"820f4f16-f7a9-4be4-ac71-ba919b532696","Type":"ContainerStarted","Data":"0ec78f8fd8729ae490343fc019d451e8ed10858c43266ab80a4baef32438afd1"}
Oct 10 16:53:09 crc kubenswrapper[4799]: I1010 16:53:09.418832 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83d0f03e-1c6a-4559-940c-312be653c7c3" path="/var/lib/kubelet/pods/83d0f03e-1c6a-4559-940c-312be653c7c3/volumes"
Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.015689 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.102183 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w6gkt\" (UniqueName: \"kubernetes.io/projected/2c70468e-4a71-405f-9171-9246f62aec11-kube-api-access-w6gkt\") pod \"2c70468e-4a71-405f-9171-9246f62aec11\" (UID: \"2c70468e-4a71-405f-9171-9246f62aec11\") "
Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.102312 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2c70468e-4a71-405f-9171-9246f62aec11-logs\") pod \"2c70468e-4a71-405f-9171-9246f62aec11\" (UID: \"2c70468e-4a71-405f-9171-9246f62aec11\") "
Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.102394 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c70468e-4a71-405f-9171-9246f62aec11-config-data\") pod \"2c70468e-4a71-405f-9171-9246f62aec11\" (UID: \"2c70468e-4a71-405f-9171-9246f62aec11\") "
Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.102412 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c70468e-4a71-405f-9171-9246f62aec11-combined-ca-bundle\") pod \"2c70468e-4a71-405f-9171-9246f62aec11\" (UID: \"2c70468e-4a71-405f-9171-9246f62aec11\") "
Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.103780 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c70468e-4a71-405f-9171-9246f62aec11-logs" (OuterVolumeSpecName: "logs") pod "2c70468e-4a71-405f-9171-9246f62aec11" (UID: "2c70468e-4a71-405f-9171-9246f62aec11"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.117013 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c70468e-4a71-405f-9171-9246f62aec11-kube-api-access-w6gkt" (OuterVolumeSpecName: "kube-api-access-w6gkt") pod "2c70468e-4a71-405f-9171-9246f62aec11" (UID: "2c70468e-4a71-405f-9171-9246f62aec11"). InnerVolumeSpecName "kube-api-access-w6gkt".
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.122445 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"820f4f16-f7a9-4be4-ac71-ba919b532696","Type":"ContainerStarted","Data":"6c2032db6bdf18f707a822270aa762ba46704186f3a8f0494969a8a602f5ebec"} Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.126456 4799 generic.go:334] "Generic (PLEG): container finished" podID="2c70468e-4a71-405f-9171-9246f62aec11" containerID="c3b98dfa47145b9b453ce0521311875ad5f48cae96b9f8f78a53d856bf09adce" exitCode=0 Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.126497 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2c70468e-4a71-405f-9171-9246f62aec11","Type":"ContainerDied","Data":"c3b98dfa47145b9b453ce0521311875ad5f48cae96b9f8f78a53d856bf09adce"} Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.126524 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2c70468e-4a71-405f-9171-9246f62aec11","Type":"ContainerDied","Data":"fe52afb72d64bfe0d045c17b174ae9447ef50652e6ec84c2cf50412b096de6d1"} Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.126544 4799 scope.go:117] "RemoveContainer" containerID="c3b98dfa47145b9b453ce0521311875ad5f48cae96b9f8f78a53d856bf09adce" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.126717 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.140004 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c70468e-4a71-405f-9171-9246f62aec11-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2c70468e-4a71-405f-9171-9246f62aec11" (UID: "2c70468e-4a71-405f-9171-9246f62aec11"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.151183 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c70468e-4a71-405f-9171-9246f62aec11-config-data" (OuterVolumeSpecName: "config-data") pod "2c70468e-4a71-405f-9171-9246f62aec11" (UID: "2c70468e-4a71-405f-9171-9246f62aec11"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.204381 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c70468e-4a71-405f-9171-9246f62aec11-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.204417 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c70468e-4a71-405f-9171-9246f62aec11-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.204429 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w6gkt\" (UniqueName: \"kubernetes.io/projected/2c70468e-4a71-405f-9171-9246f62aec11-kube-api-access-w6gkt\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.204439 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2c70468e-4a71-405f-9171-9246f62aec11-logs\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.240808 4799 scope.go:117] "RemoveContainer" containerID="566a37796a21dfea1dedc2df7d9a7b47ffce31cd1bf00e70fbadeaea806938ca" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.259373 4799 scope.go:117] "RemoveContainer" containerID="c3b98dfa47145b9b453ce0521311875ad5f48cae96b9f8f78a53d856bf09adce" Oct 10 16:53:10 crc kubenswrapper[4799]: E1010 16:53:10.260307 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3b98dfa47145b9b453ce0521311875ad5f48cae96b9f8f78a53d856bf09adce\": container with ID starting with c3b98dfa47145b9b453ce0521311875ad5f48cae96b9f8f78a53d856bf09adce not found: ID does not exist" containerID="c3b98dfa47145b9b453ce0521311875ad5f48cae96b9f8f78a53d856bf09adce" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.260361 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3b98dfa47145b9b453ce0521311875ad5f48cae96b9f8f78a53d856bf09adce"} err="failed to get container status \"c3b98dfa47145b9b453ce0521311875ad5f48cae96b9f8f78a53d856bf09adce\": rpc error: code = NotFound desc = could not find container \"c3b98dfa47145b9b453ce0521311875ad5f48cae96b9f8f78a53d856bf09adce\": container with ID starting with c3b98dfa47145b9b453ce0521311875ad5f48cae96b9f8f78a53d856bf09adce not found: ID does not exist" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.260410 4799 scope.go:117] "RemoveContainer" containerID="566a37796a21dfea1dedc2df7d9a7b47ffce31cd1bf00e70fbadeaea806938ca" Oct 10 16:53:10 crc kubenswrapper[4799]: E1010 16:53:10.261039 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"566a37796a21dfea1dedc2df7d9a7b47ffce31cd1bf00e70fbadeaea806938ca\": container with ID starting with 566a37796a21dfea1dedc2df7d9a7b47ffce31cd1bf00e70fbadeaea806938ca not found: ID does not exist" containerID="566a37796a21dfea1dedc2df7d9a7b47ffce31cd1bf00e70fbadeaea806938ca" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.261072 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"566a37796a21dfea1dedc2df7d9a7b47ffce31cd1bf00e70fbadeaea806938ca"} err="failed to get container status \"566a37796a21dfea1dedc2df7d9a7b47ffce31cd1bf00e70fbadeaea806938ca\": rpc error: code = NotFound desc = could 
not find container \"566a37796a21dfea1dedc2df7d9a7b47ffce31cd1bf00e70fbadeaea806938ca\": container with ID starting with 566a37796a21dfea1dedc2df7d9a7b47ffce31cd1bf00e70fbadeaea806938ca not found: ID does not exist" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.466483 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.486938 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.497576 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 10 16:53:10 crc kubenswrapper[4799]: E1010 16:53:10.498021 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c70468e-4a71-405f-9171-9246f62aec11" containerName="nova-api-api" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.498033 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c70468e-4a71-405f-9171-9246f62aec11" containerName="nova-api-api" Oct 10 16:53:10 crc kubenswrapper[4799]: E1010 16:53:10.498066 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c70468e-4a71-405f-9171-9246f62aec11" containerName="nova-api-log" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.498072 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c70468e-4a71-405f-9171-9246f62aec11" containerName="nova-api-log" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.498254 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c70468e-4a71-405f-9171-9246f62aec11" containerName="nova-api-api" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.498279 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c70468e-4a71-405f-9171-9246f62aec11" containerName="nova-api-log" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.499279 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.501977 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.503053 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.503320 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.505634 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.609657 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l59zx\" (UniqueName: \"kubernetes.io/projected/73962347-1cda-4c09-ade4-97eb7bdee215-kube-api-access-l59zx\") pod \"nova-api-0\" (UID: \"73962347-1cda-4c09-ade4-97eb7bdee215\") " pod="openstack/nova-api-0" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.609940 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/73962347-1cda-4c09-ade4-97eb7bdee215-public-tls-certs\") pod \"nova-api-0\" (UID: \"73962347-1cda-4c09-ade4-97eb7bdee215\") " pod="openstack/nova-api-0" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.610107 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73962347-1cda-4c09-ade4-97eb7bdee215-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"73962347-1cda-4c09-ade4-97eb7bdee215\") " pod="openstack/nova-api-0" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.610174 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73962347-1cda-4c09-ade4-97eb7bdee215-logs\") pod \"nova-api-0\" (UID: \"73962347-1cda-4c09-ade4-97eb7bdee215\") " pod="openstack/nova-api-0" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.610258 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73962347-1cda-4c09-ade4-97eb7bdee215-config-data\") pod \"nova-api-0\" (UID: \"73962347-1cda-4c09-ade4-97eb7bdee215\") " pod="openstack/nova-api-0" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.610334 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/73962347-1cda-4c09-ade4-97eb7bdee215-internal-tls-certs\") pod \"nova-api-0\" (UID: \"73962347-1cda-4c09-ade4-97eb7bdee215\") " pod="openstack/nova-api-0" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.711997 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73962347-1cda-4c09-ade4-97eb7bdee215-config-data\") pod \"nova-api-0\" (UID: \"73962347-1cda-4c09-ade4-97eb7bdee215\") " pod="openstack/nova-api-0" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.712289 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/73962347-1cda-4c09-ade4-97eb7bdee215-internal-tls-certs\") pod \"nova-api-0\" (UID: 
\"73962347-1cda-4c09-ade4-97eb7bdee215\") " pod="openstack/nova-api-0" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.712367 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l59zx\" (UniqueName: \"kubernetes.io/projected/73962347-1cda-4c09-ade4-97eb7bdee215-kube-api-access-l59zx\") pod \"nova-api-0\" (UID: \"73962347-1cda-4c09-ade4-97eb7bdee215\") " pod="openstack/nova-api-0" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.712425 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/73962347-1cda-4c09-ade4-97eb7bdee215-public-tls-certs\") pod \"nova-api-0\" (UID: \"73962347-1cda-4c09-ade4-97eb7bdee215\") " pod="openstack/nova-api-0" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.712456 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73962347-1cda-4c09-ade4-97eb7bdee215-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"73962347-1cda-4c09-ade4-97eb7bdee215\") " pod="openstack/nova-api-0" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.712473 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73962347-1cda-4c09-ade4-97eb7bdee215-logs\") pod \"nova-api-0\" (UID: \"73962347-1cda-4c09-ade4-97eb7bdee215\") " pod="openstack/nova-api-0" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.712903 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73962347-1cda-4c09-ade4-97eb7bdee215-logs\") pod \"nova-api-0\" (UID: \"73962347-1cda-4c09-ade4-97eb7bdee215\") " pod="openstack/nova-api-0" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.719668 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/73962347-1cda-4c09-ade4-97eb7bdee215-internal-tls-certs\") pod \"nova-api-0\" (UID: \"73962347-1cda-4c09-ade4-97eb7bdee215\") " pod="openstack/nova-api-0" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.719705 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73962347-1cda-4c09-ade4-97eb7bdee215-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"73962347-1cda-4c09-ade4-97eb7bdee215\") " pod="openstack/nova-api-0" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.719906 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73962347-1cda-4c09-ade4-97eb7bdee215-config-data\") pod \"nova-api-0\" (UID: \"73962347-1cda-4c09-ade4-97eb7bdee215\") " pod="openstack/nova-api-0" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.720014 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/73962347-1cda-4c09-ade4-97eb7bdee215-public-tls-certs\") pod \"nova-api-0\" (UID: \"73962347-1cda-4c09-ade4-97eb7bdee215\") " pod="openstack/nova-api-0" Oct 10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.733615 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l59zx\" (UniqueName: \"kubernetes.io/projected/73962347-1cda-4c09-ade4-97eb7bdee215-kube-api-access-l59zx\") pod \"nova-api-0\" (UID: \"73962347-1cda-4c09-ade4-97eb7bdee215\") " pod="openstack/nova-api-0" Oct 
10 16:53:10 crc kubenswrapper[4799]: I1010 16:53:10.819039 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 10 16:53:11 crc kubenswrapper[4799]: I1010 16:53:11.145404 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"820f4f16-f7a9-4be4-ac71-ba919b532696","Type":"ContainerStarted","Data":"90f47e090f2ff76c1c4400c101ec5b962d84bc0a3537bffbdaf0c8c2bedb9c83"}
Oct 10 16:53:11 crc kubenswrapper[4799]: W1010 16:53:11.290642 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod73962347_1cda_4c09_ade4_97eb7bdee215.slice/crio-16979e064e9410d5e9be635d15c2ca1b561886100133a0c26b8970ddefc3a415 WatchSource:0}: Error finding container 16979e064e9410d5e9be635d15c2ca1b561886100133a0c26b8970ddefc3a415: Status 404 returned error can't find the container with id 16979e064e9410d5e9be635d15c2ca1b561886100133a0c26b8970ddefc3a415
Oct 10 16:53:11 crc kubenswrapper[4799]: I1010 16:53:11.291874 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Oct 10 16:53:11 crc kubenswrapper[4799]: I1010 16:53:11.415259 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c70468e-4a71-405f-9171-9246f62aec11" path="/var/lib/kubelet/pods/2c70468e-4a71-405f-9171-9246f62aec11/volumes"
Oct 10 16:53:11 crc kubenswrapper[4799]: I1010 16:53:11.676158 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0"
Oct 10 16:53:11 crc kubenswrapper[4799]: I1010 16:53:11.704725 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0"
Oct 10 16:53:11 crc kubenswrapper[4799]: I1010 16:53:11.746566 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Oct 10 16:53:11 crc kubenswrapper[4799]: I1010 16:53:11.746607 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Oct 10 16:53:12 crc kubenswrapper[4799]: I1010 16:53:12.161361 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"73962347-1cda-4c09-ade4-97eb7bdee215","Type":"ContainerStarted","Data":"433e3df0291e42d1b28025e1db25473bfe24e681606ac3cbb380fa76839ca867"}
Oct 10 16:53:12 crc kubenswrapper[4799]: I1010 16:53:12.162128 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"73962347-1cda-4c09-ade4-97eb7bdee215","Type":"ContainerStarted","Data":"8ae6269d9b128ff68b7ca387fe515c568a768963ad794c0d323b2f6a98499e16"}
Oct 10 16:53:12 crc kubenswrapper[4799]: I1010 16:53:12.162233 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"73962347-1cda-4c09-ade4-97eb7bdee215","Type":"ContainerStarted","Data":"16979e064e9410d5e9be635d15c2ca1b561886100133a0c26b8970ddefc3a415"}
Oct 10 16:53:12 crc kubenswrapper[4799]: I1010 16:53:12.164520 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"820f4f16-f7a9-4be4-ac71-ba919b532696","Type":"ContainerStarted","Data":"3fb0c4c6a03bfd9f995e91e82d5d39a721c96ef86a81ec6baf30474fcc747e81"}
Oct 10 16:53:12 crc kubenswrapper[4799]: I1010 16:53:12.185874 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.185852415 podStartE2EDuration="2.185852415s" podCreationTimestamp="2025-10-10 16:53:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:53:12.185115797 +0000 UTC m=+1285.693439922" watchObservedRunningTime="2025-10-10 16:53:12.185852415 +0000 UTC m=+1285.694176540"
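The podStartSLOduration figure just above can be checked by hand: the zero-valued firstStartedPulling/lastFinishedPulling timestamps mean no image-pull time was subtracted, so the SLO duration equals watchObservedRunningTime minus podCreationTimestamp. A short Go sketch of that arithmetic, using the values copied from the entry above (an illustration, not the pod_startup_latency_tracker code):

package main

import (
	"fmt"
	"time"
)

func main() {
	// podCreationTimestamp and watchObservedRunningTime from the nova-api-0 entry.
	created, err := time.Parse(time.RFC3339, "2025-10-10T16:53:10Z")
	if err != nil {
		panic(err)
	}
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	running, err := time.Parse(layout, "2025-10-10 16:53:12.185852415 +0000 UTC")
	if err != nil {
		panic(err)
	}
	// No pull window to exclude, so SLO duration == end-to-end duration.
	fmt.Println(running.Sub(created)) // 2.185852415s, matching the log
}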
16:53:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:53:12.185115797 +0000 UTC m=+1285.693439922" watchObservedRunningTime="2025-10-10 16:53:12.185852415 +0000 UTC m=+1285.694176540" Oct 10 16:53:12 crc kubenswrapper[4799]: I1010 16:53:12.193207 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Oct 10 16:53:12 crc kubenswrapper[4799]: I1010 16:53:12.355674 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-spv5x"] Oct 10 16:53:12 crc kubenswrapper[4799]: I1010 16:53:12.356989 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-spv5x" Oct 10 16:53:12 crc kubenswrapper[4799]: I1010 16:53:12.358945 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2396929d-6af8-4f8e-9fef-b44c7ce23afa-scripts\") pod \"nova-cell1-cell-mapping-spv5x\" (UID: \"2396929d-6af8-4f8e-9fef-b44c7ce23afa\") " pod="openstack/nova-cell1-cell-mapping-spv5x" Oct 10 16:53:12 crc kubenswrapper[4799]: I1010 16:53:12.359061 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2396929d-6af8-4f8e-9fef-b44c7ce23afa-config-data\") pod \"nova-cell1-cell-mapping-spv5x\" (UID: \"2396929d-6af8-4f8e-9fef-b44c7ce23afa\") " pod="openstack/nova-cell1-cell-mapping-spv5x" Oct 10 16:53:12 crc kubenswrapper[4799]: I1010 16:53:12.359106 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2396929d-6af8-4f8e-9fef-b44c7ce23afa-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-spv5x\" (UID: \"2396929d-6af8-4f8e-9fef-b44c7ce23afa\") " pod="openstack/nova-cell1-cell-mapping-spv5x" Oct 10 16:53:12 crc kubenswrapper[4799]: I1010 16:53:12.359202 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wx6cp\" (UniqueName: \"kubernetes.io/projected/2396929d-6af8-4f8e-9fef-b44c7ce23afa-kube-api-access-wx6cp\") pod \"nova-cell1-cell-mapping-spv5x\" (UID: \"2396929d-6af8-4f8e-9fef-b44c7ce23afa\") " pod="openstack/nova-cell1-cell-mapping-spv5x" Oct 10 16:53:12 crc kubenswrapper[4799]: I1010 16:53:12.359224 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Oct 10 16:53:12 crc kubenswrapper[4799]: I1010 16:53:12.359379 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Oct 10 16:53:12 crc kubenswrapper[4799]: I1010 16:53:12.377953 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-spv5x"] Oct 10 16:53:12 crc kubenswrapper[4799]: I1010 16:53:12.460829 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2396929d-6af8-4f8e-9fef-b44c7ce23afa-scripts\") pod \"nova-cell1-cell-mapping-spv5x\" (UID: \"2396929d-6af8-4f8e-9fef-b44c7ce23afa\") " pod="openstack/nova-cell1-cell-mapping-spv5x" Oct 10 16:53:12 crc kubenswrapper[4799]: I1010 16:53:12.460898 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/2396929d-6af8-4f8e-9fef-b44c7ce23afa-config-data\") pod \"nova-cell1-cell-mapping-spv5x\" (UID: \"2396929d-6af8-4f8e-9fef-b44c7ce23afa\") " pod="openstack/nova-cell1-cell-mapping-spv5x" Oct 10 16:53:12 crc kubenswrapper[4799]: I1010 16:53:12.460929 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2396929d-6af8-4f8e-9fef-b44c7ce23afa-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-spv5x\" (UID: \"2396929d-6af8-4f8e-9fef-b44c7ce23afa\") " pod="openstack/nova-cell1-cell-mapping-spv5x" Oct 10 16:53:12 crc kubenswrapper[4799]: I1010 16:53:12.460973 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wx6cp\" (UniqueName: \"kubernetes.io/projected/2396929d-6af8-4f8e-9fef-b44c7ce23afa-kube-api-access-wx6cp\") pod \"nova-cell1-cell-mapping-spv5x\" (UID: \"2396929d-6af8-4f8e-9fef-b44c7ce23afa\") " pod="openstack/nova-cell1-cell-mapping-spv5x" Oct 10 16:53:12 crc kubenswrapper[4799]: I1010 16:53:12.465436 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2396929d-6af8-4f8e-9fef-b44c7ce23afa-scripts\") pod \"nova-cell1-cell-mapping-spv5x\" (UID: \"2396929d-6af8-4f8e-9fef-b44c7ce23afa\") " pod="openstack/nova-cell1-cell-mapping-spv5x" Oct 10 16:53:12 crc kubenswrapper[4799]: I1010 16:53:12.465886 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2396929d-6af8-4f8e-9fef-b44c7ce23afa-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-spv5x\" (UID: \"2396929d-6af8-4f8e-9fef-b44c7ce23afa\") " pod="openstack/nova-cell1-cell-mapping-spv5x" Oct 10 16:53:12 crc kubenswrapper[4799]: I1010 16:53:12.465965 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2396929d-6af8-4f8e-9fef-b44c7ce23afa-config-data\") pod \"nova-cell1-cell-mapping-spv5x\" (UID: \"2396929d-6af8-4f8e-9fef-b44c7ce23afa\") " pod="openstack/nova-cell1-cell-mapping-spv5x" Oct 10 16:53:12 crc kubenswrapper[4799]: I1010 16:53:12.478236 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wx6cp\" (UniqueName: \"kubernetes.io/projected/2396929d-6af8-4f8e-9fef-b44c7ce23afa-kube-api-access-wx6cp\") pod \"nova-cell1-cell-mapping-spv5x\" (UID: \"2396929d-6af8-4f8e-9fef-b44c7ce23afa\") " pod="openstack/nova-cell1-cell-mapping-spv5x" Oct 10 16:53:12 crc kubenswrapper[4799]: I1010 16:53:12.679034 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-spv5x" Oct 10 16:53:12 crc kubenswrapper[4799]: I1010 16:53:12.760042 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="dc550ba1-d76f-494e-b725-337877360fa7" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.202:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 10 16:53:12 crc kubenswrapper[4799]: I1010 16:53:12.760165 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="dc550ba1-d76f-494e-b725-337877360fa7" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.202:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 10 16:53:13 crc kubenswrapper[4799]: I1010 16:53:13.140965 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-spv5x"] Oct 10 16:53:13 crc kubenswrapper[4799]: W1010 16:53:13.145919 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2396929d_6af8_4f8e_9fef_b44c7ce23afa.slice/crio-66ea21abc8bab24004af3909ce5c877603a38b352567891c562ab89e4902f7cc WatchSource:0}: Error finding container 66ea21abc8bab24004af3909ce5c877603a38b352567891c562ab89e4902f7cc: Status 404 returned error can't find the container with id 66ea21abc8bab24004af3909ce5c877603a38b352567891c562ab89e4902f7cc Oct 10 16:53:13 crc kubenswrapper[4799]: I1010 16:53:13.177859 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"820f4f16-f7a9-4be4-ac71-ba919b532696","Type":"ContainerStarted","Data":"5e918b23ee535ace1e9b715f8ecd2762d26c9b53bf82dcf3cb687169e425c8a9"} Oct 10 16:53:13 crc kubenswrapper[4799]: I1010 16:53:13.178044 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="820f4f16-f7a9-4be4-ac71-ba919b532696" containerName="ceilometer-central-agent" containerID="cri-o://6c2032db6bdf18f707a822270aa762ba46704186f3a8f0494969a8a602f5ebec" gracePeriod=30 Oct 10 16:53:13 crc kubenswrapper[4799]: I1010 16:53:13.178315 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 10 16:53:13 crc kubenswrapper[4799]: I1010 16:53:13.178658 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="820f4f16-f7a9-4be4-ac71-ba919b532696" containerName="proxy-httpd" containerID="cri-o://5e918b23ee535ace1e9b715f8ecd2762d26c9b53bf82dcf3cb687169e425c8a9" gracePeriod=30 Oct 10 16:53:13 crc kubenswrapper[4799]: I1010 16:53:13.178714 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="820f4f16-f7a9-4be4-ac71-ba919b532696" containerName="sg-core" containerID="cri-o://3fb0c4c6a03bfd9f995e91e82d5d39a721c96ef86a81ec6baf30474fcc747e81" gracePeriod=30 Oct 10 16:53:13 crc kubenswrapper[4799]: I1010 16:53:13.178774 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="820f4f16-f7a9-4be4-ac71-ba919b532696" containerName="ceilometer-notification-agent" containerID="cri-o://90f47e090f2ff76c1c4400c101ec5b962d84bc0a3537bffbdaf0c8c2bedb9c83" gracePeriod=30 Oct 10 16:53:13 crc kubenswrapper[4799]: I1010 16:53:13.186192 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-spv5x" 
event={"ID":"2396929d-6af8-4f8e-9fef-b44c7ce23afa","Type":"ContainerStarted","Data":"66ea21abc8bab24004af3909ce5c877603a38b352567891c562ab89e4902f7cc"} Oct 10 16:53:13 crc kubenswrapper[4799]: I1010 16:53:13.593913 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-64986d45b9-khcqw" Oct 10 16:53:13 crc kubenswrapper[4799]: I1010 16:53:13.618488 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.044546358 podStartE2EDuration="5.61846832s" podCreationTimestamp="2025-10-10 16:53:08 +0000 UTC" firstStartedPulling="2025-10-10 16:53:08.968915204 +0000 UTC m=+1282.477239319" lastFinishedPulling="2025-10-10 16:53:12.542837166 +0000 UTC m=+1286.051161281" observedRunningTime="2025-10-10 16:53:13.203212495 +0000 UTC m=+1286.711536630" watchObservedRunningTime="2025-10-10 16:53:13.61846832 +0000 UTC m=+1287.126792435" Oct 10 16:53:13 crc kubenswrapper[4799]: I1010 16:53:13.687341 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-694b6b9bcc-4mgtw"] Oct 10 16:53:13 crc kubenswrapper[4799]: I1010 16:53:13.692148 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-694b6b9bcc-4mgtw" podUID="9e4317e1-d160-41a4-adbc-8226d554bc6a" containerName="dnsmasq-dns" containerID="cri-o://c9533479ea097624f2e803b15bdb79d4faabdb36fea0ba6e3f99c85cb3a21f02" gracePeriod=10 Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.180404 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-694b6b9bcc-4mgtw" Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.216965 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-spv5x" event={"ID":"2396929d-6af8-4f8e-9fef-b44c7ce23afa","Type":"ContainerStarted","Data":"e7fc78b3c8230f7e21437f35ba5f2fca7d9cf97cdf46510b2801bf13113895c9"} Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.219857 4799 generic.go:334] "Generic (PLEG): container finished" podID="9e4317e1-d160-41a4-adbc-8226d554bc6a" containerID="c9533479ea097624f2e803b15bdb79d4faabdb36fea0ba6e3f99c85cb3a21f02" exitCode=0 Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.219924 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-694b6b9bcc-4mgtw" event={"ID":"9e4317e1-d160-41a4-adbc-8226d554bc6a","Type":"ContainerDied","Data":"c9533479ea097624f2e803b15bdb79d4faabdb36fea0ba6e3f99c85cb3a21f02"} Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.219951 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-694b6b9bcc-4mgtw" event={"ID":"9e4317e1-d160-41a4-adbc-8226d554bc6a","Type":"ContainerDied","Data":"621a5bc361d8186bd3c7ab2749ebab664b94192c33d120d2ce0a13720edf632c"} Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.219969 4799 scope.go:117] "RemoveContainer" containerID="c9533479ea097624f2e803b15bdb79d4faabdb36fea0ba6e3f99c85cb3a21f02" Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.220078 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-694b6b9bcc-4mgtw" Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.239658 4799 generic.go:334] "Generic (PLEG): container finished" podID="820f4f16-f7a9-4be4-ac71-ba919b532696" containerID="5e918b23ee535ace1e9b715f8ecd2762d26c9b53bf82dcf3cb687169e425c8a9" exitCode=0 Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.239695 4799 generic.go:334] "Generic (PLEG): container finished" podID="820f4f16-f7a9-4be4-ac71-ba919b532696" containerID="3fb0c4c6a03bfd9f995e91e82d5d39a721c96ef86a81ec6baf30474fcc747e81" exitCode=2 Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.239704 4799 generic.go:334] "Generic (PLEG): container finished" podID="820f4f16-f7a9-4be4-ac71-ba919b532696" containerID="90f47e090f2ff76c1c4400c101ec5b962d84bc0a3537bffbdaf0c8c2bedb9c83" exitCode=0 Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.239727 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"820f4f16-f7a9-4be4-ac71-ba919b532696","Type":"ContainerDied","Data":"5e918b23ee535ace1e9b715f8ecd2762d26c9b53bf82dcf3cb687169e425c8a9"} Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.239765 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"820f4f16-f7a9-4be4-ac71-ba919b532696","Type":"ContainerDied","Data":"3fb0c4c6a03bfd9f995e91e82d5d39a721c96ef86a81ec6baf30474fcc747e81"} Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.239776 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"820f4f16-f7a9-4be4-ac71-ba919b532696","Type":"ContainerDied","Data":"90f47e090f2ff76c1c4400c101ec5b962d84bc0a3537bffbdaf0c8c2bedb9c83"} Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.244727 4799 scope.go:117] "RemoveContainer" containerID="c1cc9ab96e0b2bf01e77f55473311f1a9e3ab523200af1d7102d8a0cafd11b0e" Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.256268 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-spv5x" podStartSLOduration=2.256245195 podStartE2EDuration="2.256245195s" podCreationTimestamp="2025-10-10 16:53:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:53:14.240981669 +0000 UTC m=+1287.749305784" watchObservedRunningTime="2025-10-10 16:53:14.256245195 +0000 UTC m=+1287.764569310" Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.279094 4799 scope.go:117] "RemoveContainer" containerID="c9533479ea097624f2e803b15bdb79d4faabdb36fea0ba6e3f99c85cb3a21f02" Oct 10 16:53:14 crc kubenswrapper[4799]: E1010 16:53:14.279594 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9533479ea097624f2e803b15bdb79d4faabdb36fea0ba6e3f99c85cb3a21f02\": container with ID starting with c9533479ea097624f2e803b15bdb79d4faabdb36fea0ba6e3f99c85cb3a21f02 not found: ID does not exist" containerID="c9533479ea097624f2e803b15bdb79d4faabdb36fea0ba6e3f99c85cb3a21f02" Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.279628 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9533479ea097624f2e803b15bdb79d4faabdb36fea0ba6e3f99c85cb3a21f02"} err="failed to get container status \"c9533479ea097624f2e803b15bdb79d4faabdb36fea0ba6e3f99c85cb3a21f02\": rpc error: code = NotFound desc = could not find container 
\"c9533479ea097624f2e803b15bdb79d4faabdb36fea0ba6e3f99c85cb3a21f02\": container with ID starting with c9533479ea097624f2e803b15bdb79d4faabdb36fea0ba6e3f99c85cb3a21f02 not found: ID does not exist" Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.279654 4799 scope.go:117] "RemoveContainer" containerID="c1cc9ab96e0b2bf01e77f55473311f1a9e3ab523200af1d7102d8a0cafd11b0e" Oct 10 16:53:14 crc kubenswrapper[4799]: E1010 16:53:14.283950 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1cc9ab96e0b2bf01e77f55473311f1a9e3ab523200af1d7102d8a0cafd11b0e\": container with ID starting with c1cc9ab96e0b2bf01e77f55473311f1a9e3ab523200af1d7102d8a0cafd11b0e not found: ID does not exist" containerID="c1cc9ab96e0b2bf01e77f55473311f1a9e3ab523200af1d7102d8a0cafd11b0e" Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.284016 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1cc9ab96e0b2bf01e77f55473311f1a9e3ab523200af1d7102d8a0cafd11b0e"} err="failed to get container status \"c1cc9ab96e0b2bf01e77f55473311f1a9e3ab523200af1d7102d8a0cafd11b0e\": rpc error: code = NotFound desc = could not find container \"c1cc9ab96e0b2bf01e77f55473311f1a9e3ab523200af1d7102d8a0cafd11b0e\": container with ID starting with c1cc9ab96e0b2bf01e77f55473311f1a9e3ab523200af1d7102d8a0cafd11b0e not found: ID does not exist" Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.296237 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9e4317e1-d160-41a4-adbc-8226d554bc6a-dns-swift-storage-0\") pod \"9e4317e1-d160-41a4-adbc-8226d554bc6a\" (UID: \"9e4317e1-d160-41a4-adbc-8226d554bc6a\") " Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.296334 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4n44z\" (UniqueName: \"kubernetes.io/projected/9e4317e1-d160-41a4-adbc-8226d554bc6a-kube-api-access-4n44z\") pod \"9e4317e1-d160-41a4-adbc-8226d554bc6a\" (UID: \"9e4317e1-d160-41a4-adbc-8226d554bc6a\") " Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.296476 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9e4317e1-d160-41a4-adbc-8226d554bc6a-ovsdbserver-nb\") pod \"9e4317e1-d160-41a4-adbc-8226d554bc6a\" (UID: \"9e4317e1-d160-41a4-adbc-8226d554bc6a\") " Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.296586 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e4317e1-d160-41a4-adbc-8226d554bc6a-config\") pod \"9e4317e1-d160-41a4-adbc-8226d554bc6a\" (UID: \"9e4317e1-d160-41a4-adbc-8226d554bc6a\") " Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.296622 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9e4317e1-d160-41a4-adbc-8226d554bc6a-dns-svc\") pod \"9e4317e1-d160-41a4-adbc-8226d554bc6a\" (UID: \"9e4317e1-d160-41a4-adbc-8226d554bc6a\") " Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.296716 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9e4317e1-d160-41a4-adbc-8226d554bc6a-ovsdbserver-sb\") pod \"9e4317e1-d160-41a4-adbc-8226d554bc6a\" (UID: \"9e4317e1-d160-41a4-adbc-8226d554bc6a\") " 
Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.302467 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e4317e1-d160-41a4-adbc-8226d554bc6a-kube-api-access-4n44z" (OuterVolumeSpecName: "kube-api-access-4n44z") pod "9e4317e1-d160-41a4-adbc-8226d554bc6a" (UID: "9e4317e1-d160-41a4-adbc-8226d554bc6a"). InnerVolumeSpecName "kube-api-access-4n44z". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.346162 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e4317e1-d160-41a4-adbc-8226d554bc6a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9e4317e1-d160-41a4-adbc-8226d554bc6a" (UID: "9e4317e1-d160-41a4-adbc-8226d554bc6a"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.346682 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e4317e1-d160-41a4-adbc-8226d554bc6a-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "9e4317e1-d160-41a4-adbc-8226d554bc6a" (UID: "9e4317e1-d160-41a4-adbc-8226d554bc6a"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.346818 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e4317e1-d160-41a4-adbc-8226d554bc6a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9e4317e1-d160-41a4-adbc-8226d554bc6a" (UID: "9e4317e1-d160-41a4-adbc-8226d554bc6a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.351801 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e4317e1-d160-41a4-adbc-8226d554bc6a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9e4317e1-d160-41a4-adbc-8226d554bc6a" (UID: "9e4317e1-d160-41a4-adbc-8226d554bc6a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.355583 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e4317e1-d160-41a4-adbc-8226d554bc6a-config" (OuterVolumeSpecName: "config") pod "9e4317e1-d160-41a4-adbc-8226d554bc6a" (UID: "9e4317e1-d160-41a4-adbc-8226d554bc6a"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.398928 4799 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9e4317e1-d160-41a4-adbc-8226d554bc6a-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.399212 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4n44z\" (UniqueName: \"kubernetes.io/projected/9e4317e1-d160-41a4-adbc-8226d554bc6a-kube-api-access-4n44z\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.399271 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9e4317e1-d160-41a4-adbc-8226d554bc6a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.399335 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e4317e1-d160-41a4-adbc-8226d554bc6a-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.399442 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9e4317e1-d160-41a4-adbc-8226d554bc6a-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.399508 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9e4317e1-d160-41a4-adbc-8226d554bc6a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.573151 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-694b6b9bcc-4mgtw"] Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.587649 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-694b6b9bcc-4mgtw"] Oct 10 16:53:14 crc kubenswrapper[4799]: I1010 16:53:14.901158 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.012916 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q6lrz\" (UniqueName: \"kubernetes.io/projected/820f4f16-f7a9-4be4-ac71-ba919b532696-kube-api-access-q6lrz\") pod \"820f4f16-f7a9-4be4-ac71-ba919b532696\" (UID: \"820f4f16-f7a9-4be4-ac71-ba919b532696\") " Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.013237 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/820f4f16-f7a9-4be4-ac71-ba919b532696-log-httpd\") pod \"820f4f16-f7a9-4be4-ac71-ba919b532696\" (UID: \"820f4f16-f7a9-4be4-ac71-ba919b532696\") " Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.013279 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/820f4f16-f7a9-4be4-ac71-ba919b532696-ceilometer-tls-certs\") pod \"820f4f16-f7a9-4be4-ac71-ba919b532696\" (UID: \"820f4f16-f7a9-4be4-ac71-ba919b532696\") " Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.013300 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/820f4f16-f7a9-4be4-ac71-ba919b532696-scripts\") pod \"820f4f16-f7a9-4be4-ac71-ba919b532696\" (UID: \"820f4f16-f7a9-4be4-ac71-ba919b532696\") " Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.014012 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/820f4f16-f7a9-4be4-ac71-ba919b532696-combined-ca-bundle\") pod \"820f4f16-f7a9-4be4-ac71-ba919b532696\" (UID: \"820f4f16-f7a9-4be4-ac71-ba919b532696\") " Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.014096 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/820f4f16-f7a9-4be4-ac71-ba919b532696-config-data\") pod \"820f4f16-f7a9-4be4-ac71-ba919b532696\" (UID: \"820f4f16-f7a9-4be4-ac71-ba919b532696\") " Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.014119 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/820f4f16-f7a9-4be4-ac71-ba919b532696-sg-core-conf-yaml\") pod \"820f4f16-f7a9-4be4-ac71-ba919b532696\" (UID: \"820f4f16-f7a9-4be4-ac71-ba919b532696\") " Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.014143 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/820f4f16-f7a9-4be4-ac71-ba919b532696-run-httpd\") pod \"820f4f16-f7a9-4be4-ac71-ba919b532696\" (UID: \"820f4f16-f7a9-4be4-ac71-ba919b532696\") " Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.014152 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/820f4f16-f7a9-4be4-ac71-ba919b532696-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "820f4f16-f7a9-4be4-ac71-ba919b532696" (UID: "820f4f16-f7a9-4be4-ac71-ba919b532696"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.014526 4799 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/820f4f16-f7a9-4be4-ac71-ba919b532696-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.014882 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/820f4f16-f7a9-4be4-ac71-ba919b532696-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "820f4f16-f7a9-4be4-ac71-ba919b532696" (UID: "820f4f16-f7a9-4be4-ac71-ba919b532696"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.018890 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/820f4f16-f7a9-4be4-ac71-ba919b532696-scripts" (OuterVolumeSpecName: "scripts") pod "820f4f16-f7a9-4be4-ac71-ba919b532696" (UID: "820f4f16-f7a9-4be4-ac71-ba919b532696"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.019399 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/820f4f16-f7a9-4be4-ac71-ba919b532696-kube-api-access-q6lrz" (OuterVolumeSpecName: "kube-api-access-q6lrz") pod "820f4f16-f7a9-4be4-ac71-ba919b532696" (UID: "820f4f16-f7a9-4be4-ac71-ba919b532696"). InnerVolumeSpecName "kube-api-access-q6lrz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.055780 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/820f4f16-f7a9-4be4-ac71-ba919b532696-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "820f4f16-f7a9-4be4-ac71-ba919b532696" (UID: "820f4f16-f7a9-4be4-ac71-ba919b532696"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.086896 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/820f4f16-f7a9-4be4-ac71-ba919b532696-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "820f4f16-f7a9-4be4-ac71-ba919b532696" (UID: "820f4f16-f7a9-4be4-ac71-ba919b532696"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.103132 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/820f4f16-f7a9-4be4-ac71-ba919b532696-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "820f4f16-f7a9-4be4-ac71-ba919b532696" (UID: "820f4f16-f7a9-4be4-ac71-ba919b532696"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.116694 4799 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/820f4f16-f7a9-4be4-ac71-ba919b532696-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.116735 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/820f4f16-f7a9-4be4-ac71-ba919b532696-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.116749 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/820f4f16-f7a9-4be4-ac71-ba919b532696-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.116785 4799 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/820f4f16-f7a9-4be4-ac71-ba919b532696-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.116797 4799 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/820f4f16-f7a9-4be4-ac71-ba919b532696-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.116810 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q6lrz\" (UniqueName: \"kubernetes.io/projected/820f4f16-f7a9-4be4-ac71-ba919b532696-kube-api-access-q6lrz\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.143947 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/820f4f16-f7a9-4be4-ac71-ba919b532696-config-data" (OuterVolumeSpecName: "config-data") pod "820f4f16-f7a9-4be4-ac71-ba919b532696" (UID: "820f4f16-f7a9-4be4-ac71-ba919b532696"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.218558 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/820f4f16-f7a9-4be4-ac71-ba919b532696-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.250488 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.250580 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.272072 4799 generic.go:334] "Generic (PLEG): container finished" podID="820f4f16-f7a9-4be4-ac71-ba919b532696" containerID="6c2032db6bdf18f707a822270aa762ba46704186f3a8f0494969a8a602f5ebec" exitCode=0 Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.272215 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.274061 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"820f4f16-f7a9-4be4-ac71-ba919b532696","Type":"ContainerDied","Data":"6c2032db6bdf18f707a822270aa762ba46704186f3a8f0494969a8a602f5ebec"} Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.274111 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"820f4f16-f7a9-4be4-ac71-ba919b532696","Type":"ContainerDied","Data":"0ec78f8fd8729ae490343fc019d451e8ed10858c43266ab80a4baef32438afd1"} Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.274132 4799 scope.go:117] "RemoveContainer" containerID="5e918b23ee535ace1e9b715f8ecd2762d26c9b53bf82dcf3cb687169e425c8a9" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.324474 4799 scope.go:117] "RemoveContainer" containerID="3fb0c4c6a03bfd9f995e91e82d5d39a721c96ef86a81ec6baf30474fcc747e81" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.349555 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.365794 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.386581 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:53:15 crc kubenswrapper[4799]: E1010 16:53:15.386981 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="820f4f16-f7a9-4be4-ac71-ba919b532696" containerName="sg-core" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.386995 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="820f4f16-f7a9-4be4-ac71-ba919b532696" containerName="sg-core" Oct 10 16:53:15 crc kubenswrapper[4799]: E1010 16:53:15.387005 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="820f4f16-f7a9-4be4-ac71-ba919b532696" containerName="ceilometer-central-agent" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.387011 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="820f4f16-f7a9-4be4-ac71-ba919b532696" containerName="ceilometer-central-agent" Oct 10 16:53:15 crc kubenswrapper[4799]: E1010 16:53:15.387033 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="820f4f16-f7a9-4be4-ac71-ba919b532696" containerName="ceilometer-notification-agent" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.387039 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="820f4f16-f7a9-4be4-ac71-ba919b532696" containerName="ceilometer-notification-agent" Oct 10 16:53:15 crc kubenswrapper[4799]: E1010 16:53:15.387066 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="820f4f16-f7a9-4be4-ac71-ba919b532696" containerName="proxy-httpd" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.387074 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="820f4f16-f7a9-4be4-ac71-ba919b532696" containerName="proxy-httpd" Oct 10 16:53:15 crc kubenswrapper[4799]: E1010 16:53:15.387086 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e4317e1-d160-41a4-adbc-8226d554bc6a" containerName="dnsmasq-dns" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.387092 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e4317e1-d160-41a4-adbc-8226d554bc6a" containerName="dnsmasq-dns" Oct 10 16:53:15 crc kubenswrapper[4799]: E1010 16:53:15.387099 4799 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="9e4317e1-d160-41a4-adbc-8226d554bc6a" containerName="init" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.387106 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e4317e1-d160-41a4-adbc-8226d554bc6a" containerName="init" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.387269 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="820f4f16-f7a9-4be4-ac71-ba919b532696" containerName="ceilometer-notification-agent" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.387282 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="820f4f16-f7a9-4be4-ac71-ba919b532696" containerName="ceilometer-central-agent" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.387295 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="820f4f16-f7a9-4be4-ac71-ba919b532696" containerName="proxy-httpd" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.387305 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e4317e1-d160-41a4-adbc-8226d554bc6a" containerName="dnsmasq-dns" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.387316 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="820f4f16-f7a9-4be4-ac71-ba919b532696" containerName="sg-core" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.388939 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.390370 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.404790 4799 scope.go:117] "RemoveContainer" containerID="90f47e090f2ff76c1c4400c101ec5b962d84bc0a3537bffbdaf0c8c2bedb9c83" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.412662 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.414002 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.414235 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.446075 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="820f4f16-f7a9-4be4-ac71-ba919b532696" path="/var/lib/kubelet/pods/820f4f16-f7a9-4be4-ac71-ba919b532696/volumes" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.447342 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e4317e1-d160-41a4-adbc-8226d554bc6a" path="/var/lib/kubelet/pods/9e4317e1-d160-41a4-adbc-8226d554bc6a/volumes" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.473459 4799 scope.go:117] "RemoveContainer" containerID="6c2032db6bdf18f707a822270aa762ba46704186f3a8f0494969a8a602f5ebec" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.491250 4799 scope.go:117] "RemoveContainer" containerID="5e918b23ee535ace1e9b715f8ecd2762d26c9b53bf82dcf3cb687169e425c8a9" Oct 10 16:53:15 crc kubenswrapper[4799]: E1010 16:53:15.491723 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e918b23ee535ace1e9b715f8ecd2762d26c9b53bf82dcf3cb687169e425c8a9\": container with ID starting with 5e918b23ee535ace1e9b715f8ecd2762d26c9b53bf82dcf3cb687169e425c8a9 not found: ID does not exist" 
containerID="5e918b23ee535ace1e9b715f8ecd2762d26c9b53bf82dcf3cb687169e425c8a9" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.491831 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e918b23ee535ace1e9b715f8ecd2762d26c9b53bf82dcf3cb687169e425c8a9"} err="failed to get container status \"5e918b23ee535ace1e9b715f8ecd2762d26c9b53bf82dcf3cb687169e425c8a9\": rpc error: code = NotFound desc = could not find container \"5e918b23ee535ace1e9b715f8ecd2762d26c9b53bf82dcf3cb687169e425c8a9\": container with ID starting with 5e918b23ee535ace1e9b715f8ecd2762d26c9b53bf82dcf3cb687169e425c8a9 not found: ID does not exist" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.491853 4799 scope.go:117] "RemoveContainer" containerID="3fb0c4c6a03bfd9f995e91e82d5d39a721c96ef86a81ec6baf30474fcc747e81" Oct 10 16:53:15 crc kubenswrapper[4799]: E1010 16:53:15.493023 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3fb0c4c6a03bfd9f995e91e82d5d39a721c96ef86a81ec6baf30474fcc747e81\": container with ID starting with 3fb0c4c6a03bfd9f995e91e82d5d39a721c96ef86a81ec6baf30474fcc747e81 not found: ID does not exist" containerID="3fb0c4c6a03bfd9f995e91e82d5d39a721c96ef86a81ec6baf30474fcc747e81" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.493045 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3fb0c4c6a03bfd9f995e91e82d5d39a721c96ef86a81ec6baf30474fcc747e81"} err="failed to get container status \"3fb0c4c6a03bfd9f995e91e82d5d39a721c96ef86a81ec6baf30474fcc747e81\": rpc error: code = NotFound desc = could not find container \"3fb0c4c6a03bfd9f995e91e82d5d39a721c96ef86a81ec6baf30474fcc747e81\": container with ID starting with 3fb0c4c6a03bfd9f995e91e82d5d39a721c96ef86a81ec6baf30474fcc747e81 not found: ID does not exist" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.493059 4799 scope.go:117] "RemoveContainer" containerID="90f47e090f2ff76c1c4400c101ec5b962d84bc0a3537bffbdaf0c8c2bedb9c83" Oct 10 16:53:15 crc kubenswrapper[4799]: E1010 16:53:15.493266 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90f47e090f2ff76c1c4400c101ec5b962d84bc0a3537bffbdaf0c8c2bedb9c83\": container with ID starting with 90f47e090f2ff76c1c4400c101ec5b962d84bc0a3537bffbdaf0c8c2bedb9c83 not found: ID does not exist" containerID="90f47e090f2ff76c1c4400c101ec5b962d84bc0a3537bffbdaf0c8c2bedb9c83" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.493289 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90f47e090f2ff76c1c4400c101ec5b962d84bc0a3537bffbdaf0c8c2bedb9c83"} err="failed to get container status \"90f47e090f2ff76c1c4400c101ec5b962d84bc0a3537bffbdaf0c8c2bedb9c83\": rpc error: code = NotFound desc = could not find container \"90f47e090f2ff76c1c4400c101ec5b962d84bc0a3537bffbdaf0c8c2bedb9c83\": container with ID starting with 90f47e090f2ff76c1c4400c101ec5b962d84bc0a3537bffbdaf0c8c2bedb9c83 not found: ID does not exist" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.493302 4799 scope.go:117] "RemoveContainer" containerID="6c2032db6bdf18f707a822270aa762ba46704186f3a8f0494969a8a602f5ebec" Oct 10 16:53:15 crc kubenswrapper[4799]: E1010 16:53:15.493517 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"6c2032db6bdf18f707a822270aa762ba46704186f3a8f0494969a8a602f5ebec\": container with ID starting with 6c2032db6bdf18f707a822270aa762ba46704186f3a8f0494969a8a602f5ebec not found: ID does not exist" containerID="6c2032db6bdf18f707a822270aa762ba46704186f3a8f0494969a8a602f5ebec" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.493536 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c2032db6bdf18f707a822270aa762ba46704186f3a8f0494969a8a602f5ebec"} err="failed to get container status \"6c2032db6bdf18f707a822270aa762ba46704186f3a8f0494969a8a602f5ebec\": rpc error: code = NotFound desc = could not find container \"6c2032db6bdf18f707a822270aa762ba46704186f3a8f0494969a8a602f5ebec\": container with ID starting with 6c2032db6bdf18f707a822270aa762ba46704186f3a8f0494969a8a602f5ebec not found: ID does not exist" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.550735 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/baff4453-a6a2-4264-82b7-3ce7c22734f6-scripts\") pod \"ceilometer-0\" (UID: \"baff4453-a6a2-4264-82b7-3ce7c22734f6\") " pod="openstack/ceilometer-0" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.550825 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4zpv\" (UniqueName: \"kubernetes.io/projected/baff4453-a6a2-4264-82b7-3ce7c22734f6-kube-api-access-l4zpv\") pod \"ceilometer-0\" (UID: \"baff4453-a6a2-4264-82b7-3ce7c22734f6\") " pod="openstack/ceilometer-0" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.550911 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/baff4453-a6a2-4264-82b7-3ce7c22734f6-run-httpd\") pod \"ceilometer-0\" (UID: \"baff4453-a6a2-4264-82b7-3ce7c22734f6\") " pod="openstack/ceilometer-0" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.550974 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/baff4453-a6a2-4264-82b7-3ce7c22734f6-config-data\") pod \"ceilometer-0\" (UID: \"baff4453-a6a2-4264-82b7-3ce7c22734f6\") " pod="openstack/ceilometer-0" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.551021 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/baff4453-a6a2-4264-82b7-3ce7c22734f6-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"baff4453-a6a2-4264-82b7-3ce7c22734f6\") " pod="openstack/ceilometer-0" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.551048 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/baff4453-a6a2-4264-82b7-3ce7c22734f6-log-httpd\") pod \"ceilometer-0\" (UID: \"baff4453-a6a2-4264-82b7-3ce7c22734f6\") " pod="openstack/ceilometer-0" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.551074 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/baff4453-a6a2-4264-82b7-3ce7c22734f6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"baff4453-a6a2-4264-82b7-3ce7c22734f6\") " pod="openstack/ceilometer-0" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.551107 4799 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baff4453-a6a2-4264-82b7-3ce7c22734f6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"baff4453-a6a2-4264-82b7-3ce7c22734f6\") " pod="openstack/ceilometer-0" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.653187 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/baff4453-a6a2-4264-82b7-3ce7c22734f6-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"baff4453-a6a2-4264-82b7-3ce7c22734f6\") " pod="openstack/ceilometer-0" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.653253 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/baff4453-a6a2-4264-82b7-3ce7c22734f6-log-httpd\") pod \"ceilometer-0\" (UID: \"baff4453-a6a2-4264-82b7-3ce7c22734f6\") " pod="openstack/ceilometer-0" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.653286 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/baff4453-a6a2-4264-82b7-3ce7c22734f6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"baff4453-a6a2-4264-82b7-3ce7c22734f6\") " pod="openstack/ceilometer-0" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.653317 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baff4453-a6a2-4264-82b7-3ce7c22734f6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"baff4453-a6a2-4264-82b7-3ce7c22734f6\") " pod="openstack/ceilometer-0" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.653436 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/baff4453-a6a2-4264-82b7-3ce7c22734f6-scripts\") pod \"ceilometer-0\" (UID: \"baff4453-a6a2-4264-82b7-3ce7c22734f6\") " pod="openstack/ceilometer-0" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.653477 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4zpv\" (UniqueName: \"kubernetes.io/projected/baff4453-a6a2-4264-82b7-3ce7c22734f6-kube-api-access-l4zpv\") pod \"ceilometer-0\" (UID: \"baff4453-a6a2-4264-82b7-3ce7c22734f6\") " pod="openstack/ceilometer-0" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.653532 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/baff4453-a6a2-4264-82b7-3ce7c22734f6-run-httpd\") pod \"ceilometer-0\" (UID: \"baff4453-a6a2-4264-82b7-3ce7c22734f6\") " pod="openstack/ceilometer-0" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.653585 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/baff4453-a6a2-4264-82b7-3ce7c22734f6-config-data\") pod \"ceilometer-0\" (UID: \"baff4453-a6a2-4264-82b7-3ce7c22734f6\") " pod="openstack/ceilometer-0" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.655228 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/baff4453-a6a2-4264-82b7-3ce7c22734f6-log-httpd\") pod \"ceilometer-0\" (UID: \"baff4453-a6a2-4264-82b7-3ce7c22734f6\") " pod="openstack/ceilometer-0" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 
16:53:15.655294 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/baff4453-a6a2-4264-82b7-3ce7c22734f6-run-httpd\") pod \"ceilometer-0\" (UID: \"baff4453-a6a2-4264-82b7-3ce7c22734f6\") " pod="openstack/ceilometer-0" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.659845 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/baff4453-a6a2-4264-82b7-3ce7c22734f6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"baff4453-a6a2-4264-82b7-3ce7c22734f6\") " pod="openstack/ceilometer-0" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.660495 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/baff4453-a6a2-4264-82b7-3ce7c22734f6-scripts\") pod \"ceilometer-0\" (UID: \"baff4453-a6a2-4264-82b7-3ce7c22734f6\") " pod="openstack/ceilometer-0" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.660902 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/baff4453-a6a2-4264-82b7-3ce7c22734f6-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"baff4453-a6a2-4264-82b7-3ce7c22734f6\") " pod="openstack/ceilometer-0" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.662265 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baff4453-a6a2-4264-82b7-3ce7c22734f6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"baff4453-a6a2-4264-82b7-3ce7c22734f6\") " pod="openstack/ceilometer-0" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.664829 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/baff4453-a6a2-4264-82b7-3ce7c22734f6-config-data\") pod \"ceilometer-0\" (UID: \"baff4453-a6a2-4264-82b7-3ce7c22734f6\") " pod="openstack/ceilometer-0" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.675045 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l4zpv\" (UniqueName: \"kubernetes.io/projected/baff4453-a6a2-4264-82b7-3ce7c22734f6-kube-api-access-l4zpv\") pod \"ceilometer-0\" (UID: \"baff4453-a6a2-4264-82b7-3ce7c22734f6\") " pod="openstack/ceilometer-0" Oct 10 16:53:15 crc kubenswrapper[4799]: I1010 16:53:15.750435 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 10 16:53:16 crc kubenswrapper[4799]: I1010 16:53:16.253787 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:53:16 crc kubenswrapper[4799]: I1010 16:53:16.298429 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"baff4453-a6a2-4264-82b7-3ce7c22734f6","Type":"ContainerStarted","Data":"731cc8c826413a7ac0c70dcfff809c93104a8ad1d625a251ce7c0bce4ae4651e"} Oct 10 16:53:17 crc kubenswrapper[4799]: I1010 16:53:17.312205 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"baff4453-a6a2-4264-82b7-3ce7c22734f6","Type":"ContainerStarted","Data":"50bad220d7bfe5f874d25169f5fbdb3b65af8607c4752caa21f314f09fc45b07"} Oct 10 16:53:18 crc kubenswrapper[4799]: I1010 16:53:18.323343 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"baff4453-a6a2-4264-82b7-3ce7c22734f6","Type":"ContainerStarted","Data":"c9ae3f60b475964718a4b4571c9e1633350b005dde6fdf9cbe4423f5ab16bd34"} Oct 10 16:53:18 crc kubenswrapper[4799]: I1010 16:53:18.323714 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"baff4453-a6a2-4264-82b7-3ce7c22734f6","Type":"ContainerStarted","Data":"a4337b4876c0c4b3cbed0413f818cccf0ccead577c676587b002d48a2705e440"} Oct 10 16:53:18 crc kubenswrapper[4799]: I1010 16:53:18.325367 4799 generic.go:334] "Generic (PLEG): container finished" podID="2396929d-6af8-4f8e-9fef-b44c7ce23afa" containerID="e7fc78b3c8230f7e21437f35ba5f2fca7d9cf97cdf46510b2801bf13113895c9" exitCode=0 Oct 10 16:53:18 crc kubenswrapper[4799]: I1010 16:53:18.325415 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-spv5x" event={"ID":"2396929d-6af8-4f8e-9fef-b44c7ce23afa","Type":"ContainerDied","Data":"e7fc78b3c8230f7e21437f35ba5f2fca7d9cf97cdf46510b2801bf13113895c9"} Oct 10 16:53:19 crc kubenswrapper[4799]: I1010 16:53:19.681052 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-spv5x" Oct 10 16:53:19 crc kubenswrapper[4799]: I1010 16:53:19.835900 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wx6cp\" (UniqueName: \"kubernetes.io/projected/2396929d-6af8-4f8e-9fef-b44c7ce23afa-kube-api-access-wx6cp\") pod \"2396929d-6af8-4f8e-9fef-b44c7ce23afa\" (UID: \"2396929d-6af8-4f8e-9fef-b44c7ce23afa\") " Oct 10 16:53:19 crc kubenswrapper[4799]: I1010 16:53:19.836038 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2396929d-6af8-4f8e-9fef-b44c7ce23afa-config-data\") pod \"2396929d-6af8-4f8e-9fef-b44c7ce23afa\" (UID: \"2396929d-6af8-4f8e-9fef-b44c7ce23afa\") " Oct 10 16:53:19 crc kubenswrapper[4799]: I1010 16:53:19.836167 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2396929d-6af8-4f8e-9fef-b44c7ce23afa-combined-ca-bundle\") pod \"2396929d-6af8-4f8e-9fef-b44c7ce23afa\" (UID: \"2396929d-6af8-4f8e-9fef-b44c7ce23afa\") " Oct 10 16:53:19 crc kubenswrapper[4799]: I1010 16:53:19.836287 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2396929d-6af8-4f8e-9fef-b44c7ce23afa-scripts\") pod \"2396929d-6af8-4f8e-9fef-b44c7ce23afa\" (UID: \"2396929d-6af8-4f8e-9fef-b44c7ce23afa\") " Oct 10 16:53:19 crc kubenswrapper[4799]: I1010 16:53:19.840362 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2396929d-6af8-4f8e-9fef-b44c7ce23afa-scripts" (OuterVolumeSpecName: "scripts") pod "2396929d-6af8-4f8e-9fef-b44c7ce23afa" (UID: "2396929d-6af8-4f8e-9fef-b44c7ce23afa"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:53:19 crc kubenswrapper[4799]: I1010 16:53:19.840886 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2396929d-6af8-4f8e-9fef-b44c7ce23afa-kube-api-access-wx6cp" (OuterVolumeSpecName: "kube-api-access-wx6cp") pod "2396929d-6af8-4f8e-9fef-b44c7ce23afa" (UID: "2396929d-6af8-4f8e-9fef-b44c7ce23afa"). InnerVolumeSpecName "kube-api-access-wx6cp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:53:19 crc kubenswrapper[4799]: I1010 16:53:19.866342 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2396929d-6af8-4f8e-9fef-b44c7ce23afa-config-data" (OuterVolumeSpecName: "config-data") pod "2396929d-6af8-4f8e-9fef-b44c7ce23afa" (UID: "2396929d-6af8-4f8e-9fef-b44c7ce23afa"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:53:19 crc kubenswrapper[4799]: I1010 16:53:19.888017 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2396929d-6af8-4f8e-9fef-b44c7ce23afa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2396929d-6af8-4f8e-9fef-b44c7ce23afa" (UID: "2396929d-6af8-4f8e-9fef-b44c7ce23afa"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:53:19 crc kubenswrapper[4799]: I1010 16:53:19.938557 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2396929d-6af8-4f8e-9fef-b44c7ce23afa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:19 crc kubenswrapper[4799]: I1010 16:53:19.938593 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2396929d-6af8-4f8e-9fef-b44c7ce23afa-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:19 crc kubenswrapper[4799]: I1010 16:53:19.938602 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wx6cp\" (UniqueName: \"kubernetes.io/projected/2396929d-6af8-4f8e-9fef-b44c7ce23afa-kube-api-access-wx6cp\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:19 crc kubenswrapper[4799]: I1010 16:53:19.938613 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2396929d-6af8-4f8e-9fef-b44c7ce23afa-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:20 crc kubenswrapper[4799]: I1010 16:53:20.380224 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"baff4453-a6a2-4264-82b7-3ce7c22734f6","Type":"ContainerStarted","Data":"f6be7ae91d5c505d0b533031b37bb272b1ce7b62110e052a7fafbcdc9d348b24"} Oct 10 16:53:20 crc kubenswrapper[4799]: I1010 16:53:20.382449 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 10 16:53:20 crc kubenswrapper[4799]: I1010 16:53:20.389294 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-spv5x" event={"ID":"2396929d-6af8-4f8e-9fef-b44c7ce23afa","Type":"ContainerDied","Data":"66ea21abc8bab24004af3909ce5c877603a38b352567891c562ab89e4902f7cc"} Oct 10 16:53:20 crc kubenswrapper[4799]: I1010 16:53:20.389335 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="66ea21abc8bab24004af3909ce5c877603a38b352567891c562ab89e4902f7cc" Oct 10 16:53:20 crc kubenswrapper[4799]: I1010 16:53:20.389392 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-spv5x" Oct 10 16:53:20 crc kubenswrapper[4799]: I1010 16:53:20.419802 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.499917995 podStartE2EDuration="5.419777751s" podCreationTimestamp="2025-10-10 16:53:15 +0000 UTC" firstStartedPulling="2025-10-10 16:53:16.265706655 +0000 UTC m=+1289.774030800" lastFinishedPulling="2025-10-10 16:53:19.185566401 +0000 UTC m=+1292.693890556" observedRunningTime="2025-10-10 16:53:20.403609723 +0000 UTC m=+1293.911933858" watchObservedRunningTime="2025-10-10 16:53:20.419777751 +0000 UTC m=+1293.928101886" Oct 10 16:53:20 crc kubenswrapper[4799]: I1010 16:53:20.540763 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 10 16:53:20 crc kubenswrapper[4799]: I1010 16:53:20.541056 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="73962347-1cda-4c09-ade4-97eb7bdee215" containerName="nova-api-log" containerID="cri-o://8ae6269d9b128ff68b7ca387fe515c568a768963ad794c0d323b2f6a98499e16" gracePeriod=30 Oct 10 16:53:20 crc kubenswrapper[4799]: I1010 16:53:20.541155 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="73962347-1cda-4c09-ade4-97eb7bdee215" containerName="nova-api-api" containerID="cri-o://433e3df0291e42d1b28025e1db25473bfe24e681606ac3cbb380fa76839ca867" gracePeriod=30 Oct 10 16:53:20 crc kubenswrapper[4799]: I1010 16:53:20.547620 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 10 16:53:20 crc kubenswrapper[4799]: I1010 16:53:20.547909 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="b86c2c2a-e776-4223-acd0-3a4832b67cb0" containerName="nova-scheduler-scheduler" containerID="cri-o://683c1d1b9da069d04a5ff9242785ac2d1ff7be01587f4c72dd9da5b4555f4926" gracePeriod=30 Oct 10 16:53:20 crc kubenswrapper[4799]: I1010 16:53:20.634708 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 10 16:53:20 crc kubenswrapper[4799]: I1010 16:53:20.634957 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="dc550ba1-d76f-494e-b725-337877360fa7" containerName="nova-metadata-log" containerID="cri-o://2ed4d65b4875269def7a21a90dee04ee82b85598fc7409dabceea69f593953d6" gracePeriod=30 Oct 10 16:53:20 crc kubenswrapper[4799]: I1010 16:53:20.635032 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="dc550ba1-d76f-494e-b725-337877360fa7" containerName="nova-metadata-metadata" containerID="cri-o://709ca06e7bc32ac924ab6a7082972570df73a5b5a092a31b57885d573ca50d30" gracePeriod=30 Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.081803 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.262816 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/73962347-1cda-4c09-ade4-97eb7bdee215-public-tls-certs\") pod \"73962347-1cda-4c09-ade4-97eb7bdee215\" (UID: \"73962347-1cda-4c09-ade4-97eb7bdee215\") " Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.262850 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73962347-1cda-4c09-ade4-97eb7bdee215-config-data\") pod \"73962347-1cda-4c09-ade4-97eb7bdee215\" (UID: \"73962347-1cda-4c09-ade4-97eb7bdee215\") " Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.262925 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/73962347-1cda-4c09-ade4-97eb7bdee215-internal-tls-certs\") pod \"73962347-1cda-4c09-ade4-97eb7bdee215\" (UID: \"73962347-1cda-4c09-ade4-97eb7bdee215\") " Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.263122 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l59zx\" (UniqueName: \"kubernetes.io/projected/73962347-1cda-4c09-ade4-97eb7bdee215-kube-api-access-l59zx\") pod \"73962347-1cda-4c09-ade4-97eb7bdee215\" (UID: \"73962347-1cda-4c09-ade4-97eb7bdee215\") " Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.263154 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73962347-1cda-4c09-ade4-97eb7bdee215-logs\") pod \"73962347-1cda-4c09-ade4-97eb7bdee215\" (UID: \"73962347-1cda-4c09-ade4-97eb7bdee215\") " Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.263173 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73962347-1cda-4c09-ade4-97eb7bdee215-combined-ca-bundle\") pod \"73962347-1cda-4c09-ade4-97eb7bdee215\" (UID: \"73962347-1cda-4c09-ade4-97eb7bdee215\") " Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.264338 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/73962347-1cda-4c09-ade4-97eb7bdee215-logs" (OuterVolumeSpecName: "logs") pod "73962347-1cda-4c09-ade4-97eb7bdee215" (UID: "73962347-1cda-4c09-ade4-97eb7bdee215"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.269255 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73962347-1cda-4c09-ade4-97eb7bdee215-kube-api-access-l59zx" (OuterVolumeSpecName: "kube-api-access-l59zx") pod "73962347-1cda-4c09-ade4-97eb7bdee215" (UID: "73962347-1cda-4c09-ade4-97eb7bdee215"). InnerVolumeSpecName "kube-api-access-l59zx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.293837 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73962347-1cda-4c09-ade4-97eb7bdee215-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "73962347-1cda-4c09-ade4-97eb7bdee215" (UID: "73962347-1cda-4c09-ade4-97eb7bdee215"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.301532 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73962347-1cda-4c09-ade4-97eb7bdee215-config-data" (OuterVolumeSpecName: "config-data") pod "73962347-1cda-4c09-ade4-97eb7bdee215" (UID: "73962347-1cda-4c09-ade4-97eb7bdee215"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.317242 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73962347-1cda-4c09-ade4-97eb7bdee215-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "73962347-1cda-4c09-ade4-97eb7bdee215" (UID: "73962347-1cda-4c09-ade4-97eb7bdee215"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.318290 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73962347-1cda-4c09-ade4-97eb7bdee215-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "73962347-1cda-4c09-ade4-97eb7bdee215" (UID: "73962347-1cda-4c09-ade4-97eb7bdee215"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.368342 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l59zx\" (UniqueName: \"kubernetes.io/projected/73962347-1cda-4c09-ade4-97eb7bdee215-kube-api-access-l59zx\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.368555 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73962347-1cda-4c09-ade4-97eb7bdee215-logs\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.368652 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73962347-1cda-4c09-ade4-97eb7bdee215-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.371428 4799 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/73962347-1cda-4c09-ade4-97eb7bdee215-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.371533 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73962347-1cda-4c09-ade4-97eb7bdee215-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.371608 4799 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/73962347-1cda-4c09-ade4-97eb7bdee215-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.403244 4799 generic.go:334] "Generic (PLEG): container finished" podID="73962347-1cda-4c09-ade4-97eb7bdee215" containerID="433e3df0291e42d1b28025e1db25473bfe24e681606ac3cbb380fa76839ca867" exitCode=0 Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.403272 4799 generic.go:334] "Generic (PLEG): container finished" podID="73962347-1cda-4c09-ade4-97eb7bdee215" containerID="8ae6269d9b128ff68b7ca387fe515c568a768963ad794c0d323b2f6a98499e16" exitCode=143 Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.403342 
4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.407216 4799 generic.go:334] "Generic (PLEG): container finished" podID="dc550ba1-d76f-494e-b725-337877360fa7" containerID="2ed4d65b4875269def7a21a90dee04ee82b85598fc7409dabceea69f593953d6" exitCode=143 Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.417539 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"73962347-1cda-4c09-ade4-97eb7bdee215","Type":"ContainerDied","Data":"433e3df0291e42d1b28025e1db25473bfe24e681606ac3cbb380fa76839ca867"} Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.417572 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"73962347-1cda-4c09-ade4-97eb7bdee215","Type":"ContainerDied","Data":"8ae6269d9b128ff68b7ca387fe515c568a768963ad794c0d323b2f6a98499e16"} Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.417617 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"73962347-1cda-4c09-ade4-97eb7bdee215","Type":"ContainerDied","Data":"16979e064e9410d5e9be635d15c2ca1b561886100133a0c26b8970ddefc3a415"} Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.417630 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"dc550ba1-d76f-494e-b725-337877360fa7","Type":"ContainerDied","Data":"2ed4d65b4875269def7a21a90dee04ee82b85598fc7409dabceea69f593953d6"} Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.417650 4799 scope.go:117] "RemoveContainer" containerID="433e3df0291e42d1b28025e1db25473bfe24e681606ac3cbb380fa76839ca867" Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.448478 4799 scope.go:117] "RemoveContainer" containerID="8ae6269d9b128ff68b7ca387fe515c568a768963ad794c0d323b2f6a98499e16" Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.452250 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.464824 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.473815 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 10 16:53:21 crc kubenswrapper[4799]: E1010 16:53:21.474196 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73962347-1cda-4c09-ade4-97eb7bdee215" containerName="nova-api-log" Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.474207 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="73962347-1cda-4c09-ade4-97eb7bdee215" containerName="nova-api-log" Oct 10 16:53:21 crc kubenswrapper[4799]: E1010 16:53:21.474220 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2396929d-6af8-4f8e-9fef-b44c7ce23afa" containerName="nova-manage" Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.474226 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="2396929d-6af8-4f8e-9fef-b44c7ce23afa" containerName="nova-manage" Oct 10 16:53:21 crc kubenswrapper[4799]: E1010 16:53:21.474246 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73962347-1cda-4c09-ade4-97eb7bdee215" containerName="nova-api-api" Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.474251 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="73962347-1cda-4c09-ade4-97eb7bdee215" containerName="nova-api-api" Oct 10 16:53:21 crc kubenswrapper[4799]: 
I1010 16:53:21.474446 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="73962347-1cda-4c09-ade4-97eb7bdee215" containerName="nova-api-log"
Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.474462 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="73962347-1cda-4c09-ade4-97eb7bdee215" containerName="nova-api-api"
Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.474476 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="2396929d-6af8-4f8e-9fef-b44c7ce23afa" containerName="nova-manage"
Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.475433 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.478911 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc"
Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.479065 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc"
Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.479252 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.481953 4799 scope.go:117] "RemoveContainer" containerID="433e3df0291e42d1b28025e1db25473bfe24e681606ac3cbb380fa76839ca867"
Oct 10 16:53:21 crc kubenswrapper[4799]: E1010 16:53:21.484188 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"433e3df0291e42d1b28025e1db25473bfe24e681606ac3cbb380fa76839ca867\": container with ID starting with 433e3df0291e42d1b28025e1db25473bfe24e681606ac3cbb380fa76839ca867 not found: ID does not exist" containerID="433e3df0291e42d1b28025e1db25473bfe24e681606ac3cbb380fa76839ca867"
Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.484220 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"433e3df0291e42d1b28025e1db25473bfe24e681606ac3cbb380fa76839ca867"} err="failed to get container status \"433e3df0291e42d1b28025e1db25473bfe24e681606ac3cbb380fa76839ca867\": rpc error: code = NotFound desc = could not find container \"433e3df0291e42d1b28025e1db25473bfe24e681606ac3cbb380fa76839ca867\": container with ID starting with 433e3df0291e42d1b28025e1db25473bfe24e681606ac3cbb380fa76839ca867 not found: ID does not exist"
Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.484244 4799 scope.go:117] "RemoveContainer" containerID="8ae6269d9b128ff68b7ca387fe515c568a768963ad794c0d323b2f6a98499e16"
Oct 10 16:53:21 crc kubenswrapper[4799]: E1010 16:53:21.484439 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ae6269d9b128ff68b7ca387fe515c568a768963ad794c0d323b2f6a98499e16\": container with ID starting with 8ae6269d9b128ff68b7ca387fe515c568a768963ad794c0d323b2f6a98499e16 not found: ID does not exist" containerID="8ae6269d9b128ff68b7ca387fe515c568a768963ad794c0d323b2f6a98499e16"
Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.484457 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ae6269d9b128ff68b7ca387fe515c568a768963ad794c0d323b2f6a98499e16"} err="failed to get container status \"8ae6269d9b128ff68b7ca387fe515c568a768963ad794c0d323b2f6a98499e16\": rpc error: code = NotFound desc = could not find container \"8ae6269d9b128ff68b7ca387fe515c568a768963ad794c0d323b2f6a98499e16\": container with ID starting with 8ae6269d9b128ff68b7ca387fe515c568a768963ad794c0d323b2f6a98499e16 not found: ID does not exist"
Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.484468 4799 scope.go:117] "RemoveContainer" containerID="433e3df0291e42d1b28025e1db25473bfe24e681606ac3cbb380fa76839ca867"
Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.485594 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"433e3df0291e42d1b28025e1db25473bfe24e681606ac3cbb380fa76839ca867"} err="failed to get container status \"433e3df0291e42d1b28025e1db25473bfe24e681606ac3cbb380fa76839ca867\": rpc error: code = NotFound desc = could not find container \"433e3df0291e42d1b28025e1db25473bfe24e681606ac3cbb380fa76839ca867\": container with ID starting with 433e3df0291e42d1b28025e1db25473bfe24e681606ac3cbb380fa76839ca867 not found: ID does not exist"
Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.485890 4799 scope.go:117] "RemoveContainer" containerID="8ae6269d9b128ff68b7ca387fe515c568a768963ad794c0d323b2f6a98499e16"
Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.487731 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.497244 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ae6269d9b128ff68b7ca387fe515c568a768963ad794c0d323b2f6a98499e16"} err="failed to get container status \"8ae6269d9b128ff68b7ca387fe515c568a768963ad794c0d323b2f6a98499e16\": rpc error: code = NotFound desc = could not find container \"8ae6269d9b128ff68b7ca387fe515c568a768963ad794c0d323b2f6a98499e16\": container with ID starting with 8ae6269d9b128ff68b7ca387fe515c568a768963ad794c0d323b2f6a98499e16 not found: ID does not exist"
Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.574960 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2db42625-4f7b-479c-a580-c94d6cafb3fe-config-data\") pod \"nova-api-0\" (UID: \"2db42625-4f7b-479c-a580-c94d6cafb3fe\") " pod="openstack/nova-api-0"
Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.575047 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2db42625-4f7b-479c-a580-c94d6cafb3fe-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2db42625-4f7b-479c-a580-c94d6cafb3fe\") " pod="openstack/nova-api-0"
Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.575090 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2p2w9\" (UniqueName: \"kubernetes.io/projected/2db42625-4f7b-479c-a580-c94d6cafb3fe-kube-api-access-2p2w9\") pod \"nova-api-0\" (UID: \"2db42625-4f7b-479c-a580-c94d6cafb3fe\") " pod="openstack/nova-api-0"
Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.575141 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2db42625-4f7b-479c-a580-c94d6cafb3fe-logs\") pod \"nova-api-0\" (UID: \"2db42625-4f7b-479c-a580-c94d6cafb3fe\") " pod="openstack/nova-api-0"
Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.575182 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2db42625-4f7b-479c-a580-c94d6cafb3fe-public-tls-certs\") pod \"nova-api-0\" (UID: \"2db42625-4f7b-479c-a580-c94d6cafb3fe\") " pod="openstack/nova-api-0"
Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.575199 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2db42625-4f7b-479c-a580-c94d6cafb3fe-internal-tls-certs\") pod \"nova-api-0\" (UID: \"2db42625-4f7b-479c-a580-c94d6cafb3fe\") " pod="openstack/nova-api-0"
Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.677663 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2db42625-4f7b-479c-a580-c94d6cafb3fe-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2db42625-4f7b-479c-a580-c94d6cafb3fe\") " pod="openstack/nova-api-0"
Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.677816 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2p2w9\" (UniqueName: \"kubernetes.io/projected/2db42625-4f7b-479c-a580-c94d6cafb3fe-kube-api-access-2p2w9\") pod \"nova-api-0\" (UID: \"2db42625-4f7b-479c-a580-c94d6cafb3fe\") " pod="openstack/nova-api-0"
Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.677927 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2db42625-4f7b-479c-a580-c94d6cafb3fe-logs\") pod \"nova-api-0\" (UID: \"2db42625-4f7b-479c-a580-c94d6cafb3fe\") " pod="openstack/nova-api-0"
Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.678017 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2db42625-4f7b-479c-a580-c94d6cafb3fe-public-tls-certs\") pod \"nova-api-0\" (UID: \"2db42625-4f7b-479c-a580-c94d6cafb3fe\") " pod="openstack/nova-api-0"
Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.678058 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2db42625-4f7b-479c-a580-c94d6cafb3fe-internal-tls-certs\") pod \"nova-api-0\" (UID: \"2db42625-4f7b-479c-a580-c94d6cafb3fe\") " pod="openstack/nova-api-0"
Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.678119 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2db42625-4f7b-479c-a580-c94d6cafb3fe-config-data\") pod \"nova-api-0\" (UID: \"2db42625-4f7b-479c-a580-c94d6cafb3fe\") " pod="openstack/nova-api-0"
Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.679452 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2db42625-4f7b-479c-a580-c94d6cafb3fe-logs\") pod \"nova-api-0\" (UID: \"2db42625-4f7b-479c-a580-c94d6cafb3fe\") " pod="openstack/nova-api-0"
Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.686334 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2db42625-4f7b-479c-a580-c94d6cafb3fe-public-tls-certs\") pod \"nova-api-0\" (UID: \"2db42625-4f7b-479c-a580-c94d6cafb3fe\") " pod="openstack/nova-api-0"
Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.686488 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2db42625-4f7b-479c-a580-c94d6cafb3fe-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2db42625-4f7b-479c-a580-c94d6cafb3fe\") " pod="openstack/nova-api-0"
Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.686940 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2db42625-4f7b-479c-a580-c94d6cafb3fe-internal-tls-certs\") pod \"nova-api-0\" (UID: \"2db42625-4f7b-479c-a580-c94d6cafb3fe\") " pod="openstack/nova-api-0"
Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.692084 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2db42625-4f7b-479c-a580-c94d6cafb3fe-config-data\") pod \"nova-api-0\" (UID: \"2db42625-4f7b-479c-a580-c94d6cafb3fe\") " pod="openstack/nova-api-0"
Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.696406 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2p2w9\" (UniqueName: \"kubernetes.io/projected/2db42625-4f7b-479c-a580-c94d6cafb3fe-kube-api-access-2p2w9\") pod \"nova-api-0\" (UID: \"2db42625-4f7b-479c-a580-c94d6cafb3fe\") " pod="openstack/nova-api-0"
Oct 10 16:53:21 crc kubenswrapper[4799]: I1010 16:53:21.798942 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 10 16:53:22 crc kubenswrapper[4799]: I1010 16:53:22.232337 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Oct 10 16:53:22 crc kubenswrapper[4799]: W1010 16:53:22.241468 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2db42625_4f7b_479c_a580_c94d6cafb3fe.slice/crio-71fd6fd407296cc5885a7ecf01a417b8a732cb629421ca9799042013ff6cf68d WatchSource:0}: Error finding container 71fd6fd407296cc5885a7ecf01a417b8a732cb629421ca9799042013ff6cf68d: Status 404 returned error can't find the container with id 71fd6fd407296cc5885a7ecf01a417b8a732cb629421ca9799042013ff6cf68d
Oct 10 16:53:22 crc kubenswrapper[4799]: I1010 16:53:22.417998 4799 generic.go:334] "Generic (PLEG): container finished" podID="b86c2c2a-e776-4223-acd0-3a4832b67cb0" containerID="683c1d1b9da069d04a5ff9242785ac2d1ff7be01587f4c72dd9da5b4555f4926" exitCode=0
Oct 10 16:53:22 crc kubenswrapper[4799]: I1010 16:53:22.418100 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b86c2c2a-e776-4223-acd0-3a4832b67cb0","Type":"ContainerDied","Data":"683c1d1b9da069d04a5ff9242785ac2d1ff7be01587f4c72dd9da5b4555f4926"}
Oct 10 16:53:22 crc kubenswrapper[4799]: I1010 16:53:22.420986 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2db42625-4f7b-479c-a580-c94d6cafb3fe","Type":"ContainerStarted","Data":"71fd6fd407296cc5885a7ecf01a417b8a732cb629421ca9799042013ff6cf68d"}
Oct 10 16:53:22 crc kubenswrapper[4799]: I1010 16:53:22.662361 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Oct 10 16:53:22 crc kubenswrapper[4799]: I1010 16:53:22.707886 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b86c2c2a-e776-4223-acd0-3a4832b67cb0-config-data\") pod \"b86c2c2a-e776-4223-acd0-3a4832b67cb0\" (UID: \"b86c2c2a-e776-4223-acd0-3a4832b67cb0\") "
Oct 10 16:53:22 crc kubenswrapper[4799]: I1010 16:53:22.708193 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b86c2c2a-e776-4223-acd0-3a4832b67cb0-combined-ca-bundle\") pod \"b86c2c2a-e776-4223-acd0-3a4832b67cb0\" (UID: \"b86c2c2a-e776-4223-acd0-3a4832b67cb0\") "
Oct 10 16:53:22 crc kubenswrapper[4799]: I1010 16:53:22.708583 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dqcj5\" (UniqueName: \"kubernetes.io/projected/b86c2c2a-e776-4223-acd0-3a4832b67cb0-kube-api-access-dqcj5\") pod \"b86c2c2a-e776-4223-acd0-3a4832b67cb0\" (UID: \"b86c2c2a-e776-4223-acd0-3a4832b67cb0\") "
Oct 10 16:53:22 crc kubenswrapper[4799]: I1010 16:53:22.712624 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b86c2c2a-e776-4223-acd0-3a4832b67cb0-kube-api-access-dqcj5" (OuterVolumeSpecName: "kube-api-access-dqcj5") pod "b86c2c2a-e776-4223-acd0-3a4832b67cb0" (UID: "b86c2c2a-e776-4223-acd0-3a4832b67cb0"). InnerVolumeSpecName "kube-api-access-dqcj5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 16:53:22 crc kubenswrapper[4799]: I1010 16:53:22.750402 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b86c2c2a-e776-4223-acd0-3a4832b67cb0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b86c2c2a-e776-4223-acd0-3a4832b67cb0" (UID: "b86c2c2a-e776-4223-acd0-3a4832b67cb0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 16:53:22 crc kubenswrapper[4799]: I1010 16:53:22.754878 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b86c2c2a-e776-4223-acd0-3a4832b67cb0-config-data" (OuterVolumeSpecName: "config-data") pod "b86c2c2a-e776-4223-acd0-3a4832b67cb0" (UID: "b86c2c2a-e776-4223-acd0-3a4832b67cb0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 16:53:22 crc kubenswrapper[4799]: I1010 16:53:22.814674 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b86c2c2a-e776-4223-acd0-3a4832b67cb0-config-data\") on node \"crc\" DevicePath \"\""
Oct 10 16:53:22 crc kubenswrapper[4799]: I1010 16:53:22.814708 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b86c2c2a-e776-4223-acd0-3a4832b67cb0-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 10 16:53:22 crc kubenswrapper[4799]: I1010 16:53:22.814722 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dqcj5\" (UniqueName: \"kubernetes.io/projected/b86c2c2a-e776-4223-acd0-3a4832b67cb0-kube-api-access-dqcj5\") on node \"crc\" DevicePath \"\""
Oct 10 16:53:23 crc kubenswrapper[4799]: I1010 16:53:23.440546 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Oct 10 16:53:23 crc kubenswrapper[4799]: I1010 16:53:23.446396 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73962347-1cda-4c09-ade4-97eb7bdee215" path="/var/lib/kubelet/pods/73962347-1cda-4c09-ade4-97eb7bdee215/volumes"
Oct 10 16:53:23 crc kubenswrapper[4799]: I1010 16:53:23.447945 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b86c2c2a-e776-4223-acd0-3a4832b67cb0","Type":"ContainerDied","Data":"0d258efde3054d8a9370f4fa8a9f0fb32c81919613506cd1232443da82e4aeb5"}
Oct 10 16:53:23 crc kubenswrapper[4799]: I1010 16:53:23.447999 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2db42625-4f7b-479c-a580-c94d6cafb3fe","Type":"ContainerStarted","Data":"95e8c5c7eeb44313269abe5e0811c66db445161e27df4b78e13b1117ddf8ecc1"}
Oct 10 16:53:23 crc kubenswrapper[4799]: I1010 16:53:23.448026 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2db42625-4f7b-479c-a580-c94d6cafb3fe","Type":"ContainerStarted","Data":"24962cee1d51a7d7eb5a1d25cab56e9384c0342e38d3013ef33f383559acf6d7"}
Oct 10 16:53:23 crc kubenswrapper[4799]: I1010 16:53:23.448779 4799 scope.go:117] "RemoveContainer" containerID="683c1d1b9da069d04a5ff9242785ac2d1ff7be01587f4c72dd9da5b4555f4926"
Oct 10 16:53:23 crc kubenswrapper[4799]: I1010 16:53:23.491488 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.491458036 podStartE2EDuration="2.491458036s" podCreationTimestamp="2025-10-10 16:53:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:53:23.479081122 +0000 UTC m=+1296.987405337" watchObservedRunningTime="2025-10-10 16:53:23.491458036 +0000 UTC m=+1296.999782191"
Oct 10 16:53:23 crc kubenswrapper[4799]: I1010 16:53:23.522119 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 10 16:53:23 crc kubenswrapper[4799]: I1010 16:53:23.534937 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 10 16:53:23 crc kubenswrapper[4799]: I1010 16:53:23.550008 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Oct 10 16:53:23 crc kubenswrapper[4799]: E1010 16:53:23.550607 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b86c2c2a-e776-4223-acd0-3a4832b67cb0" containerName="nova-scheduler-scheduler"
Oct 10 16:53:23 crc kubenswrapper[4799]: I1010 16:53:23.550635 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="b86c2c2a-e776-4223-acd0-3a4832b67cb0" containerName="nova-scheduler-scheduler"
Oct 10 16:53:23 crc kubenswrapper[4799]: I1010 16:53:23.551064 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="b86c2c2a-e776-4223-acd0-3a4832b67cb0" containerName="nova-scheduler-scheduler"
Oct 10 16:53:23 crc kubenswrapper[4799]: I1010 16:53:23.552007 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Oct 10 16:53:23 crc kubenswrapper[4799]: I1010 16:53:23.554592 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Oct 10 16:53:23 crc kubenswrapper[4799]: I1010 16:53:23.563000 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 10 16:53:23 crc kubenswrapper[4799]: I1010 16:53:23.630125 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fnf4n\" (UniqueName: \"kubernetes.io/projected/7ae9763d-31dd-44c7-bf35-11a896a4f785-kube-api-access-fnf4n\") pod \"nova-scheduler-0\" (UID: \"7ae9763d-31dd-44c7-bf35-11a896a4f785\") " pod="openstack/nova-scheduler-0"
Oct 10 16:53:23 crc kubenswrapper[4799]: I1010 16:53:23.630160 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ae9763d-31dd-44c7-bf35-11a896a4f785-config-data\") pod \"nova-scheduler-0\" (UID: \"7ae9763d-31dd-44c7-bf35-11a896a4f785\") " pod="openstack/nova-scheduler-0"
Oct 10 16:53:23 crc kubenswrapper[4799]: I1010 16:53:23.630212 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ae9763d-31dd-44c7-bf35-11a896a4f785-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"7ae9763d-31dd-44c7-bf35-11a896a4f785\") " pod="openstack/nova-scheduler-0"
Oct 10 16:53:23 crc kubenswrapper[4799]: I1010 16:53:23.731982 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ae9763d-31dd-44c7-bf35-11a896a4f785-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"7ae9763d-31dd-44c7-bf35-11a896a4f785\") " pod="openstack/nova-scheduler-0"
Oct 10 16:53:23 crc kubenswrapper[4799]: I1010 16:53:23.732151 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fnf4n\" (UniqueName: \"kubernetes.io/projected/7ae9763d-31dd-44c7-bf35-11a896a4f785-kube-api-access-fnf4n\") pod \"nova-scheduler-0\" (UID: \"7ae9763d-31dd-44c7-bf35-11a896a4f785\") " pod="openstack/nova-scheduler-0"
Oct 10 16:53:23 crc kubenswrapper[4799]: I1010 16:53:23.732178 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ae9763d-31dd-44c7-bf35-11a896a4f785-config-data\") pod \"nova-scheduler-0\" (UID: \"7ae9763d-31dd-44c7-bf35-11a896a4f785\") " pod="openstack/nova-scheduler-0"
Oct 10 16:53:23 crc kubenswrapper[4799]: I1010 16:53:23.744011 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ae9763d-31dd-44c7-bf35-11a896a4f785-config-data\") pod \"nova-scheduler-0\" (UID: \"7ae9763d-31dd-44c7-bf35-11a896a4f785\") " pod="openstack/nova-scheduler-0"
Oct 10 16:53:23 crc kubenswrapper[4799]: I1010 16:53:23.751313 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ae9763d-31dd-44c7-bf35-11a896a4f785-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"7ae9763d-31dd-44c7-bf35-11a896a4f785\") " pod="openstack/nova-scheduler-0"
Oct 10 16:53:23 crc kubenswrapper[4799]: I1010 16:53:23.764683 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fnf4n\" (UniqueName: \"kubernetes.io/projected/7ae9763d-31dd-44c7-bf35-11a896a4f785-kube-api-access-fnf4n\") pod \"nova-scheduler-0\" (UID: \"7ae9763d-31dd-44c7-bf35-11a896a4f785\") " pod="openstack/nova-scheduler-0"
Oct 10 16:53:23 crc kubenswrapper[4799]: I1010 16:53:23.903433 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.270781 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.344827 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7r6sp\" (UniqueName: \"kubernetes.io/projected/dc550ba1-d76f-494e-b725-337877360fa7-kube-api-access-7r6sp\") pod \"dc550ba1-d76f-494e-b725-337877360fa7\" (UID: \"dc550ba1-d76f-494e-b725-337877360fa7\") "
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.344883 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc550ba1-d76f-494e-b725-337877360fa7-nova-metadata-tls-certs\") pod \"dc550ba1-d76f-494e-b725-337877360fa7\" (UID: \"dc550ba1-d76f-494e-b725-337877360fa7\") "
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.344909 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc550ba1-d76f-494e-b725-337877360fa7-logs\") pod \"dc550ba1-d76f-494e-b725-337877360fa7\" (UID: \"dc550ba1-d76f-494e-b725-337877360fa7\") "
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.344931 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc550ba1-d76f-494e-b725-337877360fa7-combined-ca-bundle\") pod \"dc550ba1-d76f-494e-b725-337877360fa7\" (UID: \"dc550ba1-d76f-494e-b725-337877360fa7\") "
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.344963 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc550ba1-d76f-494e-b725-337877360fa7-config-data\") pod \"dc550ba1-d76f-494e-b725-337877360fa7\" (UID: \"dc550ba1-d76f-494e-b725-337877360fa7\") "
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.345968 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc550ba1-d76f-494e-b725-337877360fa7-logs" (OuterVolumeSpecName: "logs") pod "dc550ba1-d76f-494e-b725-337877360fa7" (UID: "dc550ba1-d76f-494e-b725-337877360fa7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.351338 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc550ba1-d76f-494e-b725-337877360fa7-kube-api-access-7r6sp" (OuterVolumeSpecName: "kube-api-access-7r6sp") pod "dc550ba1-d76f-494e-b725-337877360fa7" (UID: "dc550ba1-d76f-494e-b725-337877360fa7"). InnerVolumeSpecName "kube-api-access-7r6sp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.374936 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc550ba1-d76f-494e-b725-337877360fa7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dc550ba1-d76f-494e-b725-337877360fa7" (UID: "dc550ba1-d76f-494e-b725-337877360fa7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.388804 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc550ba1-d76f-494e-b725-337877360fa7-config-data" (OuterVolumeSpecName: "config-data") pod "dc550ba1-d76f-494e-b725-337877360fa7" (UID: "dc550ba1-d76f-494e-b725-337877360fa7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.450579 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7r6sp\" (UniqueName: \"kubernetes.io/projected/dc550ba1-d76f-494e-b725-337877360fa7-kube-api-access-7r6sp\") on node \"crc\" DevicePath \"\""
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.450635 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc550ba1-d76f-494e-b725-337877360fa7-logs\") on node \"crc\" DevicePath \"\""
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.450656 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc550ba1-d76f-494e-b725-337877360fa7-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.450674 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc550ba1-d76f-494e-b725-337877360fa7-config-data\") on node \"crc\" DevicePath \"\""
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.459311 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc550ba1-d76f-494e-b725-337877360fa7-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "dc550ba1-d76f-494e-b725-337877360fa7" (UID: "dc550ba1-d76f-494e-b725-337877360fa7"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.481639 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.485198 4799 generic.go:334] "Generic (PLEG): container finished" podID="dc550ba1-d76f-494e-b725-337877360fa7" containerID="709ca06e7bc32ac924ab6a7082972570df73a5b5a092a31b57885d573ca50d30" exitCode=0
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.485369 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.485410 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"dc550ba1-d76f-494e-b725-337877360fa7","Type":"ContainerDied","Data":"709ca06e7bc32ac924ab6a7082972570df73a5b5a092a31b57885d573ca50d30"}
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.485469 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"dc550ba1-d76f-494e-b725-337877360fa7","Type":"ContainerDied","Data":"75b5a84abfadf06b61f75623f1b303c1e58267647afaa101b6f645ff95c9333b"}
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.485505 4799 scope.go:117] "RemoveContainer" containerID="709ca06e7bc32ac924ab6a7082972570df73a5b5a092a31b57885d573ca50d30"
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.555113 4799 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc550ba1-d76f-494e-b725-337877360fa7-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.570850 4799 scope.go:117] "RemoveContainer" containerID="2ed4d65b4875269def7a21a90dee04ee82b85598fc7409dabceea69f593953d6"
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.618091 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.638057 4799 scope.go:117] "RemoveContainer" containerID="709ca06e7bc32ac924ab6a7082972570df73a5b5a092a31b57885d573ca50d30"
Oct 10 16:53:24 crc kubenswrapper[4799]: E1010 16:53:24.647157 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"709ca06e7bc32ac924ab6a7082972570df73a5b5a092a31b57885d573ca50d30\": container with ID starting with 709ca06e7bc32ac924ab6a7082972570df73a5b5a092a31b57885d573ca50d30 not found: ID does not exist" containerID="709ca06e7bc32ac924ab6a7082972570df73a5b5a092a31b57885d573ca50d30"
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.647205 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"709ca06e7bc32ac924ab6a7082972570df73a5b5a092a31b57885d573ca50d30"} err="failed to get container status \"709ca06e7bc32ac924ab6a7082972570df73a5b5a092a31b57885d573ca50d30\": rpc error: code = NotFound desc = could not find container \"709ca06e7bc32ac924ab6a7082972570df73a5b5a092a31b57885d573ca50d30\": container with ID starting with 709ca06e7bc32ac924ab6a7082972570df73a5b5a092a31b57885d573ca50d30 not found: ID does not exist"
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.647238 4799 scope.go:117] "RemoveContainer" containerID="2ed4d65b4875269def7a21a90dee04ee82b85598fc7409dabceea69f593953d6"
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.667659 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Oct 10 16:53:24 crc kubenswrapper[4799]: E1010 16:53:24.669147 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ed4d65b4875269def7a21a90dee04ee82b85598fc7409dabceea69f593953d6\": container with ID starting with 2ed4d65b4875269def7a21a90dee04ee82b85598fc7409dabceea69f593953d6 not found: ID does not exist" containerID="2ed4d65b4875269def7a21a90dee04ee82b85598fc7409dabceea69f593953d6"
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.669193 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ed4d65b4875269def7a21a90dee04ee82b85598fc7409dabceea69f593953d6"} err="failed to get container status \"2ed4d65b4875269def7a21a90dee04ee82b85598fc7409dabceea69f593953d6\": rpc error: code = NotFound desc = could not find container \"2ed4d65b4875269def7a21a90dee04ee82b85598fc7409dabceea69f593953d6\": container with ID starting with 2ed4d65b4875269def7a21a90dee04ee82b85598fc7409dabceea69f593953d6 not found: ID does not exist"
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.679413 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Oct 10 16:53:24 crc kubenswrapper[4799]: E1010 16:53:24.679915 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc550ba1-d76f-494e-b725-337877360fa7" containerName="nova-metadata-log"
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.679938 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc550ba1-d76f-494e-b725-337877360fa7" containerName="nova-metadata-log"
Oct 10 16:53:24 crc kubenswrapper[4799]: E1010 16:53:24.679956 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc550ba1-d76f-494e-b725-337877360fa7" containerName="nova-metadata-metadata"
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.679963 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc550ba1-d76f-494e-b725-337877360fa7" containerName="nova-metadata-metadata"
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.680129 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc550ba1-d76f-494e-b725-337877360fa7" containerName="nova-metadata-log"
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.680156 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc550ba1-d76f-494e-b725-337877360fa7" containerName="nova-metadata-metadata"
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.681293 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.683603 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.684524 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.689591 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.769457 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e6870d5-faea-46d9-bebb-4d237b802910-logs\") pod \"nova-metadata-0\" (UID: \"5e6870d5-faea-46d9-bebb-4d237b802910\") " pod="openstack/nova-metadata-0"
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.769525 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lgzck\" (UniqueName: \"kubernetes.io/projected/5e6870d5-faea-46d9-bebb-4d237b802910-kube-api-access-lgzck\") pod \"nova-metadata-0\" (UID: \"5e6870d5-faea-46d9-bebb-4d237b802910\") " pod="openstack/nova-metadata-0"
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.769565 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e6870d5-faea-46d9-bebb-4d237b802910-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5e6870d5-faea-46d9-bebb-4d237b802910\") " pod="openstack/nova-metadata-0"
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.769648 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e6870d5-faea-46d9-bebb-4d237b802910-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5e6870d5-faea-46d9-bebb-4d237b802910\") " pod="openstack/nova-metadata-0"
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.769665 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e6870d5-faea-46d9-bebb-4d237b802910-config-data\") pod \"nova-metadata-0\" (UID: \"5e6870d5-faea-46d9-bebb-4d237b802910\") " pod="openstack/nova-metadata-0"
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.871595 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e6870d5-faea-46d9-bebb-4d237b802910-logs\") pod \"nova-metadata-0\" (UID: \"5e6870d5-faea-46d9-bebb-4d237b802910\") " pod="openstack/nova-metadata-0"
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.872079 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgzck\" (UniqueName: \"kubernetes.io/projected/5e6870d5-faea-46d9-bebb-4d237b802910-kube-api-access-lgzck\") pod \"nova-metadata-0\" (UID: \"5e6870d5-faea-46d9-bebb-4d237b802910\") " pod="openstack/nova-metadata-0"
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.872118 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e6870d5-faea-46d9-bebb-4d237b802910-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5e6870d5-faea-46d9-bebb-4d237b802910\") " pod="openstack/nova-metadata-0"
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.872198 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e6870d5-faea-46d9-bebb-4d237b802910-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5e6870d5-faea-46d9-bebb-4d237b802910\") " pod="openstack/nova-metadata-0"
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.872219 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e6870d5-faea-46d9-bebb-4d237b802910-config-data\") pod \"nova-metadata-0\" (UID: \"5e6870d5-faea-46d9-bebb-4d237b802910\") " pod="openstack/nova-metadata-0"
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.873387 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e6870d5-faea-46d9-bebb-4d237b802910-logs\") pod \"nova-metadata-0\" (UID: \"5e6870d5-faea-46d9-bebb-4d237b802910\") " pod="openstack/nova-metadata-0"
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.876346 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e6870d5-faea-46d9-bebb-4d237b802910-config-data\") pod \"nova-metadata-0\" (UID: \"5e6870d5-faea-46d9-bebb-4d237b802910\") " pod="openstack/nova-metadata-0"
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.876851 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e6870d5-faea-46d9-bebb-4d237b802910-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5e6870d5-faea-46d9-bebb-4d237b802910\") " pod="openstack/nova-metadata-0"
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.877007 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e6870d5-faea-46d9-bebb-4d237b802910-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5e6870d5-faea-46d9-bebb-4d237b802910\") " pod="openstack/nova-metadata-0"
Oct 10 16:53:24 crc kubenswrapper[4799]: I1010 16:53:24.887630 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lgzck\" (UniqueName: \"kubernetes.io/projected/5e6870d5-faea-46d9-bebb-4d237b802910-kube-api-access-lgzck\") pod \"nova-metadata-0\" (UID: \"5e6870d5-faea-46d9-bebb-4d237b802910\") " pod="openstack/nova-metadata-0"
Oct 10 16:53:25 crc kubenswrapper[4799]: I1010 16:53:25.014489 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 10 16:53:25 crc kubenswrapper[4799]: I1010 16:53:25.425126 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b86c2c2a-e776-4223-acd0-3a4832b67cb0" path="/var/lib/kubelet/pods/b86c2c2a-e776-4223-acd0-3a4832b67cb0/volumes"
Oct 10 16:53:25 crc kubenswrapper[4799]: I1010 16:53:25.426101 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc550ba1-d76f-494e-b725-337877360fa7" path="/var/lib/kubelet/pods/dc550ba1-d76f-494e-b725-337877360fa7/volumes"
Oct 10 16:53:25 crc kubenswrapper[4799]: I1010 16:53:25.486951 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Oct 10 16:53:25 crc kubenswrapper[4799]: I1010 16:53:25.520981 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5e6870d5-faea-46d9-bebb-4d237b802910","Type":"ContainerStarted","Data":"27fae14afbe6e49565b8b95bbdd2deb56421b8e68ed1a22a36f290975acc0d06"}
Oct 10 16:53:25 crc kubenswrapper[4799]: I1010 16:53:25.524376 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7ae9763d-31dd-44c7-bf35-11a896a4f785","Type":"ContainerStarted","Data":"ef0cad99b2efacd5bcd212cd155d86551ed4cc35bedc046210eca5e8e009b86f"}
Oct 10 16:53:25 crc kubenswrapper[4799]: I1010 16:53:25.524401 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7ae9763d-31dd-44c7-bf35-11a896a4f785","Type":"ContainerStarted","Data":"f8c08f14c82200271971cc8055f474c1a963f8ef65bd87b7e1189773741abb8f"}
Oct 10 16:53:25 crc kubenswrapper[4799]: I1010 16:53:25.547131 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.547112113 podStartE2EDuration="2.547112113s" podCreationTimestamp="2025-10-10 16:53:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:53:25.541629238 +0000 UTC m=+1299.049953373" watchObservedRunningTime="2025-10-10 16:53:25.547112113 +0000 UTC m=+1299.055436228"
Oct 10 16:53:26 crc kubenswrapper[4799]: I1010 16:53:26.544530 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5e6870d5-faea-46d9-bebb-4d237b802910","Type":"ContainerStarted","Data":"d6ec86e7f860ab8ca24a46400675b4f8e0135552b7120d2f42340e7afc614296"}
Oct 10 16:53:26 crc kubenswrapper[4799]: I1010 16:53:26.544904 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5e6870d5-faea-46d9-bebb-4d237b802910","Type":"ContainerStarted","Data":"aaea200524f506182ac0c5dffe0ab093f1a9490f2edc8d9d614d7b6635f1619c"}
Oct 10 16:53:26 crc kubenswrapper[4799]: I1010 16:53:26.585228 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.585198334 podStartE2EDuration="2.585198334s" podCreationTimestamp="2025-10-10 16:53:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:53:26.573515237 +0000 UTC m=+1300.081839442" watchObservedRunningTime="2025-10-10 16:53:26.585198334 +0000 UTC m=+1300.093522489"
Oct 10 16:53:28 crc kubenswrapper[4799]: I1010 16:53:28.904295 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Oct 10 16:53:30 crc kubenswrapper[4799]: I1010 16:53:30.014839 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Oct 10 16:53:30 crc kubenswrapper[4799]: I1010 16:53:30.015173 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Oct 10 16:53:31 crc kubenswrapper[4799]: I1010 16:53:31.799718 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Oct 10 16:53:31 crc kubenswrapper[4799]: I1010 16:53:31.800219 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Oct 10 16:53:32 crc kubenswrapper[4799]: I1010 16:53:32.815957 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="2db42625-4f7b-479c-a580-c94d6cafb3fe" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.208:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Oct 10 16:53:32 crc kubenswrapper[4799]: I1010 16:53:32.816018 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="2db42625-4f7b-479c-a580-c94d6cafb3fe" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.208:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Oct 10 16:53:33 crc kubenswrapper[4799]: I1010 16:53:33.905028 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Oct 10 16:53:33 crc kubenswrapper[4799]: I1010 16:53:33.967676 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Oct 10 16:53:34 crc kubenswrapper[4799]: I1010 16:53:34.668601 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Oct 10 16:53:35 crc kubenswrapper[4799]: I1010 16:53:35.015511 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Oct 10 16:53:35 crc kubenswrapper[4799]: I1010 16:53:35.015591 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Oct 10 16:53:36 crc kubenswrapper[4799]: I1010 16:53:36.028955 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="5e6870d5-faea-46d9-bebb-4d237b802910" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.210:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Oct 10 16:53:36 crc kubenswrapper[4799]: I1010 16:53:36.029252 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="5e6870d5-faea-46d9-bebb-4d237b802910" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.210:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Oct 10 16:53:41 crc kubenswrapper[4799]: I1010 16:53:41.811174 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Oct 10 16:53:41 crc kubenswrapper[4799]: I1010 16:53:41.814495 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Oct 10 16:53:41 crc kubenswrapper[4799]: I1010 16:53:41.816623 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Oct 10 16:53:41 crc kubenswrapper[4799]: I1010 16:53:41.823164 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Oct 10 16:53:42 crc kubenswrapper[4799]: I1010 16:53:42.733157 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Oct 10 16:53:42 crc kubenswrapper[4799]: I1010 16:53:42.744277 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Oct 10 16:53:45 crc kubenswrapper[4799]: I1010 16:53:45.023968 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Oct 10 16:53:45 crc kubenswrapper[4799]: I1010 16:53:45.026775 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Oct 10 16:53:45 crc kubenswrapper[4799]: I1010 16:53:45.033518 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Oct 10 16:53:45 crc kubenswrapper[4799]: I1010 16:53:45.034467 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Oct 10 16:53:45 crc kubenswrapper[4799]: I1010 16:53:45.248417 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 10 16:53:45 crc kubenswrapper[4799]: I1010 16:53:45.248474 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 10 16:53:45 crc kubenswrapper[4799]: I1010 16:53:45.767182 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Oct 10 16:54:06 crc kubenswrapper[4799]: I1010 16:54:06.609649 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"]
Oct 10 16:54:06 crc kubenswrapper[4799]: I1010 16:54:06.610398 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="95c3e251-04ea-40ab-94d0-608d6ef0d8f3" containerName="openstackclient" containerID="cri-o://7894d2a17f4958f21c9c52f542e34e11e9c6033bdf6b58c15d012c7bb5cc154f" gracePeriod=2
Oct 10 16:54:06 crc kubenswrapper[4799]: I1010 16:54:06.631617 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"]
Oct 10 16:54:06 crc kubenswrapper[4799]: I1010 16:54:06.819632 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 10 16:54:06 crc kubenswrapper[4799]: I1010 16:54:06.986744 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance2b60-account-delete-t7jh2"]
Oct 10 16:54:06 crc kubenswrapper[4799]: E1010 16:54:06.987196 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95c3e251-04ea-40ab-94d0-608d6ef0d8f3" containerName="openstackclient"
Oct 10 16:54:06 crc kubenswrapper[4799]: I1010 16:54:06.987213 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="95c3e251-04ea-40ab-94d0-608d6ef0d8f3" containerName="openstackclient"
Oct 10 16:54:06 crc kubenswrapper[4799]: I1010 16:54:06.987424 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="95c3e251-04ea-40ab-94d0-608d6ef0d8f3" containerName="openstackclient"
Oct 10 16:54:06 crc kubenswrapper[4799]: I1010 16:54:06.988144 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance2b60-account-delete-t7jh2"
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.000269 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance2b60-account-delete-t7jh2"]
Oct 10 16:54:07 crc kubenswrapper[4799]: E1010 16:54:07.013922 4799 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found
Oct 10 16:54:07 crc kubenswrapper[4799]: E1010 16:54:07.014017 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/9fd6f03f-abea-4c29-8060-0705bb0af2c7-config-data podName:9fd6f03f-abea-4c29-8060-0705bb0af2c7 nodeName:}" failed. No retries permitted until 2025-10-10 16:54:07.513993873 +0000 UTC m=+1341.022318058 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/9fd6f03f-abea-4c29-8060-0705bb0af2c7-config-data") pod "rabbitmq-server-0" (UID: "9fd6f03f-abea-4c29-8060-0705bb0af2c7") : configmap "rabbitmq-config-data" not found
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.076575 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican0700-account-delete-smncx"]
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.077736 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican0700-account-delete-smncx"
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.106251 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican0700-account-delete-smncx"]
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.111319 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p947l\" (UniqueName: \"kubernetes.io/projected/71374742-0685-4486-bb2d-97116af40765-kube-api-access-p947l\") pod \"glance2b60-account-delete-t7jh2\" (UID: \"71374742-0685-4486-bb2d-97116af40765\") " pod="openstack/glance2b60-account-delete-t7jh2"
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.130949 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cindera843-account-delete-dptkx"]
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.132199 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cindera843-account-delete-dptkx"
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.158792 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cindera843-account-delete-dptkx"]
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.215109 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9rg7\" (UniqueName: \"kubernetes.io/projected/3b59d111-5df2-4b9f-9d02-7a3f9e19d02c-kube-api-access-s9rg7\") pod \"barbican0700-account-delete-smncx\" (UID: \"3b59d111-5df2-4b9f-9d02-7a3f9e19d02c\") " pod="openstack/barbican0700-account-delete-smncx"
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.215262 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fl7x2\" (UniqueName: \"kubernetes.io/projected/2f2d77fd-b861-4589-bdb5-ad606deb3360-kube-api-access-fl7x2\") pod \"cindera843-account-delete-dptkx\" (UID: \"2f2d77fd-b861-4589-bdb5-ad606deb3360\") " pod="openstack/cindera843-account-delete-dptkx"
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.215337 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p947l\" (UniqueName: \"kubernetes.io/projected/71374742-0685-4486-bb2d-97116af40765-kube-api-access-p947l\") pod \"glance2b60-account-delete-t7jh2\" (UID: \"71374742-0685-4486-bb2d-97116af40765\") " pod="openstack/glance2b60-account-delete-t7jh2"
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.237246 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placementcbdc-account-delete-9cz6z"]
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.238482 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placementcbdc-account-delete-9cz6z"
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.273830 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placementcbdc-account-delete-9cz6z"]
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.275400 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p947l\" (UniqueName: \"kubernetes.io/projected/71374742-0685-4486-bb2d-97116af40765-kube-api-access-p947l\") pod \"glance2b60-account-delete-t7jh2\" (UID: \"71374742-0685-4486-bb2d-97116af40765\") " pod="openstack/glance2b60-account-delete-t7jh2"
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.286228 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutronfdbc-account-delete-b8x6d"]
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.287427 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutronfdbc-account-delete-b8x6d"
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.317222 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance2b60-account-delete-t7jh2"
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.319650 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fl7x2\" (UniqueName: \"kubernetes.io/projected/2f2d77fd-b861-4589-bdb5-ad606deb3360-kube-api-access-fl7x2\") pod \"cindera843-account-delete-dptkx\" (UID: \"2f2d77fd-b861-4589-bdb5-ad606deb3360\") " pod="openstack/cindera843-account-delete-dptkx"
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.319820 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nh5cr\" (UniqueName: \"kubernetes.io/projected/f1d90c74-271d-45af-9c91-87250b178ca6-kube-api-access-nh5cr\") pod \"placementcbdc-account-delete-9cz6z\" (UID: \"f1d90c74-271d-45af-9c91-87250b178ca6\") " pod="openstack/placementcbdc-account-delete-9cz6z"
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.319992 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9rg7\" (UniqueName: \"kubernetes.io/projected/3b59d111-5df2-4b9f-9d02-7a3f9e19d02c-kube-api-access-s9rg7\") pod \"barbican0700-account-delete-smncx\" (UID: \"3b59d111-5df2-4b9f-9d02-7a3f9e19d02c\") " pod="openstack/barbican0700-account-delete-smncx"
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.341386 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"]
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.341630 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="fdfeebc0-d50f-42f8-a461-b0aea7ba6a11" containerName="ovn-northd" containerID="cri-o://ff8624f34fcb1a15fffee56784a5608f01adefaa3172b0477e52de09e0786400" gracePeriod=30
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.342038 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="fdfeebc0-d50f-42f8-a461-b0aea7ba6a11" containerName="openstack-network-exporter" containerID="cri-o://0468ca6c6abf7cc599e692ce9780b9732d1baafe60c9e8e67b98e76b296d2b35" gracePeriod=30
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.354123 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutronfdbc-account-delete-b8x6d"]
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.363049 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fl7x2\" (UniqueName: \"kubernetes.io/projected/2f2d77fd-b861-4589-bdb5-ad606deb3360-kube-api-access-fl7x2\") pod \"cindera843-account-delete-dptkx\" (UID: \"2f2d77fd-b861-4589-bdb5-ad606deb3360\") " pod="openstack/cindera843-account-delete-dptkx"
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.372956 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s9rg7\" (UniqueName: \"kubernetes.io/projected/3b59d111-5df2-4b9f-9d02-7a3f9e19d02c-kube-api-access-s9rg7\") pod \"barbican0700-account-delete-smncx\" (UID: \"3b59d111-5df2-4b9f-9d02-7a3f9e19d02c\") " pod="openstack/barbican0700-account-delete-smncx"
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.395107 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-rlrvx"]
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.395304 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-metrics-rlrvx" podUID="8f591cb1-902a-406e-b93a-56c2b7ec9cb8" containerName="openstack-network-exporter" containerID="cri-o://a9f8bcc0a5609ec522020652528dba810f803f6d9045a461106095c9c271fefd" gracePeriod=30
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.401620 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-dtplc"]
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.422240 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nh5cr\" (UniqueName: \"kubernetes.io/projected/f1d90c74-271d-45af-9c91-87250b178ca6-kube-api-access-nh5cr\") pod \"placementcbdc-account-delete-9cz6z\" (UID: \"f1d90c74-271d-45af-9c91-87250b178ca6\") " pod="openstack/placementcbdc-account-delete-9cz6z"
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.422289 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lk7sz\" (UniqueName: \"kubernetes.io/projected/445b5551-e072-43ca-a6e2-8f7fe726bb42-kube-api-access-lk7sz\") pod \"neutronfdbc-account-delete-b8x6d\" (UID: \"445b5551-e072-43ca-a6e2-8f7fe726bb42\") " pod="openstack/neutronfdbc-account-delete-b8x6d"
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.422580 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican0700-account-delete-smncx"
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.461011 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-s8gsd"]
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.480194 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cindera843-account-delete-dptkx"
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.481060 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nh5cr\" (UniqueName: \"kubernetes.io/projected/f1d90c74-271d-45af-9c91-87250b178ca6-kube-api-access-nh5cr\") pod \"placementcbdc-account-delete-9cz6z\" (UID: \"f1d90c74-271d-45af-9c91-87250b178ca6\") " pod="openstack/placementcbdc-account-delete-9cz6z"
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.490516 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/novaapieef9-account-delete-qqxp5"]
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.491679 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novaapieef9-account-delete-qqxp5"
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.512362 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novaapieef9-account-delete-qqxp5"]
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.524732 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lk7sz\" (UniqueName: \"kubernetes.io/projected/445b5551-e072-43ca-a6e2-8f7fe726bb42-kube-api-access-lk7sz\") pod \"neutronfdbc-account-delete-b8x6d\" (UID: \"445b5551-e072-43ca-a6e2-8f7fe726bb42\") " pod="openstack/neutronfdbc-account-delete-b8x6d"
Oct 10 16:54:07 crc kubenswrapper[4799]: E1010 16:54:07.527555 4799 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found
Oct 10 16:54:07 crc kubenswrapper[4799]: E1010 16:54:07.527605 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/9fd6f03f-abea-4c29-8060-0705bb0af2c7-config-data podName:9fd6f03f-abea-4c29-8060-0705bb0af2c7 nodeName:}" failed. No retries permitted until 2025-10-10 16:54:08.52758957 +0000 UTC m=+1342.035913685 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/9fd6f03f-abea-4c29-8060-0705bb0af2c7-config-data") pod "rabbitmq-server-0" (UID: "9fd6f03f-abea-4c29-8060-0705bb0af2c7") : configmap "rabbitmq-config-data" not found
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.541014 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.563074 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-v8rpx"]
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.581806 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-v8rpx"]
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.596405 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lk7sz\" (UniqueName: \"kubernetes.io/projected/445b5551-e072-43ca-a6e2-8f7fe726bb42-kube-api-access-lk7sz\") pod \"neutronfdbc-account-delete-b8x6d\" (UID: \"445b5551-e072-43ca-a6e2-8f7fe726bb42\") " pod="openstack/neutronfdbc-account-delete-b8x6d"
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.614799 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/novacell0f8a8-account-delete-6lbfn"]
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.616052 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novacell0f8a8-account-delete-6lbfn"
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.619736 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placementcbdc-account-delete-9cz6z"
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.620515 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutronfdbc-account-delete-b8x6d"
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.629299 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrd64\" (UniqueName: \"kubernetes.io/projected/4cc746af-c99b-4cb9-acde-dc3b97e424a3-kube-api-access-hrd64\") pod \"novaapieef9-account-delete-qqxp5\" (UID: \"4cc746af-c99b-4cb9-acde-dc3b97e424a3\") " pod="openstack/novaapieef9-account-delete-qqxp5"
Oct 10 16:54:07 crc kubenswrapper[4799]: E1010 16:54:07.630992 4799 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found
Oct 10 16:54:07 crc kubenswrapper[4799]: E1010 16:54:07.631039 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7858ee88-c7b9-4fb7-b825-569154134201-config-data podName:7858ee88-c7b9-4fb7-b825-569154134201 nodeName:}" failed. No retries permitted until 2025-10-10 16:54:08.131028077 +0000 UTC m=+1341.639352182 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/7858ee88-c7b9-4fb7-b825-569154134201-config-data") pod "rabbitmq-cell1-server-0" (UID: "7858ee88-c7b9-4fb7-b825-569154134201") : configmap "rabbitmq-cell1-config-data" not found
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.639573 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-mrh2w"]
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.658819 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novacell0f8a8-account-delete-6lbfn"]
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.661410 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-mrh2w"]
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.713998 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-dt6zw"]
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.731129 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49cds\" (UniqueName: \"kubernetes.io/projected/acf4a111-174e-42e8-8e71-d5bd053d5de2-kube-api-access-49cds\") pod \"novacell0f8a8-account-delete-6lbfn\" (UID: \"acf4a111-174e-42e8-8e71-d5bd053d5de2\") " pod="openstack/novacell0f8a8-account-delete-6lbfn"
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.731182 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrd64\" (UniqueName: \"kubernetes.io/projected/4cc746af-c99b-4cb9-acde-dc3b97e424a3-kube-api-access-hrd64\") pod \"novaapieef9-account-delete-qqxp5\" (UID: \"4cc746af-c99b-4cb9-acde-dc3b97e424a3\") " pod="openstack/novaapieef9-account-delete-qqxp5"
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.771400 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-dt6zw"]
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.834840 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49cds\" (UniqueName: \"kubernetes.io/projected/acf4a111-174e-42e8-8e71-d5bd053d5de2-kube-api-access-49cds\") pod \"novacell0f8a8-account-delete-6lbfn\" (UID: \"acf4a111-174e-42e8-8e71-d5bd053d5de2\") " pod="openstack/novacell0f8a8-account-delete-6lbfn"
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.878803 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrd64\" (UniqueName: \"kubernetes.io/projected/4cc746af-c99b-4cb9-acde-dc3b97e424a3-kube-api-access-hrd64\") pod \"novaapieef9-account-delete-qqxp5\" (UID: \"4cc746af-c99b-4cb9-acde-dc3b97e424a3\") " pod="openstack/novaapieef9-account-delete-qqxp5"
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.884150 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-5fpmb"]
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.884712 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49cds\" (UniqueName: \"kubernetes.io/projected/acf4a111-174e-42e8-8e71-d5bd053d5de2-kube-api-access-49cds\") pod \"novacell0f8a8-account-delete-6lbfn\" (UID: \"acf4a111-174e-42e8-8e71-d5bd053d5de2\") " pod="openstack/novacell0f8a8-account-delete-6lbfn"
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.954212 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-5fpmb"]
Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.971708 4799 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openstack/neutron-db-sync-6chg9"] Oct 10 16:54:07 crc kubenswrapper[4799]: I1010 16:54:07.989187 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-46sn9"] Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.012806 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-6chg9"] Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.024225 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-46sn9"] Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.042280 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-spv5x"] Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.053637 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-spv5x"] Oct 10 16:54:08 crc kubenswrapper[4799]: E1010 16:54:08.079108 4799 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err="command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: " execCommand=["/usr/share/ovn/scripts/ovn-ctl","stop_controller"] containerName="ovn-controller" pod="openstack/ovn-controller-s8gsd" message=< Oct 10 16:54:08 crc kubenswrapper[4799]: Exiting ovn-controller (1) [ OK ] Oct 10 16:54:08 crc kubenswrapper[4799]: > Oct 10 16:54:08 crc kubenswrapper[4799]: E1010 16:54:08.079150 4799 kuberuntime_container.go:691] "PreStop hook failed" err="command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: " pod="openstack/ovn-controller-s8gsd" podUID="038759ba-4122-4104-8699-81c76590eb2b" containerName="ovn-controller" containerID="cri-o://b3235f6df91cdc5e70f1254705cb25138102f64441cbd1220edb479a98f3de0a" Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.079188 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-s8gsd" podUID="038759ba-4122-4104-8699-81c76590eb2b" containerName="ovn-controller" containerID="cri-o://b3235f6df91cdc5e70f1254705cb25138102f64441cbd1220edb479a98f3de0a" gracePeriod=30 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.101706 4799 generic.go:334] "Generic (PLEG): container finished" podID="fdfeebc0-d50f-42f8-a461-b0aea7ba6a11" containerID="0468ca6c6abf7cc599e692ce9780b9732d1baafe60c9e8e67b98e76b296d2b35" exitCode=2 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.101781 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11","Type":"ContainerDied","Data":"0468ca6c6abf7cc599e692ce9780b9732d1baafe60c9e8e67b98e76b296d2b35"} Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.118434 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-phknx"] Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.133531 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-phknx"] Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.139763 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.140377 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="68f5ed12-8abe-46e0-a60a-086d13b7f038" containerName="openstack-network-exporter" containerID="cri-o://1626175edad35c9d874dcd747ee3e25c9a1d3785dcb4a248d8af85626d8ee7f2" gracePeriod=300 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 
16:54:08.140807 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-rlrvx_8f591cb1-902a-406e-b93a-56c2b7ec9cb8/openstack-network-exporter/0.log" Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.140854 4799 generic.go:334] "Generic (PLEG): container finished" podID="8f591cb1-902a-406e-b93a-56c2b7ec9cb8" containerID="a9f8bcc0a5609ec522020652528dba810f803f6d9045a461106095c9c271fefd" exitCode=2 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.140895 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-rlrvx" event={"ID":"8f591cb1-902a-406e-b93a-56c2b7ec9cb8","Type":"ContainerDied","Data":"a9f8bcc0a5609ec522020652528dba810f803f6d9045a461106095c9c271fefd"} Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.145212 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.145530 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="37642fb0-1d93-4e14-a176-fea38410097f" containerName="openstack-network-exporter" containerID="cri-o://f05670d9c23e36e24162d124b779c8a5b0a8aa589baac01d94d0aecd8b1875ef" gracePeriod=300 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.146020 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novaapieef9-account-delete-qqxp5" Oct 10 16:54:08 crc kubenswrapper[4799]: E1010 16:54:08.154607 4799 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Oct 10 16:54:08 crc kubenswrapper[4799]: E1010 16:54:08.164350 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7858ee88-c7b9-4fb7-b825-569154134201-config-data podName:7858ee88-c7b9-4fb7-b825-569154134201 nodeName:}" failed. No retries permitted until 2025-10-10 16:54:09.164317319 +0000 UTC m=+1342.672641434 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/7858ee88-c7b9-4fb7-b825-569154134201-config-data") pod "rabbitmq-cell1-server-0" (UID: "7858ee88-c7b9-4fb7-b825-569154134201") : configmap "rabbitmq-cell1-config-data" not found Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.178609 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/novacell0f8a8-account-delete-6lbfn" Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.186636 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-64986d45b9-khcqw"] Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.188479 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-64986d45b9-khcqw" podUID="361ecbc5-676b-42af-9eb3-fb761f842265" containerName="dnsmasq-dns" containerID="cri-o://7e43def494bc7bc9587325e681735d75ef482a0d36aa17642f1a5fcfdc38318f" gracePeriod=10 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.293512 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="37642fb0-1d93-4e14-a176-fea38410097f" containerName="ovsdbserver-nb" containerID="cri-o://78ebb853c6f1f78b3bad7b9528e8955d736b8b1c888e5a761ed264cd5256d2c8" gracePeriod=300 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.300593 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.300876 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="e424a8e6-64c8-4572-8706-33026a2cc44d" containerName="glance-log" containerID="cri-o://f9d2d1faeec7a5eede440474335541991431514b0a33516124505bcbefe52453" gracePeriod=30 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.300976 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="e424a8e6-64c8-4572-8706-33026a2cc44d" containerName="glance-httpd" containerID="cri-o://45b55f581534a90bac80ffd0b27bca1fc0d2639dbcc1d9165ca16243e681541e" gracePeriod=30 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.334677 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.363897 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="object-server" containerID="cri-o://c3e06a4a05023171ceb2e34c51e209015c056f45bfa0faadf50fd4785e2e4d80" gracePeriod=30 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.363927 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="account-server" containerID="cri-o://5dff4f861c205e245abbd15cc4d3d0d0becdc5a50f4d9bd4f1427cabc2fa0347" gracePeriod=30 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.364158 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="container-updater" containerID="cri-o://196848f6327818ee365268243d9ccad984ffe2d581cc81fbfd2de38d6676c1a0" gracePeriod=30 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.364233 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="container-auditor" containerID="cri-o://892c2480a25d808a995817609b9bbb27b39738b861f9e1834be4106363fa31e8" gracePeriod=30 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.364272 4799 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/swift-storage-0" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="swift-recon-cron" containerID="cri-o://d7211c49780feb5fa0e4a94a5ced7f5a84311b8cae847b8935e7948aa4a99e2c" gracePeriod=30 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.364319 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="container-replicator" containerID="cri-o://d0489a361daf254795ccae9ddca687c512362e7b439ec64189bd20c8ab4310b0" gracePeriod=30 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.364343 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="rsync" containerID="cri-o://94b0e5fe4497d52c34e39558472e6848a5c209b522dd73f975bdb4dc0e01da73" gracePeriod=30 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.364391 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="object-expirer" containerID="cri-o://432fefd63b99c8d28abb812f0362dcacaa5d81c188e06c3668e637ba465daf44" gracePeriod=30 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.364397 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="container-server" containerID="cri-o://5b34f901fc61925f0938ed04472b26863bfffe70a6291e77a0980d1be5dc5aa4" gracePeriod=30 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.364435 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="account-reaper" containerID="cri-o://3283576ff42b8ebc10b8fec21ffc203d5257c048d20b76e1f90800f9758835db" gracePeriod=30 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.364452 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="object-updater" containerID="cri-o://c4d301a6fc96fe120eff75102918fb4f4f64a0c1d74276e5a9d732cecede51be" gracePeriod=30 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.364501 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="object-auditor" containerID="cri-o://a0a44ae2f612b300ed982c9b9af495c2acaaf967a0729c5e19eda110019db7eb" gracePeriod=30 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.364536 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="object-replicator" containerID="cri-o://d4aecb0e485406b0a1fb96b8e50caa65a29728439d08b5b6330706ef802ddeb2" gracePeriod=30 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.364607 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="account-replicator" containerID="cri-o://2e2a8373854753a4479c039fcd2e9fbdfba1493d4e774ff602b6e261202c606e" gracePeriod=30 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.364643 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" 
podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="account-auditor" containerID="cri-o://e611df1a7b5dee2c47fcfa489e23af4e0028a72aa26eea7950fe0ec36316b663" gracePeriod=30 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.400913 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="68f5ed12-8abe-46e0-a60a-086d13b7f038" containerName="ovsdbserver-sb" containerID="cri-o://9a442c2442efda9014b2e49c109e3fd6db0be3a601326fff77372e592aa5bef9" gracePeriod=300 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.443956 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.444522 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="31fc68f8-af18-42b7-a94c-90a22afea5f1" containerName="glance-log" containerID="cri-o://f233fe566e513cc4d04821964bcde90cce13e4323a97a80af9c4e16bc8ddb102" gracePeriod=30 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.444975 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="31fc68f8-af18-42b7-a94c-90a22afea5f1" containerName="glance-httpd" containerID="cri-o://d2b3ab1b197b085ea5a23bbdabb78c44e9c002b3cd5536ddb8dc1fcd93bae475" gracePeriod=30 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.555615 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.555909 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="7dc78f94-acb0-4411-b1a2-14dd6500674b" containerName="cinder-scheduler" containerID="cri-o://a5a1b6e00a35ec28b0a11cef63bf27aa74edf00ead5c5dff888593622c9a0138" gracePeriod=30 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.556035 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="7dc78f94-acb0-4411-b1a2-14dd6500674b" containerName="probe" containerID="cri-o://45d51a08521515637f5b1846723d166ebcfd370a8e928d653837e32fd1bdcaff" gracePeriod=30 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.582418 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-6565b9cf48-rl77d"] Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.582618 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-6565b9cf48-rl77d" podUID="7ab7b7c1-e89f-4562-882b-4f517f90f8c8" containerName="placement-log" containerID="cri-o://7ff035a8a6498fce1542054aa6ef55bd158eb92c46cb410ac3528b2a07a6250d" gracePeriod=30 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.582930 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-6565b9cf48-rl77d" podUID="7ab7b7c1-e89f-4562-882b-4f517f90f8c8" containerName="placement-api" containerID="cri-o://a8fdb5ed39a199e1efb2bc1f77b2de74dd205c6dcb6fea5e9f71d53f89199fcd" gracePeriod=30 Oct 10 16:54:08 crc kubenswrapper[4799]: E1010 16:54:08.593186 4799 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Oct 10 16:54:08 crc kubenswrapper[4799]: E1010 16:54:08.593252 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/9fd6f03f-abea-4c29-8060-0705bb0af2c7-config-data 
podName:9fd6f03f-abea-4c29-8060-0705bb0af2c7 nodeName:}" failed. No retries permitted until 2025-10-10 16:54:10.59323457 +0000 UTC m=+1344.101558685 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/9fd6f03f-abea-4c29-8060-0705bb0af2c7-config-data") pod "rabbitmq-server-0" (UID: "9fd6f03f-abea-4c29-8060-0705bb0af2c7") : configmap "rabbitmq-config-data" not found Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.593378 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-64986d45b9-khcqw" podUID="361ecbc5-676b-42af-9eb3-fb761f842265" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.203:5353: connect: connection refused" Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.644401 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-69f7ddf877-mclzd"] Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.645178 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-69f7ddf877-mclzd" podUID="78820835-eb2d-40d8-a497-e9a351a9cef9" containerName="neutron-api" containerID="cri-o://89ea0df023f8cd2efabe4a60d20707c43edc3f235ad2613e512cece899859399" gracePeriod=30 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.645438 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-69f7ddf877-mclzd" podUID="78820835-eb2d-40d8-a497-e9a351a9cef9" containerName="neutron-httpd" containerID="cri-o://304a677749d95012ce2795e133dc4470b68b20824bdcbe3901e9d128f4e5ec4b" gracePeriod=30 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.729362 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-69dc9744df-smbqh"] Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.729589 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-69dc9744df-smbqh" podUID="34f6a30f-81f3-4240-8a4e-d7f1220801ab" containerName="proxy-httpd" containerID="cri-o://d89cab6d55e7c7a9cd3bd3ae258a2ce92f7ee5e9e44da70f5b9a36b753e9b185" gracePeriod=30 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.729888 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-69dc9744df-smbqh" podUID="34f6a30f-81f3-4240-8a4e-d7f1220801ab" containerName="proxy-server" containerID="cri-o://7d59d6ece2c99c837d44d3e71e1c7dc7a0dd9eaa3806c1e6d93e906cf1b5fcfd" gracePeriod=30 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.740693 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-vc285"] Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.753666 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-dtplc" podUID="e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6" containerName="ovs-vswitchd" containerID="cri-o://387afba31b4e67a9ba9f7f2877d3f3af184a7c60b3843119336ceb5759893e62" gracePeriod=29 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.760861 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-vc285"] Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.770813 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.771057 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="f9c4cd8a-6aed-4826-b23b-328645f5801f" 
containerName="cinder-api-log" containerID="cri-o://1e2bb89cca75275f38d0737f645f0b33d216e5600663a5ffc8d93f24601d9e28" gracePeriod=30 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.771496 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="f9c4cd8a-6aed-4826-b23b-328645f5801f" containerName="cinder-api" containerID="cri-o://02155113288bc0bf63cf3a3084f2f7ed5580d5f5ad54aca68e4fb0b10bb08a28" gracePeriod=30 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.783529 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance2b60-account-delete-t7jh2"] Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.800011 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-2b60-account-create-88ssw"] Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.810985 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-2b60-account-create-88ssw"] Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.827957 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.828292 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="2db42625-4f7b-479c-a580-c94d6cafb3fe" containerName="nova-api-log" containerID="cri-o://24962cee1d51a7d7eb5a1d25cab56e9384c0342e38d3013ef33f383559acf6d7" gracePeriod=30 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.828743 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="2db42625-4f7b-479c-a580-c94d6cafb3fe" containerName="nova-api-api" containerID="cri-o://95e8c5c7eeb44313269abe5e0811c66db445161e27df4b78e13b1117ddf8ecc1" gracePeriod=30 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.844918 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.876718 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.877967 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="5e6870d5-faea-46d9-bebb-4d237b802910" containerName="nova-metadata-log" containerID="cri-o://aaea200524f506182ac0c5dffe0ab093f1a9490f2edc8d9d614d7b6635f1619c" gracePeriod=30 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.878447 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="5e6870d5-faea-46d9-bebb-4d237b802910" containerName="nova-metadata-metadata" containerID="cri-o://d6ec86e7f860ab8ca24a46400675b4f8e0135552b7120d2f42340e7afc614296" gracePeriod=30 Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.903117 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-nvghl"] Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.916229 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-rlrvx_8f591cb1-902a-406e-b93a-56c2b7ec9cb8/openstack-network-exporter/0.log" Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.916298 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-rlrvx" Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.920804 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-0700-account-create-krzq2"] Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.930188 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-0700-account-create-krzq2"] Oct 10 16:54:08 crc kubenswrapper[4799]: I1010 16:54:08.956458 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-nvghl"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.014831 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican0700-account-delete-smncx"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.020581 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-ovn-rundir\") pod \"8f591cb1-902a-406e-b93a-56c2b7ec9cb8\" (UID: \"8f591cb1-902a-406e-b93a-56c2b7ec9cb8\") " Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.020947 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "8f591cb1-902a-406e-b93a-56c2b7ec9cb8" (UID: "8f591cb1-902a-406e-b93a-56c2b7ec9cb8"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.021152 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-metrics-certs-tls-certs\") pod \"8f591cb1-902a-406e-b93a-56c2b7ec9cb8\" (UID: \"8f591cb1-902a-406e-b93a-56c2b7ec9cb8\") " Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.021276 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-ovs-rundir\") pod \"8f591cb1-902a-406e-b93a-56c2b7ec9cb8\" (UID: \"8f591cb1-902a-406e-b93a-56c2b7ec9cb8\") " Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.021313 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5zcgm\" (UniqueName: \"kubernetes.io/projected/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-kube-api-access-5zcgm\") pod \"8f591cb1-902a-406e-b93a-56c2b7ec9cb8\" (UID: \"8f591cb1-902a-406e-b93a-56c2b7ec9cb8\") " Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.021367 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-config\") pod \"8f591cb1-902a-406e-b93a-56c2b7ec9cb8\" (UID: \"8f591cb1-902a-406e-b93a-56c2b7ec9cb8\") " Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.021390 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-combined-ca-bundle\") pod \"8f591cb1-902a-406e-b93a-56c2b7ec9cb8\" (UID: \"8f591cb1-902a-406e-b93a-56c2b7ec9cb8\") " Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.022247 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-ovs-rundir" (OuterVolumeSpecName: "ovs-rundir") pod 
"8f591cb1-902a-406e-b93a-56c2b7ec9cb8" (UID: "8f591cb1-902a-406e-b93a-56c2b7ec9cb8"). InnerVolumeSpecName "ovs-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.024837 4799 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-ovn-rundir\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.024859 4799 reconciler_common.go:293] "Volume detached for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-ovs-rundir\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.027341 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-config" (OuterVolumeSpecName: "config") pod "8f591cb1-902a-406e-b93a-56c2b7ec9cb8" (UID: "8f591cb1-902a-406e-b93a-56c2b7ec9cb8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.109658 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-8k2k8"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.110061 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-kube-api-access-5zcgm" (OuterVolumeSpecName: "kube-api-access-5zcgm") pod "8f591cb1-902a-406e-b93a-56c2b7ec9cb8" (UID: "8f591cb1-902a-406e-b93a-56c2b7ec9cb8"). InnerVolumeSpecName "kube-api-access-5zcgm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.116157 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-s8gsd" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.129007 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5zcgm\" (UniqueName: \"kubernetes.io/projected/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-kube-api-access-5zcgm\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.129040 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.199216 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-8k2k8"] Oct 10 16:54:09 crc kubenswrapper[4799]: E1010 16:54:09.218425 4799 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Oct 10 16:54:09 crc kubenswrapper[4799]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Oct 10 16:54:09 crc kubenswrapper[4799]: + source /usr/local/bin/container-scripts/functions Oct 10 16:54:09 crc kubenswrapper[4799]: ++ OVNBridge=br-int Oct 10 16:54:09 crc kubenswrapper[4799]: ++ OVNRemote=tcp:localhost:6642 Oct 10 16:54:09 crc kubenswrapper[4799]: ++ OVNEncapType=geneve Oct 10 16:54:09 crc kubenswrapper[4799]: ++ OVNAvailabilityZones= Oct 10 16:54:09 crc kubenswrapper[4799]: ++ EnableChassisAsGateway=true Oct 10 16:54:09 crc kubenswrapper[4799]: ++ PhysicalNetworks= Oct 10 16:54:09 crc kubenswrapper[4799]: ++ OVNHostName= Oct 10 16:54:09 crc kubenswrapper[4799]: ++ DB_FILE=/etc/openvswitch/conf.db Oct 10 16:54:09 crc kubenswrapper[4799]: ++ ovs_dir=/var/lib/openvswitch Oct 10 16:54:09 crc kubenswrapper[4799]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Oct 10 16:54:09 crc kubenswrapper[4799]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Oct 10 16:54:09 crc kubenswrapper[4799]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Oct 10 16:54:09 crc kubenswrapper[4799]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Oct 10 16:54:09 crc kubenswrapper[4799]: + sleep 0.5 Oct 10 16:54:09 crc kubenswrapper[4799]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Oct 10 16:54:09 crc kubenswrapper[4799]: + sleep 0.5 Oct 10 16:54:09 crc kubenswrapper[4799]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Oct 10 16:54:09 crc kubenswrapper[4799]: + cleanup_ovsdb_server_semaphore Oct 10 16:54:09 crc kubenswrapper[4799]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Oct 10 16:54:09 crc kubenswrapper[4799]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Oct 10 16:54:09 crc kubenswrapper[4799]: > execCommand=["/usr/local/bin/container-scripts/stop-ovsdb-server.sh"] containerName="ovsdb-server" pod="openstack/ovn-controller-ovs-dtplc" message=< Oct 10 16:54:09 crc kubenswrapper[4799]: Exiting ovsdb-server (5) ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Oct 10 16:54:09 crc kubenswrapper[4799]: + source /usr/local/bin/container-scripts/functions Oct 10 16:54:09 crc kubenswrapper[4799]: ++ OVNBridge=br-int Oct 10 16:54:09 crc kubenswrapper[4799]: ++ OVNRemote=tcp:localhost:6642 Oct 10 16:54:09 crc kubenswrapper[4799]: ++ OVNEncapType=geneve Oct 10 16:54:09 crc kubenswrapper[4799]: ++ OVNAvailabilityZones= Oct 10 16:54:09 crc kubenswrapper[4799]: ++ EnableChassisAsGateway=true Oct 10 16:54:09 crc kubenswrapper[4799]: ++ PhysicalNetworks= Oct 10 16:54:09 crc kubenswrapper[4799]: ++ OVNHostName= Oct 10 16:54:09 crc kubenswrapper[4799]: ++ DB_FILE=/etc/openvswitch/conf.db Oct 10 16:54:09 crc kubenswrapper[4799]: ++ ovs_dir=/var/lib/openvswitch Oct 10 16:54:09 crc kubenswrapper[4799]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Oct 10 16:54:09 crc kubenswrapper[4799]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Oct 10 16:54:09 crc kubenswrapper[4799]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Oct 10 16:54:09 crc kubenswrapper[4799]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Oct 10 16:54:09 crc kubenswrapper[4799]: + sleep 0.5 Oct 10 16:54:09 crc kubenswrapper[4799]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Oct 10 16:54:09 crc kubenswrapper[4799]: + sleep 0.5 Oct 10 16:54:09 crc kubenswrapper[4799]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Oct 10 16:54:09 crc kubenswrapper[4799]: + cleanup_ovsdb_server_semaphore Oct 10 16:54:09 crc kubenswrapper[4799]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Oct 10 16:54:09 crc kubenswrapper[4799]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Oct 10 16:54:09 crc kubenswrapper[4799]: > Oct 10 16:54:09 crc kubenswrapper[4799]: E1010 16:54:09.218468 4799 kuberuntime_container.go:691] "PreStop hook failed" err=< Oct 10 16:54:09 crc kubenswrapper[4799]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Oct 10 16:54:09 crc kubenswrapper[4799]: + source /usr/local/bin/container-scripts/functions Oct 10 16:54:09 crc kubenswrapper[4799]: ++ OVNBridge=br-int Oct 10 16:54:09 crc kubenswrapper[4799]: ++ OVNRemote=tcp:localhost:6642 Oct 10 16:54:09 crc kubenswrapper[4799]: ++ OVNEncapType=geneve Oct 10 16:54:09 crc kubenswrapper[4799]: ++ OVNAvailabilityZones= Oct 10 16:54:09 crc kubenswrapper[4799]: ++ EnableChassisAsGateway=true Oct 10 16:54:09 crc kubenswrapper[4799]: ++ PhysicalNetworks= Oct 10 16:54:09 crc kubenswrapper[4799]: ++ OVNHostName= Oct 10 16:54:09 crc kubenswrapper[4799]: ++ DB_FILE=/etc/openvswitch/conf.db Oct 10 16:54:09 crc kubenswrapper[4799]: ++ ovs_dir=/var/lib/openvswitch Oct 10 16:54:09 crc kubenswrapper[4799]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Oct 10 16:54:09 crc kubenswrapper[4799]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Oct 10 16:54:09 crc kubenswrapper[4799]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Oct 10 16:54:09 crc kubenswrapper[4799]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Oct 10 16:54:09 crc kubenswrapper[4799]: + sleep 0.5 Oct 10 16:54:09 crc kubenswrapper[4799]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Oct 10 16:54:09 crc kubenswrapper[4799]: + sleep 0.5 Oct 10 16:54:09 crc kubenswrapper[4799]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Oct 10 16:54:09 crc kubenswrapper[4799]: + cleanup_ovsdb_server_semaphore Oct 10 16:54:09 crc kubenswrapper[4799]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Oct 10 16:54:09 crc kubenswrapper[4799]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Oct 10 16:54:09 crc kubenswrapper[4799]: > pod="openstack/ovn-controller-ovs-dtplc" podUID="e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6" containerName="ovsdb-server" containerID="cri-o://24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.218515 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-dtplc" podUID="e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6" containerName="ovsdb-server" containerID="cri-o://24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1" gracePeriod=29 Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.260394 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/038759ba-4122-4104-8699-81c76590eb2b-var-log-ovn\") pod \"038759ba-4122-4104-8699-81c76590eb2b\" (UID: \"038759ba-4122-4104-8699-81c76590eb2b\") " Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.260426 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/038759ba-4122-4104-8699-81c76590eb2b-var-run-ovn\") pod \"038759ba-4122-4104-8699-81c76590eb2b\" (UID: \"038759ba-4122-4104-8699-81c76590eb2b\") " Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.260468 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/038759ba-4122-4104-8699-81c76590eb2b-ovn-controller-tls-certs\") pod \"038759ba-4122-4104-8699-81c76590eb2b\" (UID: \"038759ba-4122-4104-8699-81c76590eb2b\") " Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.260531 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/038759ba-4122-4104-8699-81c76590eb2b-combined-ca-bundle\") pod \"038759ba-4122-4104-8699-81c76590eb2b\" (UID: \"038759ba-4122-4104-8699-81c76590eb2b\") " Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.260728 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8mg29\" (UniqueName: \"kubernetes.io/projected/038759ba-4122-4104-8699-81c76590eb2b-kube-api-access-8mg29\") pod \"038759ba-4122-4104-8699-81c76590eb2b\" (UID: \"038759ba-4122-4104-8699-81c76590eb2b\") " Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.260782 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/038759ba-4122-4104-8699-81c76590eb2b-scripts\") pod \"038759ba-4122-4104-8699-81c76590eb2b\" (UID: \"038759ba-4122-4104-8699-81c76590eb2b\") " Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.260850 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/038759ba-4122-4104-8699-81c76590eb2b-var-run\") pod \"038759ba-4122-4104-8699-81c76590eb2b\" (UID: \"038759ba-4122-4104-8699-81c76590eb2b\") " Oct 10 16:54:09 crc kubenswrapper[4799]: E1010 16:54:09.261304 4799 configmap.go:193] Couldn't get configMap 
openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Oct 10 16:54:09 crc kubenswrapper[4799]: E1010 16:54:09.261350 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7858ee88-c7b9-4fb7-b825-569154134201-config-data podName:7858ee88-c7b9-4fb7-b825-569154134201 nodeName:}" failed. No retries permitted until 2025-10-10 16:54:11.261334931 +0000 UTC m=+1344.769659046 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/7858ee88-c7b9-4fb7-b825-569154134201-config-data") pod "rabbitmq-cell1-server-0" (UID: "7858ee88-c7b9-4fb7-b825-569154134201") : configmap "rabbitmq-cell1-config-data" not found Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.261975 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/038759ba-4122-4104-8699-81c76590eb2b-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "038759ba-4122-4104-8699-81c76590eb2b" (UID: "038759ba-4122-4104-8699-81c76590eb2b"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.262021 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/038759ba-4122-4104-8699-81c76590eb2b-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "038759ba-4122-4104-8699-81c76590eb2b" (UID: "038759ba-4122-4104-8699-81c76590eb2b"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.262850 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/038759ba-4122-4104-8699-81c76590eb2b-var-run" (OuterVolumeSpecName: "var-run") pod "038759ba-4122-4104-8699-81c76590eb2b" (UID: "038759ba-4122-4104-8699-81c76590eb2b"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.267603 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-a843-account-create-t5zlh"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.269341 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/038759ba-4122-4104-8699-81c76590eb2b-scripts" (OuterVolumeSpecName: "scripts") pod "038759ba-4122-4104-8699-81c76590eb2b" (UID: "038759ba-4122-4104-8699-81c76590eb2b"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.274164 4799 generic.go:334] "Generic (PLEG): container finished" podID="68ea0968-070a-41d4-b023-31557446c4dc" containerID="94b0e5fe4497d52c34e39558472e6848a5c209b522dd73f975bdb4dc0e01da73" exitCode=0 Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.274295 4799 generic.go:334] "Generic (PLEG): container finished" podID="68ea0968-070a-41d4-b023-31557446c4dc" containerID="432fefd63b99c8d28abb812f0362dcacaa5d81c188e06c3668e637ba465daf44" exitCode=0 Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.274350 4799 generic.go:334] "Generic (PLEG): container finished" podID="68ea0968-070a-41d4-b023-31557446c4dc" containerID="c4d301a6fc96fe120eff75102918fb4f4f64a0c1d74276e5a9d732cecede51be" exitCode=0 Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.274397 4799 generic.go:334] "Generic (PLEG): container finished" podID="68ea0968-070a-41d4-b023-31557446c4dc" containerID="a0a44ae2f612b300ed982c9b9af495c2acaaf967a0729c5e19eda110019db7eb" exitCode=0 Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.274444 4799 generic.go:334] "Generic (PLEG): container finished" podID="68ea0968-070a-41d4-b023-31557446c4dc" containerID="d4aecb0e485406b0a1fb96b8e50caa65a29728439d08b5b6330706ef802ddeb2" exitCode=0 Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.274491 4799 generic.go:334] "Generic (PLEG): container finished" podID="68ea0968-070a-41d4-b023-31557446c4dc" containerID="196848f6327818ee365268243d9ccad984ffe2d581cc81fbfd2de38d6676c1a0" exitCode=0 Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.274536 4799 generic.go:334] "Generic (PLEG): container finished" podID="68ea0968-070a-41d4-b023-31557446c4dc" containerID="892c2480a25d808a995817609b9bbb27b39738b861f9e1834be4106363fa31e8" exitCode=0 Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.274590 4799 generic.go:334] "Generic (PLEG): container finished" podID="68ea0968-070a-41d4-b023-31557446c4dc" containerID="d0489a361daf254795ccae9ddca687c512362e7b439ec64189bd20c8ab4310b0" exitCode=0 Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.274636 4799 generic.go:334] "Generic (PLEG): container finished" podID="68ea0968-070a-41d4-b023-31557446c4dc" containerID="3283576ff42b8ebc10b8fec21ffc203d5257c048d20b76e1f90800f9758835db" exitCode=0 Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.274686 4799 generic.go:334] "Generic (PLEG): container finished" podID="68ea0968-070a-41d4-b023-31557446c4dc" containerID="e611df1a7b5dee2c47fcfa489e23af4e0028a72aa26eea7950fe0ec36316b663" exitCode=0 Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.274733 4799 generic.go:334] "Generic (PLEG): container finished" podID="68ea0968-070a-41d4-b023-31557446c4dc" containerID="2e2a8373854753a4479c039fcd2e9fbdfba1493d4e774ff602b6e261202c606e" exitCode=0 Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.274796 4799 generic.go:334] "Generic (PLEG): container finished" podID="68ea0968-070a-41d4-b023-31557446c4dc" containerID="5dff4f861c205e245abbd15cc4d3d0d0becdc5a50f4d9bd4f1427cabc2fa0347" exitCode=0 Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.274886 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"68ea0968-070a-41d4-b023-31557446c4dc","Type":"ContainerDied","Data":"94b0e5fe4497d52c34e39558472e6848a5c209b522dd73f975bdb4dc0e01da73"} Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.274975 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/swift-storage-0" event={"ID":"68ea0968-070a-41d4-b023-31557446c4dc","Type":"ContainerDied","Data":"432fefd63b99c8d28abb812f0362dcacaa5d81c188e06c3668e637ba465daf44"} Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.275034 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"68ea0968-070a-41d4-b023-31557446c4dc","Type":"ContainerDied","Data":"c4d301a6fc96fe120eff75102918fb4f4f64a0c1d74276e5a9d732cecede51be"} Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.275089 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"68ea0968-070a-41d4-b023-31557446c4dc","Type":"ContainerDied","Data":"a0a44ae2f612b300ed982c9b9af495c2acaaf967a0729c5e19eda110019db7eb"} Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.275152 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"68ea0968-070a-41d4-b023-31557446c4dc","Type":"ContainerDied","Data":"d4aecb0e485406b0a1fb96b8e50caa65a29728439d08b5b6330706ef802ddeb2"} Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.275225 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"68ea0968-070a-41d4-b023-31557446c4dc","Type":"ContainerDied","Data":"196848f6327818ee365268243d9ccad984ffe2d581cc81fbfd2de38d6676c1a0"} Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.275358 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"68ea0968-070a-41d4-b023-31557446c4dc","Type":"ContainerDied","Data":"892c2480a25d808a995817609b9bbb27b39738b861f9e1834be4106363fa31e8"} Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.275418 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"68ea0968-070a-41d4-b023-31557446c4dc","Type":"ContainerDied","Data":"d0489a361daf254795ccae9ddca687c512362e7b439ec64189bd20c8ab4310b0"} Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.275477 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"68ea0968-070a-41d4-b023-31557446c4dc","Type":"ContainerDied","Data":"3283576ff42b8ebc10b8fec21ffc203d5257c048d20b76e1f90800f9758835db"} Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.275531 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"68ea0968-070a-41d4-b023-31557446c4dc","Type":"ContainerDied","Data":"e611df1a7b5dee2c47fcfa489e23af4e0028a72aa26eea7950fe0ec36316b663"} Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.275582 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"68ea0968-070a-41d4-b023-31557446c4dc","Type":"ContainerDied","Data":"2e2a8373854753a4479c039fcd2e9fbdfba1493d4e774ff602b6e261202c606e"} Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.275645 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"68ea0968-070a-41d4-b023-31557446c4dc","Type":"ContainerDied","Data":"5dff4f861c205e245abbd15cc4d3d0d0becdc5a50f4d9bd4f1427cabc2fa0347"} Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.279653 4799 generic.go:334] "Generic (PLEG): container finished" podID="31fc68f8-af18-42b7-a94c-90a22afea5f1" containerID="f233fe566e513cc4d04821964bcde90cce13e4323a97a80af9c4e16bc8ddb102" exitCode=143 Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.279781 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/glance-default-external-api-0" event={"ID":"31fc68f8-af18-42b7-a94c-90a22afea5f1","Type":"ContainerDied","Data":"f233fe566e513cc4d04821964bcde90cce13e4323a97a80af9c4e16bc8ddb102"} Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.280220 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/038759ba-4122-4104-8699-81c76590eb2b-kube-api-access-8mg29" (OuterVolumeSpecName: "kube-api-access-8mg29") pod "038759ba-4122-4104-8699-81c76590eb2b" (UID: "038759ba-4122-4104-8699-81c76590eb2b"). InnerVolumeSpecName "kube-api-access-8mg29". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.283705 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_37642fb0-1d93-4e14-a176-fea38410097f/ovsdbserver-nb/0.log" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.283849 4799 generic.go:334] "Generic (PLEG): container finished" podID="37642fb0-1d93-4e14-a176-fea38410097f" containerID="f05670d9c23e36e24162d124b779c8a5b0a8aa589baac01d94d0aecd8b1875ef" exitCode=2 Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.283910 4799 generic.go:334] "Generic (PLEG): container finished" podID="37642fb0-1d93-4e14-a176-fea38410097f" containerID="78ebb853c6f1f78b3bad7b9528e8955d736b8b1c888e5a761ed264cd5256d2c8" exitCode=143 Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.283984 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"37642fb0-1d93-4e14-a176-fea38410097f","Type":"ContainerDied","Data":"f05670d9c23e36e24162d124b779c8a5b0a8aa589baac01d94d0aecd8b1875ef"} Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.284049 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"37642fb0-1d93-4e14-a176-fea38410097f","Type":"ContainerDied","Data":"78ebb853c6f1f78b3bad7b9528e8955d736b8b1c888e5a761ed264cd5256d2c8"} Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.287099 4799 generic.go:334] "Generic (PLEG): container finished" podID="e424a8e6-64c8-4572-8706-33026a2cc44d" containerID="f9d2d1faeec7a5eede440474335541991431514b0a33516124505bcbefe52453" exitCode=143 Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.287300 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e424a8e6-64c8-4572-8706-33026a2cc44d","Type":"ContainerDied","Data":"f9d2d1faeec7a5eede440474335541991431514b0a33516124505bcbefe52453"} Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.289859 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-rlrvx_8f591cb1-902a-406e-b93a-56c2b7ec9cb8/openstack-network-exporter/0.log" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.290072 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-rlrvx" event={"ID":"8f591cb1-902a-406e-b93a-56c2b7ec9cb8","Type":"ContainerDied","Data":"b2ea6e48cc96c927f39e9c23086aaa69e556051e0d653f9e5f4a9a8694982633"} Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.290184 4799 scope.go:117] "RemoveContainer" containerID="a9f8bcc0a5609ec522020652528dba810f803f6d9045a461106095c9c271fefd" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.290367 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-rlrvx" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.318324 4799 generic.go:334] "Generic (PLEG): container finished" podID="361ecbc5-676b-42af-9eb3-fb761f842265" containerID="7e43def494bc7bc9587325e681735d75ef482a0d36aa17642f1a5fcfdc38318f" exitCode=0 Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.318422 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-64986d45b9-khcqw" event={"ID":"361ecbc5-676b-42af-9eb3-fb761f842265","Type":"ContainerDied","Data":"7e43def494bc7bc9587325e681735d75ef482a0d36aa17642f1a5fcfdc38318f"} Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.327612 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-a843-account-create-t5zlh"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.335700 4799 generic.go:334] "Generic (PLEG): container finished" podID="f9c4cd8a-6aed-4826-b23b-328645f5801f" containerID="1e2bb89cca75275f38d0737f645f0b33d216e5600663a5ffc8d93f24601d9e28" exitCode=143 Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.335744 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f9c4cd8a-6aed-4826-b23b-328645f5801f","Type":"ContainerDied","Data":"1e2bb89cca75275f38d0737f645f0b33d216e5600663a5ffc8d93f24601d9e28"} Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.347206 4799 generic.go:334] "Generic (PLEG): container finished" podID="95c3e251-04ea-40ab-94d0-608d6ef0d8f3" containerID="7894d2a17f4958f21c9c52f542e34e11e9c6033bdf6b58c15d012c7bb5cc154f" exitCode=137 Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.351337 4799 generic.go:334] "Generic (PLEG): container finished" podID="2db42625-4f7b-479c-a580-c94d6cafb3fe" containerID="24962cee1d51a7d7eb5a1d25cab56e9384c0342e38d3013ef33f383559acf6d7" exitCode=143 Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.351379 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2db42625-4f7b-479c-a580-c94d6cafb3fe","Type":"ContainerDied","Data":"24962cee1d51a7d7eb5a1d25cab56e9384c0342e38d3013ef33f383559acf6d7"} Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.356034 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_68f5ed12-8abe-46e0-a60a-086d13b7f038/ovsdbserver-sb/0.log" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.356060 4799 generic.go:334] "Generic (PLEG): container finished" podID="68f5ed12-8abe-46e0-a60a-086d13b7f038" containerID="1626175edad35c9d874dcd747ee3e25c9a1d3785dcb4a248d8af85626d8ee7f2" exitCode=2 Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.356071 4799 generic.go:334] "Generic (PLEG): container finished" podID="68f5ed12-8abe-46e0-a60a-086d13b7f038" containerID="9a442c2442efda9014b2e49c109e3fd6db0be3a601326fff77372e592aa5bef9" exitCode=143 Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.356099 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"68f5ed12-8abe-46e0-a60a-086d13b7f038","Type":"ContainerDied","Data":"1626175edad35c9d874dcd747ee3e25c9a1d3785dcb4a248d8af85626d8ee7f2"} Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.356115 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"68f5ed12-8abe-46e0-a60a-086d13b7f038","Type":"ContainerDied","Data":"9a442c2442efda9014b2e49c109e3fd6db0be3a601326fff77372e592aa5bef9"} Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 
16:54:09.367486 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cindera843-account-delete-dptkx"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.379300 4799 generic.go:334] "Generic (PLEG): container finished" podID="78820835-eb2d-40d8-a497-e9a351a9cef9" containerID="304a677749d95012ce2795e133dc4470b68b20824bdcbe3901e9d128f4e5ec4b" exitCode=0 Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.379466 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-69f7ddf877-mclzd" event={"ID":"78820835-eb2d-40d8-a497-e9a351a9cef9","Type":"ContainerDied","Data":"304a677749d95012ce2795e133dc4470b68b20824bdcbe3901e9d128f4e5ec4b"} Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.381666 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8f591cb1-902a-406e-b93a-56c2b7ec9cb8" (UID: "8f591cb1-902a-406e-b93a-56c2b7ec9cb8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.384358 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/038759ba-4122-4104-8699-81c76590eb2b-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.384387 4799 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/038759ba-4122-4104-8699-81c76590eb2b-var-run\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.384396 4799 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/038759ba-4122-4104-8699-81c76590eb2b-var-log-ovn\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.384405 4799 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/038759ba-4122-4104-8699-81c76590eb2b-var-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.384414 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.384423 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8mg29\" (UniqueName: \"kubernetes.io/projected/038759ba-4122-4104-8699-81c76590eb2b-kube-api-access-8mg29\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.392467 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.398195 4799 generic.go:334] "Generic (PLEG): container finished" podID="5e6870d5-faea-46d9-bebb-4d237b802910" containerID="aaea200524f506182ac0c5dffe0ab093f1a9490f2edc8d9d614d7b6635f1619c" exitCode=143 Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.398251 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5e6870d5-faea-46d9-bebb-4d237b802910","Type":"ContainerDied","Data":"aaea200524f506182ac0c5dffe0ab093f1a9490f2edc8d9d614d7b6635f1619c"} Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.411392 4799 generic.go:334] "Generic (PLEG): container finished" 
podID="038759ba-4122-4104-8699-81c76590eb2b" containerID="b3235f6df91cdc5e70f1254705cb25138102f64441cbd1220edb479a98f3de0a" exitCode=0 Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.411482 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-s8gsd" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.424635 4799 generic.go:334] "Generic (PLEG): container finished" podID="7ab7b7c1-e89f-4562-882b-4f517f90f8c8" containerID="7ff035a8a6498fce1542054aa6ef55bd158eb92c46cb410ac3528b2a07a6250d" exitCode=143 Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.462033 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="108ef7b4-9cd2-41c6-b4d4-6f84d0cc2f64" path="/var/lib/kubelet/pods/108ef7b4-9cd2-41c6-b4d4-6f84d0cc2f64/volumes" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.462811 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2396929d-6af8-4f8e-9fef-b44c7ce23afa" path="/var/lib/kubelet/pods/2396929d-6af8-4f8e-9fef-b44c7ce23afa/volumes" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.463397 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2fcb92c1-d267-4996-9e6f-d89982c6379d" path="/var/lib/kubelet/pods/2fcb92c1-d267-4996-9e6f-d89982c6379d/volumes" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.464113 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4786fb15-fe63-491e-aa13-1e3cbd1f7eef" path="/var/lib/kubelet/pods/4786fb15-fe63-491e-aa13-1e3cbd1f7eef/volumes" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.465745 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/038759ba-4122-4104-8699-81c76590eb2b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "038759ba-4122-4104-8699-81c76590eb2b" (UID: "038759ba-4122-4104-8699-81c76590eb2b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.470977 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e0d692a-cc78-4807-a2a3-5b39c5729ee6" path="/var/lib/kubelet/pods/4e0d692a-cc78-4807-a2a3-5b39c5729ee6/volumes" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.471989 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d2833c7-9e1e-4063-93a1-54aded9b6daf" path="/var/lib/kubelet/pods/6d2833c7-9e1e-4063-93a1-54aded9b6daf/volumes" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.472668 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="875e600d-b55a-48a9-a181-3ad09c24cc41" path="/var/lib/kubelet/pods/875e600d-b55a-48a9-a181-3ad09c24cc41/volumes" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.473412 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a78f157f-ef19-4bc2-abe9-9eafd833b89f" path="/var/lib/kubelet/pods/a78f157f-ef19-4bc2-abe9-9eafd833b89f/volumes" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.474165 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-cell1-galera-0" podUID="2a28d2a6-5dfc-414b-9eed-2f412cfc7063" containerName="galera" containerID="cri-o://2252cc52e03872ad264363f4a4f2c8970a681e759e68c07793c60a2df2a41d55" gracePeriod=30 Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.477969 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8dccd24-a3ca-4f98-90b4-e2943cd228d3" path="/var/lib/kubelet/pods/a8dccd24-a3ca-4f98-90b4-e2943cd228d3/volumes" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.478664 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab2e1e52-21c9-40c3-b87f-59f38ebb7bff" path="/var/lib/kubelet/pods/ab2e1e52-21c9-40c3-b87f-59f38ebb7bff/volumes" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.479521 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="adc813f3-66fb-467b-9033-c78cba4a2d36" path="/var/lib/kubelet/pods/adc813f3-66fb-467b-9033-c78cba4a2d36/volumes" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.480290 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be996514-22c0-4da7-b8fc-cc423de20fc8" path="/var/lib/kubelet/pods/be996514-22c0-4da7-b8fc-cc423de20fc8/volumes" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.481030 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c784371b-9184-431f-93cd-92b037921b6b" path="/var/lib/kubelet/pods/c784371b-9184-431f-93cd-92b037921b6b/volumes" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.483260 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9956afc-c1f0-42b9-929b-596957f7c72f" path="/var/lib/kubelet/pods/d9956afc-c1f0-42b9-929b-596957f7c72f/volumes" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.487280 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/038759ba-4122-4104-8699-81c76590eb2b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.505476 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "8f591cb1-902a-406e-b93a-56c2b7ec9cb8" (UID: "8f591cb1-902a-406e-b93a-56c2b7ec9cb8"). 
InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.518273 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/038759ba-4122-4104-8699-81c76590eb2b-ovn-controller-tls-certs" (OuterVolumeSpecName: "ovn-controller-tls-certs") pod "038759ba-4122-4104-8699-81c76590eb2b" (UID: "038759ba-4122-4104-8699-81c76590eb2b"). InnerVolumeSpecName "ovn-controller-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.589624 4799 reconciler_common.go:293] "Volume detached for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/038759ba-4122-4104-8699-81c76590eb2b-ovn-controller-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.589690 4799 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f591cb1-902a-406e-b93a-56c2b7ec9cb8-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.641906 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="9fd6f03f-abea-4c29-8060-0705bb0af2c7" containerName="rabbitmq" containerID="cri-o://22fa1f105dd6a9317c7bfadf3f75a8ae31d05888ae18ed6871e94cd9824e1b96" gracePeriod=604800 Oct 10 16:54:09 crc kubenswrapper[4799]: E1010 16:54:09.670078 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2252cc52e03872ad264363f4a4f2c8970a681e759e68c07793c60a2df2a41d55" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Oct 10 16:54:09 crc kubenswrapper[4799]: E1010 16:54:09.682152 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2252cc52e03872ad264363f4a4f2c8970a681e759e68c07793c60a2df2a41d55" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Oct 10 16:54:09 crc kubenswrapper[4799]: E1010 16:54:09.694175 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2252cc52e03872ad264363f4a4f2c8970a681e759e68c07793c60a2df2a41d55" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Oct 10 16:54:09 crc kubenswrapper[4799]: E1010 16:54:09.694247 4799 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/openstack-cell1-galera-0" podUID="2a28d2a6-5dfc-414b-9eed-2f412cfc7063" containerName="galera" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.695106 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-6843-account-create-dm4pm"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.695136 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-s8gsd" 
event={"ID":"038759ba-4122-4104-8699-81c76590eb2b","Type":"ContainerDied","Data":"b3235f6df91cdc5e70f1254705cb25138102f64441cbd1220edb479a98f3de0a"} Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.695158 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-x9xfs"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.695171 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-x9xfs"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.695185 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-s8gsd" event={"ID":"038759ba-4122-4104-8699-81c76590eb2b","Type":"ContainerDied","Data":"98d66bf6f4ca25f2a01f7ec13370c35dda5b7a842e7cb12577c912dd8a2b9b48"} Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.695195 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6565b9cf48-rl77d" event={"ID":"7ab7b7c1-e89f-4562-882b-4f517f90f8c8","Type":"ContainerDied","Data":"7ff035a8a6498fce1542054aa6ef55bd158eb92c46cb410ac3528b2a07a6250d"} Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.695210 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-6843-account-create-dm4pm"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.695220 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-56d84d574d-x5sbm"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.695235 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-dd8jn"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.695246 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-dd8jn"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.695257 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placementcbdc-account-delete-9cz6z"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.695268 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-cbdc-account-create-jrcz2"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.695279 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-cbdc-account-create-jrcz2"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.695288 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutronfdbc-account-delete-b8x6d"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.695297 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-mqm27"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.695308 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-fdbc-account-create-kr25d"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.695320 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-mqm27"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.695329 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-fdbc-account-create-kr25d"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.695338 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-zmshk"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.695347 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-zmshk"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.695358 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/barbican-keystone-listener-5b85b5697b-9wc6c"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.695368 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapieef9-account-delete-qqxp5"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.695376 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-eef9-account-create-8rt89"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.695386 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-eef9-account-create-8rt89"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.695568 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-5b85b5697b-9wc6c" podUID="1bd0e459-efb9-463c-a8fc-d08a3194f3d9" containerName="barbican-keystone-listener-log" containerID="cri-o://6a4a38abddd264e6f436a27a2422c3d7dee920ced5f32af3275fc86edc0ea42c" gracePeriod=30 Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.697141 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-5b85b5697b-9wc6c" podUID="1bd0e459-efb9-463c-a8fc-d08a3194f3d9" containerName="barbican-keystone-listener" containerID="cri-o://92ceb7a81421c011199b208f71ac06ee53a9dc28e8d3ab70e4e869c6e631df9b" gracePeriod=30 Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.697443 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-56d84d574d-x5sbm" podUID="ac766919-d788-40da-879a-627919926594" containerName="barbican-api-log" containerID="cri-o://5af6a65cf06c9580d2979a8455e9bbfb9189ae51ad9185bd52c4a3fc972febbc" gracePeriod=30 Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.697546 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-56d84d574d-x5sbm" podUID="ac766919-d788-40da-879a-627919926594" containerName="barbican-api" containerID="cri-o://3395c2ca55d83e7b3885a78bfc0d6276c0ac7455bc1ef7cc5df2ccda1fecad2d" gracePeriod=30 Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.701089 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-srvbw"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.727804 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-64986d45b9-khcqw" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.732933 4799 scope.go:117] "RemoveContainer" containerID="b3235f6df91cdc5e70f1254705cb25138102f64441cbd1220edb479a98f3de0a" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.748546 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-f8a8-account-create-bcclb"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.748911 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_37642fb0-1d93-4e14-a176-fea38410097f/ovsdbserver-nb/0.log" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.748995 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.755728 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-srvbw"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.761644 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-f8a8-account-create-bcclb"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.790860 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell0f8a8-account-delete-6lbfn"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.805875 4799 scope.go:117] "RemoveContainer" containerID="b3235f6df91cdc5e70f1254705cb25138102f64441cbd1220edb479a98f3de0a" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.810251 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-754947f5d7-z2rks"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.810724 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-754947f5d7-z2rks" podUID="3a05167f-cd58-4f9f-806b-8d71271320d2" containerName="barbican-worker" containerID="cri-o://44db3563c5cb7bf8e2e66a9895632efdaaa14dc9f4496418fa8fa16fcb55a2cc" gracePeriod=30 Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.810734 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-754947f5d7-z2rks" podUID="3a05167f-cd58-4f9f-806b-8d71271320d2" containerName="barbican-worker-log" containerID="cri-o://ffeaf50de5582926c54ffdb7618e55611dd1a565aacf1bd0b7bbf41db579bdee" gracePeriod=30 Oct 10 16:54:09 crc kubenswrapper[4799]: E1010 16:54:09.813200 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3235f6df91cdc5e70f1254705cb25138102f64441cbd1220edb479a98f3de0a\": container with ID starting with b3235f6df91cdc5e70f1254705cb25138102f64441cbd1220edb479a98f3de0a not found: ID does not exist" containerID="b3235f6df91cdc5e70f1254705cb25138102f64441cbd1220edb479a98f3de0a" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.813319 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3235f6df91cdc5e70f1254705cb25138102f64441cbd1220edb479a98f3de0a"} err="failed to get container status \"b3235f6df91cdc5e70f1254705cb25138102f64441cbd1220edb479a98f3de0a\": rpc error: code = NotFound desc = could not find container \"b3235f6df91cdc5e70f1254705cb25138102f64441cbd1220edb479a98f3de0a\": container with ID starting with b3235f6df91cdc5e70f1254705cb25138102f64441cbd1220edb479a98f3de0a not found: ID does not exist" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.815947 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.867522 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.899003 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/361ecbc5-676b-42af-9eb3-fb761f842265-dns-svc\") pod \"361ecbc5-676b-42af-9eb3-fb761f842265\" (UID: \"361ecbc5-676b-42af-9eb3-fb761f842265\") " Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.899055 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/37642fb0-1d93-4e14-a176-fea38410097f-ovsdbserver-nb-tls-certs\") pod \"37642fb0-1d93-4e14-a176-fea38410097f\" (UID: \"37642fb0-1d93-4e14-a176-fea38410097f\") " Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.899090 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37642fb0-1d93-4e14-a176-fea38410097f-combined-ca-bundle\") pod \"37642fb0-1d93-4e14-a176-fea38410097f\" (UID: \"37642fb0-1d93-4e14-a176-fea38410097f\") " Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.899149 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b2qsx\" (UniqueName: \"kubernetes.io/projected/361ecbc5-676b-42af-9eb3-fb761f842265-kube-api-access-b2qsx\") pod \"361ecbc5-676b-42af-9eb3-fb761f842265\" (UID: \"361ecbc5-676b-42af-9eb3-fb761f842265\") " Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.899183 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/37642fb0-1d93-4e14-a176-fea38410097f-metrics-certs-tls-certs\") pod \"37642fb0-1d93-4e14-a176-fea38410097f\" (UID: \"37642fb0-1d93-4e14-a176-fea38410097f\") " Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.899211 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/37642fb0-1d93-4e14-a176-fea38410097f-ovsdb-rundir\") pod \"37642fb0-1d93-4e14-a176-fea38410097f\" (UID: \"37642fb0-1d93-4e14-a176-fea38410097f\") " Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.899274 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/361ecbc5-676b-42af-9eb3-fb761f842265-dns-swift-storage-0\") pod \"361ecbc5-676b-42af-9eb3-fb761f842265\" (UID: \"361ecbc5-676b-42af-9eb3-fb761f842265\") " Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.899299 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/37642fb0-1d93-4e14-a176-fea38410097f-scripts\") pod \"37642fb0-1d93-4e14-a176-fea38410097f\" (UID: \"37642fb0-1d93-4e14-a176-fea38410097f\") " Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.899337 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zf4qn\" (UniqueName: \"kubernetes.io/projected/37642fb0-1d93-4e14-a176-fea38410097f-kube-api-access-zf4qn\") pod \"37642fb0-1d93-4e14-a176-fea38410097f\" (UID: \"37642fb0-1d93-4e14-a176-fea38410097f\") " Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.899364 4799 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/361ecbc5-676b-42af-9eb3-fb761f842265-ovsdbserver-sb\") pod \"361ecbc5-676b-42af-9eb3-fb761f842265\" (UID: \"361ecbc5-676b-42af-9eb3-fb761f842265\") " Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.899399 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37642fb0-1d93-4e14-a176-fea38410097f-config\") pod \"37642fb0-1d93-4e14-a176-fea38410097f\" (UID: \"37642fb0-1d93-4e14-a176-fea38410097f\") " Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.899416 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"37642fb0-1d93-4e14-a176-fea38410097f\" (UID: \"37642fb0-1d93-4e14-a176-fea38410097f\") " Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.899470 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/361ecbc5-676b-42af-9eb3-fb761f842265-config\") pod \"361ecbc5-676b-42af-9eb3-fb761f842265\" (UID: \"361ecbc5-676b-42af-9eb3-fb761f842265\") " Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.899493 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/361ecbc5-676b-42af-9eb3-fb761f842265-ovsdbserver-nb\") pod \"361ecbc5-676b-42af-9eb3-fb761f842265\" (UID: \"361ecbc5-676b-42af-9eb3-fb761f842265\") " Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.905789 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-tc47q"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.907267 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37642fb0-1d93-4e14-a176-fea38410097f-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "37642fb0-1d93-4e14-a176-fea38410097f" (UID: "37642fb0-1d93-4e14-a176-fea38410097f"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.919858 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-tc47q"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.920278 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37642fb0-1d93-4e14-a176-fea38410097f-config" (OuterVolumeSpecName: "config") pod "37642fb0-1d93-4e14-a176-fea38410097f" (UID: "37642fb0-1d93-4e14-a176-fea38410097f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.930095 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37642fb0-1d93-4e14-a176-fea38410097f-scripts" (OuterVolumeSpecName: "scripts") pod "37642fb0-1d93-4e14-a176-fea38410097f" (UID: "37642fb0-1d93-4e14-a176-fea38410097f"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.943193 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.943472 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="971dd170-cc55-481f-b76d-820102f811cd" containerName="nova-cell1-conductor-conductor" containerID="cri-o://ff0b33623ee2e909045d84098d1c8b4f4ee31b12318171307a8ee09a9499c92d" gracePeriod=30 Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.958399 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/361ecbc5-676b-42af-9eb3-fb761f842265-kube-api-access-b2qsx" (OuterVolumeSpecName: "kube-api-access-b2qsx") pod "361ecbc5-676b-42af-9eb3-fb761f842265" (UID: "361ecbc5-676b-42af-9eb3-fb761f842265"). InnerVolumeSpecName "kube-api-access-b2qsx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.958538 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "37642fb0-1d93-4e14-a176-fea38410097f" (UID: "37642fb0-1d93-4e14-a176-fea38410097f"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 10 16:54:09 crc kubenswrapper[4799]: I1010 16:54:09.959097 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37642fb0-1d93-4e14-a176-fea38410097f-kube-api-access-zf4qn" (OuterVolumeSpecName: "kube-api-access-zf4qn") pod "37642fb0-1d93-4e14-a176-fea38410097f" (UID: "37642fb0-1d93-4e14-a176-fea38410097f"). InnerVolumeSpecName "kube-api-access-zf4qn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.003378 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/95c3e251-04ea-40ab-94d0-608d6ef0d8f3-openstack-config\") pod \"95c3e251-04ea-40ab-94d0-608d6ef0d8f3\" (UID: \"95c3e251-04ea-40ab-94d0-608d6ef0d8f3\") " Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.003425 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/95c3e251-04ea-40ab-94d0-608d6ef0d8f3-openstack-config-secret\") pod \"95c3e251-04ea-40ab-94d0-608d6ef0d8f3\" (UID: \"95c3e251-04ea-40ab-94d0-608d6ef0d8f3\") " Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.003599 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qwn5h\" (UniqueName: \"kubernetes.io/projected/95c3e251-04ea-40ab-94d0-608d6ef0d8f3-kube-api-access-qwn5h\") pod \"95c3e251-04ea-40ab-94d0-608d6ef0d8f3\" (UID: \"95c3e251-04ea-40ab-94d0-608d6ef0d8f3\") " Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.003639 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95c3e251-04ea-40ab-94d0-608d6ef0d8f3-combined-ca-bundle\") pod \"95c3e251-04ea-40ab-94d0-608d6ef0d8f3\" (UID: \"95c3e251-04ea-40ab-94d0-608d6ef0d8f3\") " Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.004105 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b2qsx\" (UniqueName: \"kubernetes.io/projected/361ecbc5-676b-42af-9eb3-fb761f842265-kube-api-access-b2qsx\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.004122 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/37642fb0-1d93-4e14-a176-fea38410097f-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.004131 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/37642fb0-1d93-4e14-a176-fea38410097f-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.004141 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zf4qn\" (UniqueName: \"kubernetes.io/projected/37642fb0-1d93-4e14-a176-fea38410097f-kube-api-access-zf4qn\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.004150 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37642fb0-1d93-4e14-a176-fea38410097f-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.004169 4799 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.037617 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95c3e251-04ea-40ab-94d0-608d6ef0d8f3-kube-api-access-qwn5h" (OuterVolumeSpecName: "kube-api-access-qwn5h") pod "95c3e251-04ea-40ab-94d0-608d6ef0d8f3" (UID: "95c3e251-04ea-40ab-94d0-608d6ef0d8f3"). InnerVolumeSpecName "kube-api-access-qwn5h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.045695 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.045997 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="785f8ce9-5280-44fe-891c-8162f2fdcd7a" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://e41896910a44ed236459449a6ff81d15407ef8ae9a64ddb6a35d2d9e2aa86ba0" gracePeriod=30 Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.080077 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="7858ee88-c7b9-4fb7-b825-569154134201" containerName="rabbitmq" containerID="cri-o://5618f2fc1181b1ea35b16860a7cfaa8ed80f4249c7cd93a63b30a487631e90ec" gracePeriod=604800 Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.088867 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.089194 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="60be0e86-f2dd-4575-b3c8-0131575b1cd8" containerName="nova-cell0-conductor-conductor" containerID="cri-o://35d51a78c7ee3dde16f77dfec5a6f5f69c8e3d2b0eccd75b5f3e3226dc047eeb" gracePeriod=30 Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.097405 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_68f5ed12-8abe-46e0-a60a-086d13b7f038/ovsdbserver-sb/0.log" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.097489 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.110380 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qwn5h\" (UniqueName: \"kubernetes.io/projected/95c3e251-04ea-40ab-94d0-608d6ef0d8f3-kube-api-access-qwn5h\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.137293 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-kgmbm"] Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.165696 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-kgmbm"] Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.168038 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95c3e251-04ea-40ab-94d0-608d6ef0d8f3-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "95c3e251-04ea-40ab-94d0-608d6ef0d8f3" (UID: "95c3e251-04ea-40ab-94d0-608d6ef0d8f3"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.168859 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-69dc9744df-smbqh" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.177630 4799 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.177786 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.181396 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="7ae9763d-31dd-44c7-bf35-11a896a4f785" containerName="nova-scheduler-scheduler" containerID="cri-o://ef0cad99b2efacd5bcd212cd155d86551ed4cc35bedc046210eca5e8e009b86f" gracePeriod=30 Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.193102 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance2b60-account-delete-t7jh2"] Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.199724 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cindera843-account-delete-dptkx"] Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.210953 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-rlrvx"] Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.212118 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"68f5ed12-8abe-46e0-a60a-086d13b7f038\" (UID: \"68f5ed12-8abe-46e0-a60a-086d13b7f038\") " Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.212166 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dl6cb\" (UniqueName: \"kubernetes.io/projected/68f5ed12-8abe-46e0-a60a-086d13b7f038-kube-api-access-dl6cb\") pod \"68f5ed12-8abe-46e0-a60a-086d13b7f038\" (UID: \"68f5ed12-8abe-46e0-a60a-086d13b7f038\") " Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.212188 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/68f5ed12-8abe-46e0-a60a-086d13b7f038-metrics-certs-tls-certs\") pod \"68f5ed12-8abe-46e0-a60a-086d13b7f038\" (UID: \"68f5ed12-8abe-46e0-a60a-086d13b7f038\") " Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.212230 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/68f5ed12-8abe-46e0-a60a-086d13b7f038-ovsdb-rundir\") pod \"68f5ed12-8abe-46e0-a60a-086d13b7f038\" (UID: \"68f5ed12-8abe-46e0-a60a-086d13b7f038\") " Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.212276 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/68f5ed12-8abe-46e0-a60a-086d13b7f038-ovsdbserver-sb-tls-certs\") pod \"68f5ed12-8abe-46e0-a60a-086d13b7f038\" (UID: \"68f5ed12-8abe-46e0-a60a-086d13b7f038\") " Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.212306 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68f5ed12-8abe-46e0-a60a-086d13b7f038-combined-ca-bundle\") pod \"68f5ed12-8abe-46e0-a60a-086d13b7f038\" (UID: \"68f5ed12-8abe-46e0-a60a-086d13b7f038\") " Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.212396 4799 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/68f5ed12-8abe-46e0-a60a-086d13b7f038-scripts\") pod \"68f5ed12-8abe-46e0-a60a-086d13b7f038\" (UID: \"68f5ed12-8abe-46e0-a60a-086d13b7f038\") " Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.212412 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/68f5ed12-8abe-46e0-a60a-086d13b7f038-config\") pod \"68f5ed12-8abe-46e0-a60a-086d13b7f038\" (UID: \"68f5ed12-8abe-46e0-a60a-086d13b7f038\") " Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.213016 4799 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/95c3e251-04ea-40ab-94d0-608d6ef0d8f3-openstack-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.213031 4799 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.213645 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/68f5ed12-8abe-46e0-a60a-086d13b7f038-config" (OuterVolumeSpecName: "config") pod "68f5ed12-8abe-46e0-a60a-086d13b7f038" (UID: "68f5ed12-8abe-46e0-a60a-086d13b7f038"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.214080 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/68f5ed12-8abe-46e0-a60a-086d13b7f038-scripts" (OuterVolumeSpecName: "scripts") pod "68f5ed12-8abe-46e0-a60a-086d13b7f038" (UID: "68f5ed12-8abe-46e0-a60a-086d13b7f038"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.214831 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/68f5ed12-8abe-46e0-a60a-086d13b7f038-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "68f5ed12-8abe-46e0-a60a-086d13b7f038" (UID: "68f5ed12-8abe-46e0-a60a-086d13b7f038"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.226814 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-metrics-rlrvx"] Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.234373 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican0700-account-delete-smncx"] Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.253332 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "68f5ed12-8abe-46e0-a60a-086d13b7f038" (UID: "68f5ed12-8abe-46e0-a60a-086d13b7f038"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.254474 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68f5ed12-8abe-46e0-a60a-086d13b7f038-kube-api-access-dl6cb" (OuterVolumeSpecName: "kube-api-access-dl6cb") pod "68f5ed12-8abe-46e0-a60a-086d13b7f038" (UID: "68f5ed12-8abe-46e0-a60a-086d13b7f038"). InnerVolumeSpecName "kube-api-access-dl6cb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.256566 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-s8gsd"] Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.265785 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-s8gsd"] Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.271817 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placementcbdc-account-delete-9cz6z"] Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.279866 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutronfdbc-account-delete-b8x6d"] Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.304899 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell0f8a8-account-delete-6lbfn"] Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.313705 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34f6a30f-81f3-4240-8a4e-d7f1220801ab-config-data\") pod \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\" (UID: \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\") " Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.314717 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/34f6a30f-81f3-4240-8a4e-d7f1220801ab-log-httpd\") pod \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\" (UID: \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\") " Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.314764 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34f6a30f-81f3-4240-8a4e-d7f1220801ab-combined-ca-bundle\") pod \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\" (UID: \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\") " Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.314795 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f672z\" (UniqueName: \"kubernetes.io/projected/34f6a30f-81f3-4240-8a4e-d7f1220801ab-kube-api-access-f672z\") pod \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\" (UID: \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\") " Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.314923 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/34f6a30f-81f3-4240-8a4e-d7f1220801ab-internal-tls-certs\") pod \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\" (UID: \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\") " Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.314961 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/34f6a30f-81f3-4240-8a4e-d7f1220801ab-etc-swift\") pod \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\" (UID: \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\") " Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.315013 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/34f6a30f-81f3-4240-8a4e-d7f1220801ab-public-tls-certs\") pod \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\" (UID: \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\") " Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.315039 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/34f6a30f-81f3-4240-8a4e-d7f1220801ab-run-httpd\") pod \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\" (UID: \"34f6a30f-81f3-4240-8a4e-d7f1220801ab\") " Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.315285 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/34f6a30f-81f3-4240-8a4e-d7f1220801ab-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "34f6a30f-81f3-4240-8a4e-d7f1220801ab" (UID: "34f6a30f-81f3-4240-8a4e-d7f1220801ab"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.315611 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/34f6a30f-81f3-4240-8a4e-d7f1220801ab-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "34f6a30f-81f3-4240-8a4e-d7f1220801ab" (UID: "34f6a30f-81f3-4240-8a4e-d7f1220801ab"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.315858 4799 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.316299 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dl6cb\" (UniqueName: \"kubernetes.io/projected/68f5ed12-8abe-46e0-a60a-086d13b7f038-kube-api-access-dl6cb\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.316318 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/68f5ed12-8abe-46e0-a60a-086d13b7f038-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.316328 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/68f5ed12-8abe-46e0-a60a-086d13b7f038-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.316357 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/68f5ed12-8abe-46e0-a60a-086d13b7f038-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.316366 4799 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/34f6a30f-81f3-4240-8a4e-d7f1220801ab-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.318474 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapieef9-account-delete-qqxp5"] Oct 10 16:54:10 crc kubenswrapper[4799]: E1010 16:54:10.323514 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1 is running failed: container process not found" containerID="24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 10 16:54:10 crc kubenswrapper[4799]: E1010 16:54:10.323838 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1 is running failed: container process not 
found" containerID="24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 10 16:54:10 crc kubenswrapper[4799]: E1010 16:54:10.324071 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="387afba31b4e67a9ba9f7f2877d3f3af184a7c60b3843119336ceb5759893e62" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 10 16:54:10 crc kubenswrapper[4799]: E1010 16:54:10.324127 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1 is running failed: container process not found" containerID="24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 10 16:54:10 crc kubenswrapper[4799]: E1010 16:54:10.324145 4799 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-dtplc" podUID="e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6" containerName="ovsdb-server" Oct 10 16:54:10 crc kubenswrapper[4799]: E1010 16:54:10.325102 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="387afba31b4e67a9ba9f7f2877d3f3af184a7c60b3843119336ceb5759893e62" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 10 16:54:10 crc kubenswrapper[4799]: E1010 16:54:10.326329 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="387afba31b4e67a9ba9f7f2877d3f3af184a7c60b3843119336ceb5759893e62" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 10 16:54:10 crc kubenswrapper[4799]: E1010 16:54:10.326407 4799 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-dtplc" podUID="e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6" containerName="ovs-vswitchd" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.365280 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34f6a30f-81f3-4240-8a4e-d7f1220801ab-kube-api-access-f672z" (OuterVolumeSpecName: "kube-api-access-f672z") pod "34f6a30f-81f3-4240-8a4e-d7f1220801ab" (UID: "34f6a30f-81f3-4240-8a4e-d7f1220801ab"). InnerVolumeSpecName "kube-api-access-f672z". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:10 crc kubenswrapper[4799]: E1010 16:54:10.384319 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ff8624f34fcb1a15fffee56784a5608f01adefaa3172b0477e52de09e0786400" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.385072 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34f6a30f-81f3-4240-8a4e-d7f1220801ab-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "34f6a30f-81f3-4240-8a4e-d7f1220801ab" (UID: "34f6a30f-81f3-4240-8a4e-d7f1220801ab"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:10 crc kubenswrapper[4799]: E1010 16:54:10.387900 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ff8624f34fcb1a15fffee56784a5608f01adefaa3172b0477e52de09e0786400" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Oct 10 16:54:10 crc kubenswrapper[4799]: E1010 16:54:10.394844 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ff8624f34fcb1a15fffee56784a5608f01adefaa3172b0477e52de09e0786400" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Oct 10 16:54:10 crc kubenswrapper[4799]: E1010 16:54:10.394967 4799 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="fdfeebc0-d50f-42f8-a461-b0aea7ba6a11" containerName="ovn-northd" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.421991 4799 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/34f6a30f-81f3-4240-8a4e-d7f1220801ab-etc-swift\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.422026 4799 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/34f6a30f-81f3-4240-8a4e-d7f1220801ab-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.422037 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f672z\" (UniqueName: \"kubernetes.io/projected/34f6a30f-81f3-4240-8a4e-d7f1220801ab-kube-api-access-f672z\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.448389 4799 generic.go:334] "Generic (PLEG): container finished" podID="68ea0968-070a-41d4-b023-31557446c4dc" containerID="c3e06a4a05023171ceb2e34c51e209015c056f45bfa0faadf50fd4785e2e4d80" exitCode=0 Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.448513 4799 generic.go:334] "Generic (PLEG): container finished" podID="68ea0968-070a-41d4-b023-31557446c4dc" containerID="5b34f901fc61925f0938ed04472b26863bfffe70a6291e77a0980d1be5dc5aa4" exitCode=0 Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.448570 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"68ea0968-070a-41d4-b023-31557446c4dc","Type":"ContainerDied","Data":"c3e06a4a05023171ceb2e34c51e209015c056f45bfa0faadf50fd4785e2e4d80"} Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.448697 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"68ea0968-070a-41d4-b023-31557446c4dc","Type":"ContainerDied","Data":"5b34f901fc61925f0938ed04472b26863bfffe70a6291e77a0980d1be5dc5aa4"} Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.450332 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapieef9-account-delete-qqxp5" event={"ID":"4cc746af-c99b-4cb9-acde-dc3b97e424a3","Type":"ContainerStarted","Data":"424cbe41e418a491958f6d1bf8fecff1d6c225aefb5591d9e73fc218652a05da"} Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.452212 4799 generic.go:334] "Generic (PLEG): container finished" podID="71374742-0685-4486-bb2d-97116af40765" containerID="7213f8a82d7b7669ad0bb4871635bfb72208bddeb1f8660087390bb922ac83a8" exitCode=0 Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.452295 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance2b60-account-delete-t7jh2" event={"ID":"71374742-0685-4486-bb2d-97116af40765","Type":"ContainerDied","Data":"7213f8a82d7b7669ad0bb4871635bfb72208bddeb1f8660087390bb922ac83a8"} Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.452359 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance2b60-account-delete-t7jh2" event={"ID":"71374742-0685-4486-bb2d-97116af40765","Type":"ContainerStarted","Data":"490244995f82beddfa7cabd11c9ca8d2e2ea7ff6c6ba6a51d1c06e6dfd1b03df"} Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.476565 4799 generic.go:334] "Generic (PLEG): container finished" podID="ac766919-d788-40da-879a-627919926594" containerID="5af6a65cf06c9580d2979a8455e9bbfb9189ae51ad9185bd52c4a3fc972febbc" exitCode=143 Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.476667 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-56d84d574d-x5sbm" event={"ID":"ac766919-d788-40da-879a-627919926594","Type":"ContainerDied","Data":"5af6a65cf06c9580d2979a8455e9bbfb9189ae51ad9185bd52c4a3fc972febbc"} Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.478898 4799 generic.go:334] "Generic (PLEG): container finished" podID="34f6a30f-81f3-4240-8a4e-d7f1220801ab" containerID="7d59d6ece2c99c837d44d3e71e1c7dc7a0dd9eaa3806c1e6d93e906cf1b5fcfd" exitCode=0 Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.478925 4799 generic.go:334] "Generic (PLEG): container finished" podID="34f6a30f-81f3-4240-8a4e-d7f1220801ab" containerID="d89cab6d55e7c7a9cd3bd3ae258a2ce92f7ee5e9e44da70f5b9a36b753e9b185" exitCode=0 Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.478953 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-69dc9744df-smbqh" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.478967 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-69dc9744df-smbqh" event={"ID":"34f6a30f-81f3-4240-8a4e-d7f1220801ab","Type":"ContainerDied","Data":"7d59d6ece2c99c837d44d3e71e1c7dc7a0dd9eaa3806c1e6d93e906cf1b5fcfd"} Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.479002 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-69dc9744df-smbqh" event={"ID":"34f6a30f-81f3-4240-8a4e-d7f1220801ab","Type":"ContainerDied","Data":"d89cab6d55e7c7a9cd3bd3ae258a2ce92f7ee5e9e44da70f5b9a36b753e9b185"} Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.479012 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-69dc9744df-smbqh" event={"ID":"34f6a30f-81f3-4240-8a4e-d7f1220801ab","Type":"ContainerDied","Data":"5c2a32d25f9486e9f562bfeaa8fcfe59225c0df3e4b448f3f274900700e43789"} Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.479028 4799 scope.go:117] "RemoveContainer" containerID="7d59d6ece2c99c837d44d3e71e1c7dc7a0dd9eaa3806c1e6d93e906cf1b5fcfd" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.497223 4799 generic.go:334] "Generic (PLEG): container finished" podID="1bd0e459-efb9-463c-a8fc-d08a3194f3d9" containerID="6a4a38abddd264e6f436a27a2422c3d7dee920ced5f32af3275fc86edc0ea42c" exitCode=143 Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.497332 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5b85b5697b-9wc6c" event={"ID":"1bd0e459-efb9-463c-a8fc-d08a3194f3d9","Type":"ContainerDied","Data":"6a4a38abddd264e6f436a27a2422c3d7dee920ced5f32af3275fc86edc0ea42c"} Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.499247 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.502775 4799 generic.go:334] "Generic (PLEG): container finished" podID="3a05167f-cd58-4f9f-806b-8d71271320d2" containerID="ffeaf50de5582926c54ffdb7618e55611dd1a565aacf1bd0b7bbf41db579bdee" exitCode=143 Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.502852 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-754947f5d7-z2rks" event={"ID":"3a05167f-cd58-4f9f-806b-8d71271320d2","Type":"ContainerDied","Data":"ffeaf50de5582926c54ffdb7618e55611dd1a565aacf1bd0b7bbf41db579bdee"} Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.504958 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_68f5ed12-8abe-46e0-a60a-086d13b7f038/ovsdbserver-sb/0.log" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.505006 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"68f5ed12-8abe-46e0-a60a-086d13b7f038","Type":"ContainerDied","Data":"e84c8665d27eab7d53d5aa48205d379735b8c243dfc311437100e8672795f50e"} Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.505065 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.512175 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cindera843-account-delete-dptkx" event={"ID":"2f2d77fd-b861-4589-bdb5-ad606deb3360","Type":"ContainerStarted","Data":"64ca46c0b50e20654b1f83605b4dba5b1346d8dd62ac61500d915ca578febfe0"} Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.516006 4799 generic.go:334] "Generic (PLEG): container finished" podID="7dc78f94-acb0-4411-b1a2-14dd6500674b" containerID="45d51a08521515637f5b1846723d166ebcfd370a8e928d653837e32fd1bdcaff" exitCode=0 Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.516058 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7dc78f94-acb0-4411-b1a2-14dd6500674b","Type":"ContainerDied","Data":"45d51a08521515637f5b1846723d166ebcfd370a8e928d653837e32fd1bdcaff"} Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.519343 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_37642fb0-1d93-4e14-a176-fea38410097f/ovsdbserver-nb/0.log" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.519413 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"37642fb0-1d93-4e14-a176-fea38410097f","Type":"ContainerDied","Data":"b23b73380c8a03acfdfd5e7a3be56d0bd43f81bcef1d051586f929206650c736"} Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.519509 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.521904 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican0700-account-delete-smncx" event={"ID":"3b59d111-5df2-4b9f-9d02-7a3f9e19d02c","Type":"ContainerStarted","Data":"1ce8d8c74c0f4246d1709206771f8526346fea444bd5dd887bbd0495bbfea7d6"} Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.525650 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-64986d45b9-khcqw" event={"ID":"361ecbc5-676b-42af-9eb3-fb761f842265","Type":"ContainerDied","Data":"886979bf64ce0ec140fc36d3a375aff98b6c302ad49a10258df82e63682e6dc8"} Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.525781 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-64986d45b9-khcqw" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.526875 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell0f8a8-account-delete-6lbfn" event={"ID":"acf4a111-174e-42e8-8e71-d5bd053d5de2","Type":"ContainerStarted","Data":"0f9ddaa49add5f4f7fed87c0f54b9a4678ac17565e30745a3c27d5ad8ce19580"} Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.536305 4799 generic.go:334] "Generic (PLEG): container finished" podID="e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6" containerID="24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1" exitCode=0 Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.536375 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-dtplc" event={"ID":"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6","Type":"ContainerDied","Data":"24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1"} Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.544158 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placementcbdc-account-delete-9cz6z" event={"ID":"f1d90c74-271d-45af-9c91-87250b178ca6","Type":"ContainerStarted","Data":"3a6320145f34cd1ebd220511da24ae57b951f894bb881370e6aa1050296204e7"} Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.550030 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutronfdbc-account-delete-b8x6d" event={"ID":"445b5551-e072-43ca-a6e2-8f7fe726bb42","Type":"ContainerStarted","Data":"bbcada6d0c09a1870bcf967da0d23cdde5bb081cbf84851a3f26dc9e5d64534b"} Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.552642 4799 generic.go:334] "Generic (PLEG): container finished" podID="2a28d2a6-5dfc-414b-9eed-2f412cfc7063" containerID="2252cc52e03872ad264363f4a4f2c8970a681e759e68c07793c60a2df2a41d55" exitCode=0 Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.552670 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"2a28d2a6-5dfc-414b-9eed-2f412cfc7063","Type":"ContainerDied","Data":"2252cc52e03872ad264363f4a4f2c8970a681e759e68c07793c60a2df2a41d55"} Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.570288 4799 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.627309 4799 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:10 crc kubenswrapper[4799]: E1010 16:54:10.627645 4799 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Oct 10 16:54:10 crc kubenswrapper[4799]: E1010 16:54:10.627701 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/9fd6f03f-abea-4c29-8060-0705bb0af2c7-config-data podName:9fd6f03f-abea-4c29-8060-0705bb0af2c7 nodeName:}" failed. No retries permitted until 2025-10-10 16:54:14.627682504 +0000 UTC m=+1348.136006629 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/9fd6f03f-abea-4c29-8060-0705bb0af2c7-config-data") pod "rabbitmq-server-0" (UID: "9fd6f03f-abea-4c29-8060-0705bb0af2c7") : configmap "rabbitmq-config-data" not found Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.711834 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/361ecbc5-676b-42af-9eb3-fb761f842265-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "361ecbc5-676b-42af-9eb3-fb761f842265" (UID: "361ecbc5-676b-42af-9eb3-fb761f842265"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.743538 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/361ecbc5-676b-42af-9eb3-fb761f842265-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.801073 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/361ecbc5-676b-42af-9eb3-fb761f842265-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "361ecbc5-676b-42af-9eb3-fb761f842265" (UID: "361ecbc5-676b-42af-9eb3-fb761f842265"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.810111 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37642fb0-1d93-4e14-a176-fea38410097f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "37642fb0-1d93-4e14-a176-fea38410097f" (UID: "37642fb0-1d93-4e14-a176-fea38410097f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.849198 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37642fb0-1d93-4e14-a176-fea38410097f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.849475 4799 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/361ecbc5-676b-42af-9eb3-fb761f842265-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.871898 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95c3e251-04ea-40ab-94d0-608d6ef0d8f3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "95c3e251-04ea-40ab-94d0-608d6ef0d8f3" (UID: "95c3e251-04ea-40ab-94d0-608d6ef0d8f3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.897327 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/361ecbc5-676b-42af-9eb3-fb761f842265-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "361ecbc5-676b-42af-9eb3-fb761f842265" (UID: "361ecbc5-676b-42af-9eb3-fb761f842265"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.916107 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/361ecbc5-676b-42af-9eb3-fb761f842265-config" (OuterVolumeSpecName: "config") pod "361ecbc5-676b-42af-9eb3-fb761f842265" (UID: "361ecbc5-676b-42af-9eb3-fb761f842265"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.921316 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37642fb0-1d93-4e14-a176-fea38410097f-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "37642fb0-1d93-4e14-a176-fea38410097f" (UID: "37642fb0-1d93-4e14-a176-fea38410097f"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.925561 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68f5ed12-8abe-46e0-a60a-086d13b7f038-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "68f5ed12-8abe-46e0-a60a-086d13b7f038" (UID: "68f5ed12-8abe-46e0-a60a-086d13b7f038"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.951868 4799 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/37642fb0-1d93-4e14-a176-fea38410097f-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.951893 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68f5ed12-8abe-46e0-a60a-086d13b7f038-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.951901 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95c3e251-04ea-40ab-94d0-608d6ef0d8f3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.951912 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/361ecbc5-676b-42af-9eb3-fb761f842265-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:10 crc kubenswrapper[4799]: I1010 16:54:10.951921 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/361ecbc5-676b-42af-9eb3-fb761f842265-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.027052 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34f6a30f-81f3-4240-8a4e-d7f1220801ab-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "34f6a30f-81f3-4240-8a4e-d7f1220801ab" (UID: "34f6a30f-81f3-4240-8a4e-d7f1220801ab"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.055639 4799 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/34f6a30f-81f3-4240-8a4e-d7f1220801ab-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.077021 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34f6a30f-81f3-4240-8a4e-d7f1220801ab-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "34f6a30f-81f3-4240-8a4e-d7f1220801ab" (UID: "34f6a30f-81f3-4240-8a4e-d7f1220801ab"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.079868 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34f6a30f-81f3-4240-8a4e-d7f1220801ab-config-data" (OuterVolumeSpecName: "config-data") pod "34f6a30f-81f3-4240-8a4e-d7f1220801ab" (UID: "34f6a30f-81f3-4240-8a4e-d7f1220801ab"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.087869 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34f6a30f-81f3-4240-8a4e-d7f1220801ab-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "34f6a30f-81f3-4240-8a4e-d7f1220801ab" (UID: "34f6a30f-81f3-4240-8a4e-d7f1220801ab"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.125160 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37642fb0-1d93-4e14-a176-fea38410097f-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "37642fb0-1d93-4e14-a176-fea38410097f" (UID: "37642fb0-1d93-4e14-a176-fea38410097f"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.138505 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95c3e251-04ea-40ab-94d0-608d6ef0d8f3-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "95c3e251-04ea-40ab-94d0-608d6ef0d8f3" (UID: "95c3e251-04ea-40ab-94d0-608d6ef0d8f3"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.152090 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/361ecbc5-676b-42af-9eb3-fb761f842265-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "361ecbc5-676b-42af-9eb3-fb761f842265" (UID: "361ecbc5-676b-42af-9eb3-fb761f842265"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.157288 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/361ecbc5-676b-42af-9eb3-fb761f842265-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.157332 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34f6a30f-81f3-4240-8a4e-d7f1220801ab-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.157344 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/37642fb0-1d93-4e14-a176-fea38410097f-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.157353 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34f6a30f-81f3-4240-8a4e-d7f1220801ab-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.157362 4799 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/95c3e251-04ea-40ab-94d0-608d6ef0d8f3-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.157372 4799 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/34f6a30f-81f3-4240-8a4e-d7f1220801ab-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.168520 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68f5ed12-8abe-46e0-a60a-086d13b7f038-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "68f5ed12-8abe-46e0-a60a-086d13b7f038" (UID: "68f5ed12-8abe-46e0-a60a-086d13b7f038"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.182212 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68f5ed12-8abe-46e0-a60a-086d13b7f038-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "68f5ed12-8abe-46e0-a60a-086d13b7f038" (UID: "68f5ed12-8abe-46e0-a60a-086d13b7f038"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.258838 4799 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/68f5ed12-8abe-46e0-a60a-086d13b7f038-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.258871 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/68f5ed12-8abe-46e0-a60a-086d13b7f038-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.333307 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.333567 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="baff4453-a6a2-4264-82b7-3ce7c22734f6" containerName="ceilometer-central-agent" containerID="cri-o://50bad220d7bfe5f874d25169f5fbdb3b65af8607c4752caa21f314f09fc45b07" gracePeriod=30 Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.334068 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="baff4453-a6a2-4264-82b7-3ce7c22734f6" containerName="proxy-httpd" containerID="cri-o://f6be7ae91d5c505d0b533031b37bb272b1ce7b62110e052a7fafbcdc9d348b24" gracePeriod=30 Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.334112 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="baff4453-a6a2-4264-82b7-3ce7c22734f6" containerName="sg-core" containerID="cri-o://c9ae3f60b475964718a4b4571c9e1633350b005dde6fdf9cbe4423f5ab16bd34" gracePeriod=30 Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.334140 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="baff4453-a6a2-4264-82b7-3ce7c22734f6" containerName="ceilometer-notification-agent" containerID="cri-o://a4337b4876c0c4b3cbed0413f818cccf0ccead577c676587b002d48a2705e440" gracePeriod=30 Oct 10 16:54:11 crc kubenswrapper[4799]: E1010 16:54:11.370045 4799 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Oct 10 16:54:11 crc kubenswrapper[4799]: E1010 16:54:11.370111 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7858ee88-c7b9-4fb7-b825-569154134201-config-data podName:7858ee88-c7b9-4fb7-b825-569154134201 nodeName:}" failed. No retries permitted until 2025-10-10 16:54:15.370096185 +0000 UTC m=+1348.878420300 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/7858ee88-c7b9-4fb7-b825-569154134201-config-data") pod "rabbitmq-cell1-server-0" (UID: "7858ee88-c7b9-4fb7-b825-569154134201") : configmap "rabbitmq-cell1-config-data" not found Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.411650 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.412040 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="69aa641a-13ff-4f65-b2ea-7fee3ad42134" containerName="kube-state-metrics" containerID="cri-o://9a73e41efcf012c81cfb3fdb00ec877a3a4f57b043b7fb464fdebcd73d9d80d1" gracePeriod=30 Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.458357 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="038759ba-4122-4104-8699-81c76590eb2b" path="/var/lib/kubelet/pods/038759ba-4122-4104-8699-81c76590eb2b/volumes" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.459579 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21f9a08f-0fc1-497c-96f9-17b436acf166" path="/var/lib/kubelet/pods/21f9a08f-0fc1-497c-96f9-17b436acf166/volumes" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.460346 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23204589-3275-4935-9f16-171e3a66fe1b" path="/var/lib/kubelet/pods/23204589-3275-4935-9f16-171e3a66fe1b/volumes" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.462407 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29b5dc61-70d3-4fc7-85e7-665f12d6f2ad" path="/var/lib/kubelet/pods/29b5dc61-70d3-4fc7-85e7-665f12d6f2ad/volumes" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.463251 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3834e038-89d1-48e1-94f2-6323bd3a9bca" path="/var/lib/kubelet/pods/3834e038-89d1-48e1-94f2-6323bd3a9bca/volumes" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.463701 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="688b5025-24db-4d05-a046-d26deb669312" path="/var/lib/kubelet/pods/688b5025-24db-4d05-a046-d26deb669312/volumes" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.464783 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f591cb1-902a-406e-b93a-56c2b7ec9cb8" path="/var/lib/kubelet/pods/8f591cb1-902a-406e-b93a-56c2b7ec9cb8/volumes" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.465454 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9099bbc4-2f79-441f-a02b-6653832c7714" path="/var/lib/kubelet/pods/9099bbc4-2f79-441f-a02b-6653832c7714/volumes" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.466239 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="95c3e251-04ea-40ab-94d0-608d6ef0d8f3" path="/var/lib/kubelet/pods/95c3e251-04ea-40ab-94d0-608d6ef0d8f3/volumes" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.470454 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2d50cf9-d24c-42fb-a3be-716a020a8b5a" path="/var/lib/kubelet/pods/a2d50cf9-d24c-42fb-a3be-716a020a8b5a/volumes" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.471611 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="afce335f-6c8d-422f-9ee9-f69cd8a83715" path="/var/lib/kubelet/pods/afce335f-6c8d-422f-9ee9-f69cd8a83715/volumes" Oct 10 16:54:11 
crc kubenswrapper[4799]: I1010 16:54:11.472484 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8ed5bbd-eed6-4474-80c1-ea9aed201450" path="/var/lib/kubelet/pods/b8ed5bbd-eed6-4474-80c1-ea9aed201450/volumes" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.473549 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c222456d-86d0-45ea-aace-e359a8f0b78a" path="/var/lib/kubelet/pods/c222456d-86d0-45ea-aace-e359a8f0b78a/volumes" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.475646 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1dd8255-75af-4083-ae20-bba4b5760b3f" path="/var/lib/kubelet/pods/e1dd8255-75af-4083-ae20-bba4b5760b3f/volumes" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.482939 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2b06e0c-bd3e-4928-94f4-bdb22ae99a89" path="/var/lib/kubelet/pods/f2b06e0c-bd3e-4928-94f4-bdb22ae99a89/volumes" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.539814 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.540053 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/memcached-0" podUID="985d1485-7054-475b-8e60-85db5dc5afa3" containerName="memcached" containerID="cri-o://d7c80cad377e5693f3e27682ebd24a34de4bbf7f43f72423036babd6bf753968" gracePeriod=30 Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.655210 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-x5gkm"] Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.684876 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-x5gkm"] Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.699061 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-jrrx4"] Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.709152 4799 generic.go:334] "Generic (PLEG): container finished" podID="baff4453-a6a2-4264-82b7-3ce7c22734f6" containerID="f6be7ae91d5c505d0b533031b37bb272b1ce7b62110e052a7fafbcdc9d348b24" exitCode=0 Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.709189 4799 generic.go:334] "Generic (PLEG): container finished" podID="baff4453-a6a2-4264-82b7-3ce7c22734f6" containerID="c9ae3f60b475964718a4b4571c9e1633350b005dde6fdf9cbe4423f5ab16bd34" exitCode=2 Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.709303 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"baff4453-a6a2-4264-82b7-3ce7c22734f6","Type":"ContainerDied","Data":"f6be7ae91d5c505d0b533031b37bb272b1ce7b62110e052a7fafbcdc9d348b24"} Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.709344 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"baff4453-a6a2-4264-82b7-3ce7c22734f6","Type":"ContainerDied","Data":"c9ae3f60b475964718a4b4571c9e1633350b005dde6fdf9cbe4423f5ab16bd34"} Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.731247 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-jrrx4"] Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.734465 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"2a28d2a6-5dfc-414b-9eed-2f412cfc7063","Type":"ContainerDied","Data":"5b99a6b1fcc73cc34255feadfa95870bdeb8d577bd9cef261035e98f0256eb68"} Oct 10 16:54:11 crc 
kubenswrapper[4799]: I1010 16:54:11.734499 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5b99a6b1fcc73cc34255feadfa95870bdeb8d577bd9cef261035e98f0256eb68" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.750087 4799 generic.go:334] "Generic (PLEG): container finished" podID="2f2d77fd-b861-4589-bdb5-ad606deb3360" containerID="af38fd40bd18be55b2f068d68e6140436d795e517aa7b1bc8e8a4310f9752868" exitCode=0 Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.750213 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cindera843-account-delete-dptkx" event={"ID":"2f2d77fd-b861-4589-bdb5-ad606deb3360","Type":"ContainerDied","Data":"af38fd40bd18be55b2f068d68e6140436d795e517aa7b1bc8e8a4310f9752868"} Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.771270 4799 generic.go:334] "Generic (PLEG): container finished" podID="1bd0e459-efb9-463c-a8fc-d08a3194f3d9" containerID="92ceb7a81421c011199b208f71ac06ee53a9dc28e8d3ab70e4e869c6e631df9b" exitCode=0 Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.773505 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystonecab5-account-delete-jnkq7"] Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.775198 4799 generic.go:334] "Generic (PLEG): container finished" podID="69aa641a-13ff-4f65-b2ea-7fee3ad42134" containerID="9a73e41efcf012c81cfb3fdb00ec877a3a4f57b043b7fb464fdebcd73d9d80d1" exitCode=2 Oct 10 16:54:11 crc kubenswrapper[4799]: E1010 16:54:11.781810 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34f6a30f-81f3-4240-8a4e-d7f1220801ab" containerName="proxy-server" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.781849 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="34f6a30f-81f3-4240-8a4e-d7f1220801ab" containerName="proxy-server" Oct 10 16:54:11 crc kubenswrapper[4799]: E1010 16:54:11.781866 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="361ecbc5-676b-42af-9eb3-fb761f842265" containerName="init" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.781874 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="361ecbc5-676b-42af-9eb3-fb761f842265" containerName="init" Oct 10 16:54:11 crc kubenswrapper[4799]: E1010 16:54:11.781909 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37642fb0-1d93-4e14-a176-fea38410097f" containerName="openstack-network-exporter" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.781918 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="37642fb0-1d93-4e14-a176-fea38410097f" containerName="openstack-network-exporter" Oct 10 16:54:11 crc kubenswrapper[4799]: E1010 16:54:11.781945 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="038759ba-4122-4104-8699-81c76590eb2b" containerName="ovn-controller" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.781953 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="038759ba-4122-4104-8699-81c76590eb2b" containerName="ovn-controller" Oct 10 16:54:11 crc kubenswrapper[4799]: E1010 16:54:11.781965 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68f5ed12-8abe-46e0-a60a-086d13b7f038" containerName="ovsdbserver-sb" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.781973 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="68f5ed12-8abe-46e0-a60a-086d13b7f038" containerName="ovsdbserver-sb" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.782338 4799 generic.go:334] "Generic (PLEG): container finished" 
podID="3a05167f-cd58-4f9f-806b-8d71271320d2" containerID="44db3563c5cb7bf8e2e66a9895632efdaaa14dc9f4496418fa8fa16fcb55a2cc" exitCode=0 Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.783904 4799 generic.go:334] "Generic (PLEG): container finished" podID="3b59d111-5df2-4b9f-9d02-7a3f9e19d02c" containerID="8b87ad4f452cd40a751d6fe4d9f7832708fd10be491e8259151d3509310eab1c" exitCode=0 Oct 10 16:54:11 crc kubenswrapper[4799]: E1010 16:54:11.784007 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34f6a30f-81f3-4240-8a4e-d7f1220801ab" containerName="proxy-httpd" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.784026 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="34f6a30f-81f3-4240-8a4e-d7f1220801ab" containerName="proxy-httpd" Oct 10 16:54:11 crc kubenswrapper[4799]: E1010 16:54:11.784056 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37642fb0-1d93-4e14-a176-fea38410097f" containerName="ovsdbserver-nb" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.784065 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="37642fb0-1d93-4e14-a176-fea38410097f" containerName="ovsdbserver-nb" Oct 10 16:54:11 crc kubenswrapper[4799]: E1010 16:54:11.784095 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68f5ed12-8abe-46e0-a60a-086d13b7f038" containerName="openstack-network-exporter" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.784107 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="68f5ed12-8abe-46e0-a60a-086d13b7f038" containerName="openstack-network-exporter" Oct 10 16:54:11 crc kubenswrapper[4799]: E1010 16:54:11.784128 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="361ecbc5-676b-42af-9eb3-fb761f842265" containerName="dnsmasq-dns" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.784141 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="361ecbc5-676b-42af-9eb3-fb761f842265" containerName="dnsmasq-dns" Oct 10 16:54:11 crc kubenswrapper[4799]: E1010 16:54:11.784164 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f591cb1-902a-406e-b93a-56c2b7ec9cb8" containerName="openstack-network-exporter" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.784173 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f591cb1-902a-406e-b93a-56c2b7ec9cb8" containerName="openstack-network-exporter" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.784707 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="34f6a30f-81f3-4240-8a4e-d7f1220801ab" containerName="proxy-server" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.784752 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="37642fb0-1d93-4e14-a176-fea38410097f" containerName="ovsdbserver-nb" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.809856 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f591cb1-902a-406e-b93a-56c2b7ec9cb8" containerName="openstack-network-exporter" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.809903 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="68f5ed12-8abe-46e0-a60a-086d13b7f038" containerName="ovsdbserver-sb" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.809923 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="038759ba-4122-4104-8699-81c76590eb2b" containerName="ovn-controller" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.809979 4799 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="68f5ed12-8abe-46e0-a60a-086d13b7f038" containerName="openstack-network-exporter" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.810015 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="37642fb0-1d93-4e14-a176-fea38410097f" containerName="openstack-network-exporter" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.810038 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="34f6a30f-81f3-4240-8a4e-d7f1220801ab" containerName="proxy-httpd" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.810060 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="361ecbc5-676b-42af-9eb3-fb761f842265" containerName="dnsmasq-dns" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.811112 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5b85b5697b-9wc6c" event={"ID":"1bd0e459-efb9-463c-a8fc-d08a3194f3d9","Type":"ContainerDied","Data":"92ceb7a81421c011199b208f71ac06ee53a9dc28e8d3ab70e4e869c6e631df9b"} Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.811173 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5b85b5697b-9wc6c" event={"ID":"1bd0e459-efb9-463c-a8fc-d08a3194f3d9","Type":"ContainerDied","Data":"efe237b55e5242af0278a73975e4ae635c5aefa7fcda42dc748aad1699ce6abb"} Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.811192 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="efe237b55e5242af0278a73975e4ae635c5aefa7fcda42dc748aad1699ce6abb" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.811205 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"69aa641a-13ff-4f65-b2ea-7fee3ad42134","Type":"ContainerDied","Data":"9a73e41efcf012c81cfb3fdb00ec877a3a4f57b043b7fb464fdebcd73d9d80d1"} Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.811220 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-754947f5d7-z2rks" event={"ID":"3a05167f-cd58-4f9f-806b-8d71271320d2","Type":"ContainerDied","Data":"44db3563c5cb7bf8e2e66a9895632efdaaa14dc9f4496418fa8fa16fcb55a2cc"} Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.811236 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican0700-account-delete-smncx" event={"ID":"3b59d111-5df2-4b9f-9d02-7a3f9e19d02c","Type":"ContainerDied","Data":"8b87ad4f452cd40a751d6fe4d9f7832708fd10be491e8259151d3509310eab1c"} Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.811349 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystonecab5-account-delete-jnkq7" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.818620 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.853921 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance2b60-account-delete-t7jh2" event={"ID":"71374742-0685-4486-bb2d-97116af40765","Type":"ContainerDied","Data":"490244995f82beddfa7cabd11c9ca8d2e2ea7ff6c6ba6a51d1c06e6dfd1b03df"} Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.853975 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="490244995f82beddfa7cabd11c9ca8d2e2ea7ff6c6ba6a51d1c06e6dfd1b03df" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.864262 4799 generic.go:334] "Generic (PLEG): container finished" podID="785f8ce9-5280-44fe-891c-8162f2fdcd7a" containerID="e41896910a44ed236459449a6ff81d15407ef8ae9a64ddb6a35d2d9e2aa86ba0" exitCode=0 Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.864340 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"785f8ce9-5280-44fe-891c-8162f2fdcd7a","Type":"ContainerDied","Data":"e41896910a44ed236459449a6ff81d15407ef8ae9a64ddb6a35d2d9e2aa86ba0"} Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.864368 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"785f8ce9-5280-44fe-891c-8162f2fdcd7a","Type":"ContainerDied","Data":"aed2b40ab19d669e9d5375e3eca065a85cc8e7869b61fca099967cea718e2b8a"} Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.864379 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aed2b40ab19d669e9d5375e3eca065a85cc8e7869b61fca099967cea718e2b8a" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.914482 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p27l4\" (UniqueName: \"kubernetes.io/projected/3585305f-274f-416c-b59d-2dc474f54341-kube-api-access-p27l4\") pod \"keystonecab5-account-delete-jnkq7\" (UID: \"3585305f-274f-416c-b59d-2dc474f54341\") " pod="openstack/keystonecab5-account-delete-jnkq7" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.918099 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-657bb59659-swzhl"] Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.918360 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/keystone-657bb59659-swzhl" podUID="eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1" containerName="keystone-api" containerID="cri-o://791ae33161eebdb140cb4872e47266b15abe32970fcb198663d953365fc9278c" gracePeriod=30 Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.954520 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 10 16:54:11 crc kubenswrapper[4799]: I1010 16:54:11.996837 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystonecab5-account-delete-jnkq7"] Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.003014 4799 scope.go:117] "RemoveContainer" containerID="d89cab6d55e7c7a9cd3bd3ae258a2ce92f7ee5e9e44da70f5b9a36b753e9b185" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.017658 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-galera-tls-certs\") pod \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.017715 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-kolla-config\") pod \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.017798 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-config-data-default\") pod \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.017845 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-combined-ca-bundle\") pod \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.017904 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-operator-scripts\") pod \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.017925 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-secrets\") pod \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.018004 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-config-data-generated\") pod \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.018036 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xbns9\" (UniqueName: \"kubernetes.io/projected/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-kube-api-access-xbns9\") pod \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\" (UID: \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.018108 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\" (UID: 
\"2a28d2a6-5dfc-414b-9eed-2f412cfc7063\") " Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.018427 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p27l4\" (UniqueName: \"kubernetes.io/projected/3585305f-274f-416c-b59d-2dc474f54341-kube-api-access-p27l4\") pod \"keystonecab5-account-delete-jnkq7\" (UID: \"3585305f-274f-416c-b59d-2dc474f54341\") " pod="openstack/keystonecab5-account-delete-jnkq7" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.027174 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "2a28d2a6-5dfc-414b-9eed-2f412cfc7063" (UID: "2a28d2a6-5dfc-414b-9eed-2f412cfc7063"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.028742 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "2a28d2a6-5dfc-414b-9eed-2f412cfc7063" (UID: "2a28d2a6-5dfc-414b-9eed-2f412cfc7063"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.029562 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2a28d2a6-5dfc-414b-9eed-2f412cfc7063" (UID: "2a28d2a6-5dfc-414b-9eed-2f412cfc7063"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.030693 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-qv6gf"] Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.056291 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "2a28d2a6-5dfc-414b-9eed-2f412cfc7063" (UID: "2a28d2a6-5dfc-414b-9eed-2f412cfc7063"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.064867 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-qv6gf"] Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.076874 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-cab5-account-create-xs7tg"] Oct 10 16:54:12 crc kubenswrapper[4799]: E1010 16:54:12.079094 4799 projected.go:194] Error preparing data for projected volume kube-api-access-p27l4 for pod openstack/keystonecab5-account-delete-jnkq7: failed to fetch token: serviceaccounts "galera-openstack" not found Oct 10 16:54:12 crc kubenswrapper[4799]: E1010 16:54:12.079162 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3585305f-274f-416c-b59d-2dc474f54341-kube-api-access-p27l4 podName:3585305f-274f-416c-b59d-2dc474f54341 nodeName:}" failed. No retries permitted until 2025-10-10 16:54:12.579142935 +0000 UTC m=+1346.087467050 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-p27l4" (UniqueName: "kubernetes.io/projected/3585305f-274f-416c-b59d-2dc474f54341-kube-api-access-p27l4") pod "keystonecab5-account-delete-jnkq7" (UID: "3585305f-274f-416c-b59d-2dc474f54341") : failed to fetch token: serviceaccounts "galera-openstack" not found Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.080101 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance2b60-account-delete-t7jh2" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.082834 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystonecab5-account-delete-jnkq7"] Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.102682 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-kube-api-access-xbns9" (OuterVolumeSpecName: "kube-api-access-xbns9") pod "2a28d2a6-5dfc-414b-9eed-2f412cfc7063" (UID: "2a28d2a6-5dfc-414b-9eed-2f412cfc7063"). InnerVolumeSpecName "kube-api-access-xbns9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.113985 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-cab5-account-create-xs7tg"] Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.120569 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p947l\" (UniqueName: \"kubernetes.io/projected/71374742-0685-4486-bb2d-97116af40765-kube-api-access-p947l\") pod \"71374742-0685-4486-bb2d-97116af40765\" (UID: \"71374742-0685-4486-bb2d-97116af40765\") " Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.121784 4799 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-config-data-default\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.121804 4799 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-operator-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.121813 4799 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-config-data-generated\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.121823 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xbns9\" (UniqueName: \"kubernetes.io/projected/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-kube-api-access-xbns9\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.121832 4799 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-kolla-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.124576 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "mysql-db") pod "2a28d2a6-5dfc-414b-9eed-2f412cfc7063" (UID: "2a28d2a6-5dfc-414b-9eed-2f412cfc7063"). InnerVolumeSpecName "local-storage10-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.124896 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2a28d2a6-5dfc-414b-9eed-2f412cfc7063" (UID: "2a28d2a6-5dfc-414b-9eed-2f412cfc7063"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.126889 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71374742-0685-4486-bb2d-97116af40765-kube-api-access-p947l" (OuterVolumeSpecName: "kube-api-access-p947l") pod "71374742-0685-4486-bb2d-97116af40765" (UID: "71374742-0685-4486-bb2d-97116af40765"). InnerVolumeSpecName "kube-api-access-p947l". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.126939 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-secrets" (OuterVolumeSpecName: "secrets") pod "2a28d2a6-5dfc-414b-9eed-2f412cfc7063" (UID: "2a28d2a6-5dfc-414b-9eed-2f412cfc7063"). InnerVolumeSpecName "secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.215584 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "2a28d2a6-5dfc-414b-9eed-2f412cfc7063" (UID: "2a28d2a6-5dfc-414b-9eed-2f412cfc7063"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.223988 4799 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.224026 4799 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.224038 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p947l\" (UniqueName: \"kubernetes.io/projected/71374742-0685-4486-bb2d-97116af40765-kube-api-access-p947l\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.224048 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.224056 4799 reconciler_common.go:293] "Volume detached for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/2a28d2a6-5dfc-414b-9eed-2f412cfc7063-secrets\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.225584 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="f9c4cd8a-6aed-4826-b23b-328645f5801f" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.177:8776/healthcheck\": read tcp 10.217.0.2:55632->10.217.0.177:8776: read: connection reset by peer" Oct 10 16:54:12 crc 
kubenswrapper[4799]: I1010 16:54:12.255912 4799 scope.go:117] "RemoveContainer" containerID="7d59d6ece2c99c837d44d3e71e1c7dc7a0dd9eaa3806c1e6d93e906cf1b5fcfd" Oct 10 16:54:12 crc kubenswrapper[4799]: E1010 16:54:12.256463 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d59d6ece2c99c837d44d3e71e1c7dc7a0dd9eaa3806c1e6d93e906cf1b5fcfd\": container with ID starting with 7d59d6ece2c99c837d44d3e71e1c7dc7a0dd9eaa3806c1e6d93e906cf1b5fcfd not found: ID does not exist" containerID="7d59d6ece2c99c837d44d3e71e1c7dc7a0dd9eaa3806c1e6d93e906cf1b5fcfd" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.256506 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d59d6ece2c99c837d44d3e71e1c7dc7a0dd9eaa3806c1e6d93e906cf1b5fcfd"} err="failed to get container status \"7d59d6ece2c99c837d44d3e71e1c7dc7a0dd9eaa3806c1e6d93e906cf1b5fcfd\": rpc error: code = NotFound desc = could not find container \"7d59d6ece2c99c837d44d3e71e1c7dc7a0dd9eaa3806c1e6d93e906cf1b5fcfd\": container with ID starting with 7d59d6ece2c99c837d44d3e71e1c7dc7a0dd9eaa3806c1e6d93e906cf1b5fcfd not found: ID does not exist" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.256541 4799 scope.go:117] "RemoveContainer" containerID="d89cab6d55e7c7a9cd3bd3ae258a2ce92f7ee5e9e44da70f5b9a36b753e9b185" Oct 10 16:54:12 crc kubenswrapper[4799]: E1010 16:54:12.259379 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d89cab6d55e7c7a9cd3bd3ae258a2ce92f7ee5e9e44da70f5b9a36b753e9b185\": container with ID starting with d89cab6d55e7c7a9cd3bd3ae258a2ce92f7ee5e9e44da70f5b9a36b753e9b185 not found: ID does not exist" containerID="d89cab6d55e7c7a9cd3bd3ae258a2ce92f7ee5e9e44da70f5b9a36b753e9b185" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.259423 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d89cab6d55e7c7a9cd3bd3ae258a2ce92f7ee5e9e44da70f5b9a36b753e9b185"} err="failed to get container status \"d89cab6d55e7c7a9cd3bd3ae258a2ce92f7ee5e9e44da70f5b9a36b753e9b185\": rpc error: code = NotFound desc = could not find container \"d89cab6d55e7c7a9cd3bd3ae258a2ce92f7ee5e9e44da70f5b9a36b753e9b185\": container with ID starting with d89cab6d55e7c7a9cd3bd3ae258a2ce92f7ee5e9e44da70f5b9a36b753e9b185 not found: ID does not exist" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.259446 4799 scope.go:117] "RemoveContainer" containerID="7d59d6ece2c99c837d44d3e71e1c7dc7a0dd9eaa3806c1e6d93e906cf1b5fcfd" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.259886 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d59d6ece2c99c837d44d3e71e1c7dc7a0dd9eaa3806c1e6d93e906cf1b5fcfd"} err="failed to get container status \"7d59d6ece2c99c837d44d3e71e1c7dc7a0dd9eaa3806c1e6d93e906cf1b5fcfd\": rpc error: code = NotFound desc = could not find container \"7d59d6ece2c99c837d44d3e71e1c7dc7a0dd9eaa3806c1e6d93e906cf1b5fcfd\": container with ID starting with 7d59d6ece2c99c837d44d3e71e1c7dc7a0dd9eaa3806c1e6d93e906cf1b5fcfd not found: ID does not exist" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.259912 4799 scope.go:117] "RemoveContainer" containerID="d89cab6d55e7c7a9cd3bd3ae258a2ce92f7ee5e9e44da70f5b9a36b753e9b185" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.265231 4799 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openstack/glance-default-external-api-0" podUID="31fc68f8-af18-42b7-a94c-90a22afea5f1" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.178:9292/healthcheck\": read tcp 10.217.0.2:46640->10.217.0.178:9292: read: connection reset by peer" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.265249 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d89cab6d55e7c7a9cd3bd3ae258a2ce92f7ee5e9e44da70f5b9a36b753e9b185"} err="failed to get container status \"d89cab6d55e7c7a9cd3bd3ae258a2ce92f7ee5e9e44da70f5b9a36b753e9b185\": rpc error: code = NotFound desc = could not find container \"d89cab6d55e7c7a9cd3bd3ae258a2ce92f7ee5e9e44da70f5b9a36b753e9b185\": container with ID starting with d89cab6d55e7c7a9cd3bd3ae258a2ce92f7ee5e9e44da70f5b9a36b753e9b185 not found: ID does not exist" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.265377 4799 scope.go:117] "RemoveContainer" containerID="7894d2a17f4958f21c9c52f542e34e11e9c6033bdf6b58c15d012c7bb5cc154f" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.265917 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="31fc68f8-af18-42b7-a94c-90a22afea5f1" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.178:9292/healthcheck\": read tcp 10.217.0.2:46642->10.217.0.178:9292: read: connection reset by peer" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.294027 4799 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.328163 4799 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.342640 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="5e6870d5-faea-46d9-bebb-4d237b802910" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.210:8775/\": read tcp 10.217.0.2:34550->10.217.0.210:8775: read: connection reset by peer" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.342673 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="5e6870d5-faea-46d9-bebb-4d237b802910" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.210:8775/\": read tcp 10.217.0.2:34564->10.217.0.210:8775: read: connection reset by peer" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.473084 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-galera-0" podUID="fe3f0980-0eb7-4267-953a-3fcfa08a22b3" containerName="galera" containerID="cri-o://fad55af2475f84915150c67ad522384e34d6b8041a129873b036bf99434ad863" gracePeriod=30 Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.640148 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p27l4\" (UniqueName: \"kubernetes.io/projected/3585305f-274f-416c-b59d-2dc474f54341-kube-api-access-p27l4\") pod \"keystonecab5-account-delete-jnkq7\" (UID: \"3585305f-274f-416c-b59d-2dc474f54341\") " pod="openstack/keystonecab5-account-delete-jnkq7" Oct 10 16:54:12 crc kubenswrapper[4799]: E1010 16:54:12.643937 4799 projected.go:194] Error preparing data for projected 
volume kube-api-access-p27l4 for pod openstack/keystonecab5-account-delete-jnkq7: failed to fetch token: serviceaccounts "galera-openstack" not found Oct 10 16:54:12 crc kubenswrapper[4799]: E1010 16:54:12.644010 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3585305f-274f-416c-b59d-2dc474f54341-kube-api-access-p27l4 podName:3585305f-274f-416c-b59d-2dc474f54341 nodeName:}" failed. No retries permitted until 2025-10-10 16:54:13.643988823 +0000 UTC m=+1347.152313018 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-p27l4" (UniqueName: "kubernetes.io/projected/3585305f-274f-416c-b59d-2dc474f54341-kube-api-access-p27l4") pod "keystonecab5-account-delete-jnkq7" (UID: "3585305f-274f-416c-b59d-2dc474f54341") : failed to fetch token: serviceaccounts "galera-openstack" not found Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.870102 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-56d84d574d-x5sbm" podUID="ac766919-d788-40da-879a-627919926594" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.162:9311/healthcheck\": read tcp 10.217.0.2:47822->10.217.0.162:9311: read: connection reset by peer" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.870160 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-56d84d574d-x5sbm" podUID="ac766919-d788-40da-879a-627919926594" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.162:9311/healthcheck\": read tcp 10.217.0.2:47814->10.217.0.162:9311: read: connection reset by peer" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.884042 4799 scope.go:117] "RemoveContainer" containerID="1626175edad35c9d874dcd747ee3e25c9a1d3785dcb4a248d8af85626d8ee7f2" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.885714 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-754947f5d7-z2rks" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.910011 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.919107 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.923940 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.930284 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cindera843-account-delete-dptkx" event={"ID":"2f2d77fd-b861-4589-bdb5-ad606deb3360","Type":"ContainerDied","Data":"64ca46c0b50e20654b1f83605b4dba5b1346d8dd62ac61500d915ca578febfe0"} Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.930328 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="64ca46c0b50e20654b1f83605b4dba5b1346d8dd62ac61500d915ca578febfe0" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.940625 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-69dc9744df-smbqh"] Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.946175 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/785f8ce9-5280-44fe-891c-8162f2fdcd7a-combined-ca-bundle\") pod \"785f8ce9-5280-44fe-891c-8162f2fdcd7a\" (UID: \"785f8ce9-5280-44fe-891c-8162f2fdcd7a\") " Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.946223 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/785f8ce9-5280-44fe-891c-8162f2fdcd7a-vencrypt-tls-certs\") pod \"785f8ce9-5280-44fe-891c-8162f2fdcd7a\" (UID: \"785f8ce9-5280-44fe-891c-8162f2fdcd7a\") " Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.946316 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a05167f-cd58-4f9f-806b-8d71271320d2-combined-ca-bundle\") pod \"3a05167f-cd58-4f9f-806b-8d71271320d2\" (UID: \"3a05167f-cd58-4f9f-806b-8d71271320d2\") " Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.946347 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3a05167f-cd58-4f9f-806b-8d71271320d2-logs\") pod \"3a05167f-cd58-4f9f-806b-8d71271320d2\" (UID: \"3a05167f-cd58-4f9f-806b-8d71271320d2\") " Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.946461 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dfbmz\" (UniqueName: \"kubernetes.io/projected/3a05167f-cd58-4f9f-806b-8d71271320d2-kube-api-access-dfbmz\") pod \"3a05167f-cd58-4f9f-806b-8d71271320d2\" (UID: \"3a05167f-cd58-4f9f-806b-8d71271320d2\") " Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.946521 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zlx6g\" (UniqueName: \"kubernetes.io/projected/785f8ce9-5280-44fe-891c-8162f2fdcd7a-kube-api-access-zlx6g\") pod \"785f8ce9-5280-44fe-891c-8162f2fdcd7a\" (UID: \"785f8ce9-5280-44fe-891c-8162f2fdcd7a\") " Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.946542 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a05167f-cd58-4f9f-806b-8d71271320d2-config-data\") pod \"3a05167f-cd58-4f9f-806b-8d71271320d2\" (UID: \"3a05167f-cd58-4f9f-806b-8d71271320d2\") " Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.946562 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3a05167f-cd58-4f9f-806b-8d71271320d2-config-data-custom\") pod 
\"3a05167f-cd58-4f9f-806b-8d71271320d2\" (UID: \"3a05167f-cd58-4f9f-806b-8d71271320d2\") " Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.946579 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/785f8ce9-5280-44fe-891c-8162f2fdcd7a-config-data\") pod \"785f8ce9-5280-44fe-891c-8162f2fdcd7a\" (UID: \"785f8ce9-5280-44fe-891c-8162f2fdcd7a\") " Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.946603 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/785f8ce9-5280-44fe-891c-8162f2fdcd7a-nova-novncproxy-tls-certs\") pod \"785f8ce9-5280-44fe-891c-8162f2fdcd7a\" (UID: \"785f8ce9-5280-44fe-891c-8162f2fdcd7a\") " Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.952255 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-proxy-69dc9744df-smbqh"] Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.952466 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a05167f-cd58-4f9f-806b-8d71271320d2-logs" (OuterVolumeSpecName: "logs") pod "3a05167f-cd58-4f9f-806b-8d71271320d2" (UID: "3a05167f-cd58-4f9f-806b-8d71271320d2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.956917 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3a05167f-cd58-4f9f-806b-8d71271320d2-logs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.962769 4799 generic.go:334] "Generic (PLEG): container finished" podID="f9c4cd8a-6aed-4826-b23b-328645f5801f" containerID="02155113288bc0bf63cf3a3084f2f7ed5580d5f5ad54aca68e4fb0b10bb08a28" exitCode=0 Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.962779 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a05167f-cd58-4f9f-806b-8d71271320d2-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "3a05167f-cd58-4f9f-806b-8d71271320d2" (UID: "3a05167f-cd58-4f9f-806b-8d71271320d2"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.962822 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f9c4cd8a-6aed-4826-b23b-328645f5801f","Type":"ContainerDied","Data":"02155113288bc0bf63cf3a3084f2f7ed5580d5f5ad54aca68e4fb0b10bb08a28"} Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.963220 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f9c4cd8a-6aed-4826-b23b-328645f5801f","Type":"ContainerDied","Data":"af3e54d179c2ce9e28e0158ccfc133816d62b3e2bb192e4e76f70dd1617edde0"} Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.963294 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="af3e54d179c2ce9e28e0158ccfc133816d62b3e2bb192e4e76f70dd1617edde0" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.974611 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-5b85b5697b-9wc6c" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.974730 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a05167f-cd58-4f9f-806b-8d71271320d2-kube-api-access-dfbmz" (OuterVolumeSpecName: "kube-api-access-dfbmz") pod "3a05167f-cd58-4f9f-806b-8d71271320d2" (UID: "3a05167f-cd58-4f9f-806b-8d71271320d2"). InnerVolumeSpecName "kube-api-access-dfbmz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.976743 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican0700-account-delete-smncx" event={"ID":"3b59d111-5df2-4b9f-9d02-7a3f9e19d02c","Type":"ContainerDied","Data":"1ce8d8c74c0f4246d1709206771f8526346fea444bd5dd887bbd0495bbfea7d6"} Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.977056 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1ce8d8c74c0f4246d1709206771f8526346fea444bd5dd887bbd0495bbfea7d6" Oct 10 16:54:12 crc kubenswrapper[4799]: E1010 16:54:12.980372 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-p27l4], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/keystonecab5-account-delete-jnkq7" podUID="3585305f-274f-416c-b59d-2dc474f54341" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.983051 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/785f8ce9-5280-44fe-891c-8162f2fdcd7a-kube-api-access-zlx6g" (OuterVolumeSpecName: "kube-api-access-zlx6g") pod "785f8ce9-5280-44fe-891c-8162f2fdcd7a" (UID: "785f8ce9-5280-44fe-891c-8162f2fdcd7a"). InnerVolumeSpecName "kube-api-access-zlx6g". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.985888 4799 scope.go:117] "RemoveContainer" containerID="9a442c2442efda9014b2e49c109e3fd6db0be3a601326fff77372e592aa5bef9" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.987364 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.996320 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cindera843-account-delete-dptkx" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.996453 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-754947f5d7-z2rks" event={"ID":"3a05167f-cd58-4f9f-806b-8d71271320d2","Type":"ContainerDied","Data":"6ab739e3c938fbee42c14c3d3c3f28963dac6979d90fade8897e3b358371438b"} Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.996506 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-754947f5d7-z2rks" Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.999745 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapieef9-account-delete-qqxp5" event={"ID":"4cc746af-c99b-4cb9-acde-dc3b97e424a3","Type":"ContainerStarted","Data":"8be05ed436116475cdf9a313be0e5fb3e3d22f288f745f295d88ac7f0de2c8cd"} Oct 10 16:54:12 crc kubenswrapper[4799]: I1010 16:54:12.999912 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/novaapieef9-account-delete-qqxp5" podUID="4cc746af-c99b-4cb9-acde-dc3b97e424a3" containerName="mariadb-account-delete" containerID="cri-o://8be05ed436116475cdf9a313be0e5fb3e3d22f288f745f295d88ac7f0de2c8cd" gracePeriod=30 Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.000517 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican0700-account-delete-smncx" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.007233 4799 generic.go:334] "Generic (PLEG): container finished" podID="31fc68f8-af18-42b7-a94c-90a22afea5f1" containerID="d2b3ab1b197b085ea5a23bbdabb78c44e9c002b3cd5536ddb8dc1fcd93bae475" exitCode=0 Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.007288 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"31fc68f8-af18-42b7-a94c-90a22afea5f1","Type":"ContainerDied","Data":"d2b3ab1b197b085ea5a23bbdabb78c44e9c002b3cd5536ddb8dc1fcd93bae475"} Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.009339 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placementcbdc-account-delete-9cz6z" event={"ID":"f1d90c74-271d-45af-9c91-87250b178ca6","Type":"ContainerStarted","Data":"c9af17389b65f1506044e0ff931f039bde8034005b3476a949f660fb65fd52bf"} Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.009448 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placementcbdc-account-delete-9cz6z" podUID="f1d90c74-271d-45af-9c91-87250b178ca6" containerName="mariadb-account-delete" containerID="cri-o://c9af17389b65f1506044e0ff931f039bde8034005b3476a949f660fb65fd52bf" gracePeriod=30 Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.012797 4799 generic.go:334] "Generic (PLEG): container finished" podID="5e6870d5-faea-46d9-bebb-4d237b802910" containerID="d6ec86e7f860ab8ca24a46400675b4f8e0135552b7120d2f42340e7afc614296" exitCode=0 Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.012834 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5e6870d5-faea-46d9-bebb-4d237b802910","Type":"ContainerDied","Data":"d6ec86e7f860ab8ca24a46400675b4f8e0135552b7120d2f42340e7afc614296"} Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.017927 4799 generic.go:334] "Generic (PLEG): container finished" podID="985d1485-7054-475b-8e60-85db5dc5afa3" containerID="d7c80cad377e5693f3e27682ebd24a34de4bbf7f43f72423036babd6bf753968" exitCode=0 Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.018002 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"985d1485-7054-475b-8e60-85db5dc5afa3","Type":"ContainerDied","Data":"d7c80cad377e5693f3e27682ebd24a34de4bbf7f43f72423036babd6bf753968"} Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.056212 4799 scope.go:117] "RemoveContainer" containerID="f05670d9c23e36e24162d124b779c8a5b0a8aa589baac01d94d0aecd8b1875ef" Oct 10 16:54:13 crc kubenswrapper[4799]: 
I1010 16:54:13.057089 4799 generic.go:334] "Generic (PLEG): container finished" podID="7ab7b7c1-e89f-4562-882b-4f517f90f8c8" containerID="a8fdb5ed39a199e1efb2bc1f77b2de74dd205c6dcb6fea5e9f71d53f89199fcd" exitCode=0 Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.057255 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-6565b9cf48-rl77d" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.057264 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6565b9cf48-rl77d" event={"ID":"7ab7b7c1-e89f-4562-882b-4f517f90f8c8","Type":"ContainerDied","Data":"a8fdb5ed39a199e1efb2bc1f77b2de74dd205c6dcb6fea5e9f71d53f89199fcd"} Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.057518 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6565b9cf48-rl77d" event={"ID":"7ab7b7c1-e89f-4562-882b-4f517f90f8c8","Type":"ContainerDied","Data":"ef70f52ae58b93b1c0d97afd58eaabfed4177c0eb894fd9aa8da2869f2bbae4d"} Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.058531 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1bd0e459-efb9-463c-a8fc-d08a3194f3d9-config-data-custom\") pod \"1bd0e459-efb9-463c-a8fc-d08a3194f3d9\" (UID: \"1bd0e459-efb9-463c-a8fc-d08a3194f3d9\") " Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.058650 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7wqb9\" (UniqueName: \"kubernetes.io/projected/69aa641a-13ff-4f65-b2ea-7fee3ad42134-kube-api-access-7wqb9\") pod \"69aa641a-13ff-4f65-b2ea-7fee3ad42134\" (UID: \"69aa641a-13ff-4f65-b2ea-7fee3ad42134\") " Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.058695 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/69aa641a-13ff-4f65-b2ea-7fee3ad42134-kube-state-metrics-tls-certs\") pod \"69aa641a-13ff-4f65-b2ea-7fee3ad42134\" (UID: \"69aa641a-13ff-4f65-b2ea-7fee3ad42134\") " Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.058766 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1bd0e459-efb9-463c-a8fc-d08a3194f3d9-logs\") pod \"1bd0e459-efb9-463c-a8fc-d08a3194f3d9\" (UID: \"1bd0e459-efb9-463c-a8fc-d08a3194f3d9\") " Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.058803 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1bd0e459-efb9-463c-a8fc-d08a3194f3d9-combined-ca-bundle\") pod \"1bd0e459-efb9-463c-a8fc-d08a3194f3d9\" (UID: \"1bd0e459-efb9-463c-a8fc-d08a3194f3d9\") " Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.058829 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s9rg7\" (UniqueName: \"kubernetes.io/projected/3b59d111-5df2-4b9f-9d02-7a3f9e19d02c-kube-api-access-s9rg7\") pod \"3b59d111-5df2-4b9f-9d02-7a3f9e19d02c\" (UID: \"3b59d111-5df2-4b9f-9d02-7a3f9e19d02c\") " Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.058857 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69aa641a-13ff-4f65-b2ea-7fee3ad42134-combined-ca-bundle\") pod \"69aa641a-13ff-4f65-b2ea-7fee3ad42134\" (UID: \"69aa641a-13ff-4f65-b2ea-7fee3ad42134\") " Oct 
10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.058888 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-887gk\" (UniqueName: \"kubernetes.io/projected/1bd0e459-efb9-463c-a8fc-d08a3194f3d9-kube-api-access-887gk\") pod \"1bd0e459-efb9-463c-a8fc-d08a3194f3d9\" (UID: \"1bd0e459-efb9-463c-a8fc-d08a3194f3d9\") " Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.058924 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/69aa641a-13ff-4f65-b2ea-7fee3ad42134-kube-state-metrics-tls-config\") pod \"69aa641a-13ff-4f65-b2ea-7fee3ad42134\" (UID: \"69aa641a-13ff-4f65-b2ea-7fee3ad42134\") " Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.058990 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1bd0e459-efb9-463c-a8fc-d08a3194f3d9-config-data\") pod \"1bd0e459-efb9-463c-a8fc-d08a3194f3d9\" (UID: \"1bd0e459-efb9-463c-a8fc-d08a3194f3d9\") " Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.059013 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fl7x2\" (UniqueName: \"kubernetes.io/projected/2f2d77fd-b861-4589-bdb5-ad606deb3360-kube-api-access-fl7x2\") pod \"2f2d77fd-b861-4589-bdb5-ad606deb3360\" (UID: \"2f2d77fd-b861-4589-bdb5-ad606deb3360\") " Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.060062 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dfbmz\" (UniqueName: \"kubernetes.io/projected/3a05167f-cd58-4f9f-806b-8d71271320d2-kube-api-access-dfbmz\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.060088 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zlx6g\" (UniqueName: \"kubernetes.io/projected/785f8ce9-5280-44fe-891c-8162f2fdcd7a-kube-api-access-zlx6g\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.060101 4799 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3a05167f-cd58-4f9f-806b-8d71271320d2-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.061481 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1bd0e459-efb9-463c-a8fc-d08a3194f3d9-logs" (OuterVolumeSpecName: "logs") pod "1bd0e459-efb9-463c-a8fc-d08a3194f3d9" (UID: "1bd0e459-efb9-463c-a8fc-d08a3194f3d9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.063176 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.082433 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/novaapieef9-account-delete-qqxp5" podStartSLOduration=6.082412369 podStartE2EDuration="6.082412369s" podCreationTimestamp="2025-10-10 16:54:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:54:13.074148875 +0000 UTC m=+1346.582472990" watchObservedRunningTime="2025-10-10 16:54:13.082412369 +0000 UTC m=+1346.590736484" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.105623 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69aa641a-13ff-4f65-b2ea-7fee3ad42134-kube-api-access-7wqb9" (OuterVolumeSpecName: "kube-api-access-7wqb9") pod "69aa641a-13ff-4f65-b2ea-7fee3ad42134" (UID: "69aa641a-13ff-4f65-b2ea-7fee3ad42134"). InnerVolumeSpecName "kube-api-access-7wqb9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.105656 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bd0e459-efb9-463c-a8fc-d08a3194f3d9-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "1bd0e459-efb9-463c-a8fc-d08a3194f3d9" (UID: "1bd0e459-efb9-463c-a8fc-d08a3194f3d9"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.106665 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placementcbdc-account-delete-9cz6z" podStartSLOduration=6.106649965 podStartE2EDuration="6.106649965s" podCreationTimestamp="2025-10-10 16:54:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:54:13.104100033 +0000 UTC m=+1346.612424148" watchObservedRunningTime="2025-10-10 16:54:13.106649965 +0000 UTC m=+1346.614974080" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.113237 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f2d77fd-b861-4589-bdb5-ad606deb3360-kube-api-access-fl7x2" (OuterVolumeSpecName: "kube-api-access-fl7x2") pod "2f2d77fd-b861-4589-bdb5-ad606deb3360" (UID: "2f2d77fd-b861-4589-bdb5-ad606deb3360"). InnerVolumeSpecName "kube-api-access-fl7x2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.114065 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b59d111-5df2-4b9f-9d02-7a3f9e19d02c-kube-api-access-s9rg7" (OuterVolumeSpecName: "kube-api-access-s9rg7") pod "3b59d111-5df2-4b9f-9d02-7a3f9e19d02c" (UID: "3b59d111-5df2-4b9f-9d02-7a3f9e19d02c"). InnerVolumeSpecName "kube-api-access-s9rg7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.116352 4799 generic.go:334] "Generic (PLEG): container finished" podID="baff4453-a6a2-4264-82b7-3ce7c22734f6" containerID="50bad220d7bfe5f874d25169f5fbdb3b65af8607c4752caa21f314f09fc45b07" exitCode=0 Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.116413 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"baff4453-a6a2-4264-82b7-3ce7c22734f6","Type":"ContainerDied","Data":"50bad220d7bfe5f874d25169f5fbdb3b65af8607c4752caa21f314f09fc45b07"} Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.124927 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bd0e459-efb9-463c-a8fc-d08a3194f3d9-kube-api-access-887gk" (OuterVolumeSpecName: "kube-api-access-887gk") pod "1bd0e459-efb9-463c-a8fc-d08a3194f3d9" (UID: "1bd0e459-efb9-463c-a8fc-d08a3194f3d9"). InnerVolumeSpecName "kube-api-access-887gk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.127022 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"69aa641a-13ff-4f65-b2ea-7fee3ad42134","Type":"ContainerDied","Data":"9c17349ea4964f7a71e4a61b06276dec0451a6ac1845317abbf909498f2ec2d6"} Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.127390 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.132322 4799 scope.go:117] "RemoveContainer" containerID="78ebb853c6f1f78b3bad7b9528e8955d736b8b1c888e5a761ed264cd5256d2c8" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.133895 4799 generic.go:334] "Generic (PLEG): container finished" podID="e424a8e6-64c8-4572-8706-33026a2cc44d" containerID="45b55f581534a90bac80ffd0b27bca1fc0d2639dbcc1d9165ca16243e681541e" exitCode=0 Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.133978 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.133980 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e424a8e6-64c8-4572-8706-33026a2cc44d","Type":"ContainerDied","Data":"45b55f581534a90bac80ffd0b27bca1fc0d2639dbcc1d9165ca16243e681541e"} Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.134299 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance2b60-account-delete-t7jh2" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.160975 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-internal-tls-certs\") pod \"7ab7b7c1-e89f-4562-882b-4f517f90f8c8\" (UID: \"7ab7b7c1-e89f-4562-882b-4f517f90f8c8\") " Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.161031 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-config-data\") pod \"f9c4cd8a-6aed-4826-b23b-328645f5801f\" (UID: \"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.161060 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-scripts\") pod \"f9c4cd8a-6aed-4826-b23b-328645f5801f\" (UID: \"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.161092 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-internal-tls-certs\") pod \"f9c4cd8a-6aed-4826-b23b-328645f5801f\" (UID: \"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.161108 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-combined-ca-bundle\") pod \"7ab7b7c1-e89f-4562-882b-4f517f90f8c8\" (UID: \"7ab7b7c1-e89f-4562-882b-4f517f90f8c8\") " Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.161159 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-scripts\") pod \"7ab7b7c1-e89f-4562-882b-4f517f90f8c8\" (UID: \"7ab7b7c1-e89f-4562-882b-4f517f90f8c8\") " Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.161192 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f9c4cd8a-6aed-4826-b23b-328645f5801f-etc-machine-id\") pod \"f9c4cd8a-6aed-4826-b23b-328645f5801f\" (UID: \"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.161212 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vxgvv\" (UniqueName: \"kubernetes.io/projected/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-kube-api-access-vxgvv\") pod \"7ab7b7c1-e89f-4562-882b-4f517f90f8c8\" (UID: \"7ab7b7c1-e89f-4562-882b-4f517f90f8c8\") " Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.161272 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-config-data\") pod \"7ab7b7c1-e89f-4562-882b-4f517f90f8c8\" (UID: \"7ab7b7c1-e89f-4562-882b-4f517f90f8c8\") " Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.161301 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-config-data-custom\") pod \"f9c4cd8a-6aed-4826-b23b-328645f5801f\" (UID: 
\"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.161371 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-logs\") pod \"7ab7b7c1-e89f-4562-882b-4f517f90f8c8\" (UID: \"7ab7b7c1-e89f-4562-882b-4f517f90f8c8\") " Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.161410 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-public-tls-certs\") pod \"f9c4cd8a-6aed-4826-b23b-328645f5801f\" (UID: \"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.161474 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-combined-ca-bundle\") pod \"f9c4cd8a-6aed-4826-b23b-328645f5801f\" (UID: \"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.161500 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-public-tls-certs\") pod \"7ab7b7c1-e89f-4562-882b-4f517f90f8c8\" (UID: \"7ab7b7c1-e89f-4562-882b-4f517f90f8c8\") " Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.161520 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gpp2r\" (UniqueName: \"kubernetes.io/projected/f9c4cd8a-6aed-4826-b23b-328645f5801f-kube-api-access-gpp2r\") pod \"f9c4cd8a-6aed-4826-b23b-328645f5801f\" (UID: \"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.161537 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f9c4cd8a-6aed-4826-b23b-328645f5801f-logs\") pod \"f9c4cd8a-6aed-4826-b23b-328645f5801f\" (UID: \"f9c4cd8a-6aed-4826-b23b-328645f5801f\") " Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.161900 4799 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1bd0e459-efb9-463c-a8fc-d08a3194f3d9-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.161910 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7wqb9\" (UniqueName: \"kubernetes.io/projected/69aa641a-13ff-4f65-b2ea-7fee3ad42134-kube-api-access-7wqb9\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.161919 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1bd0e459-efb9-463c-a8fc-d08a3194f3d9-logs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.161928 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s9rg7\" (UniqueName: \"kubernetes.io/projected/3b59d111-5df2-4b9f-9d02-7a3f9e19d02c-kube-api-access-s9rg7\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.161936 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-887gk\" (UniqueName: \"kubernetes.io/projected/1bd0e459-efb9-463c-a8fc-d08a3194f3d9-kube-api-access-887gk\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc 
kubenswrapper[4799]: I1010 16:54:13.161944 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fl7x2\" (UniqueName: \"kubernetes.io/projected/2f2d77fd-b861-4589-bdb5-ad606deb3360-kube-api-access-fl7x2\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.163031 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9c4cd8a-6aed-4826-b23b-328645f5801f-logs" (OuterVolumeSpecName: "logs") pod "f9c4cd8a-6aed-4826-b23b-328645f5801f" (UID: "f9c4cd8a-6aed-4826-b23b-328645f5801f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.174807 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-logs" (OuterVolumeSpecName: "logs") pod "7ab7b7c1-e89f-4562-882b-4f517f90f8c8" (UID: "7ab7b7c1-e89f-4562-882b-4f517f90f8c8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.174856 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f9c4cd8a-6aed-4826-b23b-328645f5801f-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "f9c4cd8a-6aed-4826-b23b-328645f5801f" (UID: "f9c4cd8a-6aed-4826-b23b-328645f5801f"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.189897 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f9c4cd8a-6aed-4826-b23b-328645f5801f" (UID: "f9c4cd8a-6aed-4826-b23b-328645f5801f"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.224326 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-scripts" (OuterVolumeSpecName: "scripts") pod "7ab7b7c1-e89f-4562-882b-4f517f90f8c8" (UID: "7ab7b7c1-e89f-4562-882b-4f517f90f8c8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.224331 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9c4cd8a-6aed-4826-b23b-328645f5801f-kube-api-access-gpp2r" (OuterVolumeSpecName: "kube-api-access-gpp2r") pod "f9c4cd8a-6aed-4826-b23b-328645f5801f" (UID: "f9c4cd8a-6aed-4826-b23b-328645f5801f"). InnerVolumeSpecName "kube-api-access-gpp2r". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.224480 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-kube-api-access-vxgvv" (OuterVolumeSpecName: "kube-api-access-vxgvv") pod "7ab7b7c1-e89f-4562-882b-4f517f90f8c8" (UID: "7ab7b7c1-e89f-4562-882b-4f517f90f8c8"). InnerVolumeSpecName "kube-api-access-vxgvv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.224562 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-scripts" (OuterVolumeSpecName: "scripts") pod "f9c4cd8a-6aed-4826-b23b-328645f5801f" (UID: "f9c4cd8a-6aed-4826-b23b-328645f5801f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.262327 4799 scope.go:117] "RemoveContainer" containerID="7e43def494bc7bc9587325e681735d75ef482a0d36aa17642f1a5fcfdc38318f" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.264196 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gpp2r\" (UniqueName: \"kubernetes.io/projected/f9c4cd8a-6aed-4826-b23b-328645f5801f-kube-api-access-gpp2r\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.264219 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f9c4cd8a-6aed-4826-b23b-328645f5801f-logs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.264228 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.264236 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.264247 4799 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f9c4cd8a-6aed-4826-b23b-328645f5801f-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.264255 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vxgvv\" (UniqueName: \"kubernetes.io/projected/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-kube-api-access-vxgvv\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.264264 4799 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.264272 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-logs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.304729 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.320824 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.336447 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.358272 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance2b60-account-delete-t7jh2"] Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.365492 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"e424a8e6-64c8-4572-8706-33026a2cc44d\" (UID: \"e424a8e6-64c8-4572-8706-33026a2cc44d\") " Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.365867 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e424a8e6-64c8-4572-8706-33026a2cc44d-httpd-run\") pod \"e424a8e6-64c8-4572-8706-33026a2cc44d\" (UID: \"e424a8e6-64c8-4572-8706-33026a2cc44d\") " Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.368743 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e424a8e6-64c8-4572-8706-33026a2cc44d-combined-ca-bundle\") pod \"e424a8e6-64c8-4572-8706-33026a2cc44d\" (UID: \"e424a8e6-64c8-4572-8706-33026a2cc44d\") " Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.368873 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sjm4m\" (UniqueName: \"kubernetes.io/projected/e424a8e6-64c8-4572-8706-33026a2cc44d-kube-api-access-sjm4m\") pod \"e424a8e6-64c8-4572-8706-33026a2cc44d\" (UID: \"e424a8e6-64c8-4572-8706-33026a2cc44d\") " Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.368903 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e424a8e6-64c8-4572-8706-33026a2cc44d-config-data\") pod \"e424a8e6-64c8-4572-8706-33026a2cc44d\" (UID: \"e424a8e6-64c8-4572-8706-33026a2cc44d\") " Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.368971 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e424a8e6-64c8-4572-8706-33026a2cc44d-logs\") pod \"e424a8e6-64c8-4572-8706-33026a2cc44d\" (UID: \"e424a8e6-64c8-4572-8706-33026a2cc44d\") " Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.369044 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e424a8e6-64c8-4572-8706-33026a2cc44d-internal-tls-certs\") pod \"e424a8e6-64c8-4572-8706-33026a2cc44d\" (UID: \"e424a8e6-64c8-4572-8706-33026a2cc44d\") " Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.369070 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e424a8e6-64c8-4572-8706-33026a2cc44d-scripts\") pod \"e424a8e6-64c8-4572-8706-33026a2cc44d\" (UID: \"e424a8e6-64c8-4572-8706-33026a2cc44d\") " Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.374319 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e424a8e6-64c8-4572-8706-33026a2cc44d-httpd-run" 
(OuterVolumeSpecName: "httpd-run") pod "e424a8e6-64c8-4572-8706-33026a2cc44d" (UID: "e424a8e6-64c8-4572-8706-33026a2cc44d"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.377366 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e424a8e6-64c8-4572-8706-33026a2cc44d-logs" (OuterVolumeSpecName: "logs") pod "e424a8e6-64c8-4572-8706-33026a2cc44d" (UID: "e424a8e6-64c8-4572-8706-33026a2cc44d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.385869 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance2b60-account-delete-t7jh2"] Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.386802 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e424a8e6-64c8-4572-8706-33026a2cc44d-scripts" (OuterVolumeSpecName: "scripts") pod "e424a8e6-64c8-4572-8706-33026a2cc44d" (UID: "e424a8e6-64c8-4572-8706-33026a2cc44d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.423979 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "glance") pod "e424a8e6-64c8-4572-8706-33026a2cc44d" (UID: "e424a8e6-64c8-4572-8706-33026a2cc44d"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.443311 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e585e37-862b-4a19-bb06-36f89755f4e0" path="/var/lib/kubelet/pods/0e585e37-862b-4a19-bb06-36f89755f4e0/volumes" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.443997 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a28d2a6-5dfc-414b-9eed-2f412cfc7063" path="/var/lib/kubelet/pods/2a28d2a6-5dfc-414b-9eed-2f412cfc7063/volumes" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.444564 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="34f6a30f-81f3-4240-8a4e-d7f1220801ab" path="/var/lib/kubelet/pods/34f6a30f-81f3-4240-8a4e-d7f1220801ab/volumes" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.446523 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68f5ed12-8abe-46e0-a60a-086d13b7f038" path="/var/lib/kubelet/pods/68f5ed12-8abe-46e0-a60a-086d13b7f038/volumes" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.447233 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f8a5006-c9ab-4b58-850e-a044229b2460" path="/var/lib/kubelet/pods/6f8a5006-c9ab-4b58-850e-a044229b2460/volumes" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.447728 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71374742-0685-4486-bb2d-97116af40765" path="/var/lib/kubelet/pods/71374742-0685-4486-bb2d-97116af40765/volumes" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.459129 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="730fe650-110f-4bae-8f7d-6b3d6755b4f8" path="/var/lib/kubelet/pods/730fe650-110f-4bae-8f7d-6b3d6755b4f8/volumes" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.468883 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd273e06-ed83-42c6-aa3e-3ed6eda94c1d" 
path="/var/lib/kubelet/pods/fd273e06-ed83-42c6-aa3e-3ed6eda94c1d/volumes" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.476872 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e424a8e6-64c8-4572-8706-33026a2cc44d-kube-api-access-sjm4m" (OuterVolumeSpecName: "kube-api-access-sjm4m") pod "e424a8e6-64c8-4572-8706-33026a2cc44d" (UID: "e424a8e6-64c8-4572-8706-33026a2cc44d"). InnerVolumeSpecName "kube-api-access-sjm4m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.493242 4799 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e424a8e6-64c8-4572-8706-33026a2cc44d-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.493280 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e424a8e6-64c8-4572-8706-33026a2cc44d-logs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.493290 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e424a8e6-64c8-4572-8706-33026a2cc44d-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.493320 4799 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.527838 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/785f8ce9-5280-44fe-891c-8162f2fdcd7a-config-data" (OuterVolumeSpecName: "config-data") pod "785f8ce9-5280-44fe-891c-8162f2fdcd7a" (UID: "785f8ce9-5280-44fe-891c-8162f2fdcd7a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.533003 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/785f8ce9-5280-44fe-891c-8162f2fdcd7a-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "785f8ce9-5280-44fe-891c-8162f2fdcd7a" (UID: "785f8ce9-5280-44fe-891c-8162f2fdcd7a"). InnerVolumeSpecName "nova-novncproxy-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.578488 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/785f8ce9-5280-44fe-891c-8162f2fdcd7a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "785f8ce9-5280-44fe-891c-8162f2fdcd7a" (UID: "785f8ce9-5280-44fe-891c-8162f2fdcd7a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.594363 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69aa641a-13ff-4f65-b2ea-7fee3ad42134-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "69aa641a-13ff-4f65-b2ea-7fee3ad42134" (UID: "69aa641a-13ff-4f65-b2ea-7fee3ad42134"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.595933 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69aa641a-13ff-4f65-b2ea-7fee3ad42134-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.595963 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/785f8ce9-5280-44fe-891c-8162f2fdcd7a-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.595973 4799 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/785f8ce9-5280-44fe-891c-8162f2fdcd7a-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.595984 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sjm4m\" (UniqueName: \"kubernetes.io/projected/e424a8e6-64c8-4572-8706-33026a2cc44d-kube-api-access-sjm4m\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.595993 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/785f8ce9-5280-44fe-891c-8162f2fdcd7a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.625942 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69aa641a-13ff-4f65-b2ea-7fee3ad42134-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "69aa641a-13ff-4f65-b2ea-7fee3ad42134" (UID: "69aa641a-13ff-4f65-b2ea-7fee3ad42134"). InnerVolumeSpecName "kube-state-metrics-tls-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.641563 4799 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.658088 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-config-data" (OuterVolumeSpecName: "config-data") pod "7ab7b7c1-e89f-4562-882b-4f517f90f8c8" (UID: "7ab7b7c1-e89f-4562-882b-4f517f90f8c8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.662949 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f9c4cd8a-6aed-4826-b23b-328645f5801f" (UID: "f9c4cd8a-6aed-4826-b23b-328645f5801f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.685401 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-config-data" (OuterVolumeSpecName: "config-data") pod "f9c4cd8a-6aed-4826-b23b-328645f5801f" (UID: "f9c4cd8a-6aed-4826-b23b-328645f5801f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.696797 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e424a8e6-64c8-4572-8706-33026a2cc44d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e424a8e6-64c8-4572-8706-33026a2cc44d" (UID: "e424a8e6-64c8-4572-8706-33026a2cc44d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.697242 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e424a8e6-64c8-4572-8706-33026a2cc44d-combined-ca-bundle\") pod \"e424a8e6-64c8-4572-8706-33026a2cc44d\" (UID: \"e424a8e6-64c8-4572-8706-33026a2cc44d\") " Oct 10 16:54:13 crc kubenswrapper[4799]: W1010 16:54:13.697347 4799 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/e424a8e6-64c8-4572-8706-33026a2cc44d/volumes/kubernetes.io~secret/combined-ca-bundle Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.697360 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e424a8e6-64c8-4572-8706-33026a2cc44d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e424a8e6-64c8-4572-8706-33026a2cc44d" (UID: "e424a8e6-64c8-4572-8706-33026a2cc44d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.697638 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p27l4\" (UniqueName: \"kubernetes.io/projected/3585305f-274f-416c-b59d-2dc474f54341-kube-api-access-p27l4\") pod \"keystonecab5-account-delete-jnkq7\" (UID: \"3585305f-274f-416c-b59d-2dc474f54341\") " pod="openstack/keystonecab5-account-delete-jnkq7" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.697704 4799 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.697715 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.697726 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.697735 4799 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/69aa641a-13ff-4f65-b2ea-7fee3ad42134-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.697743 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e424a8e6-64c8-4572-8706-33026a2cc44d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.697765 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-config-data\") on node \"crc\" 
DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: E1010 16:54:13.700641 4799 projected.go:194] Error preparing data for projected volume kube-api-access-p27l4 for pod openstack/keystonecab5-account-delete-jnkq7: failed to fetch token: serviceaccounts "galera-openstack" not found Oct 10 16:54:13 crc kubenswrapper[4799]: E1010 16:54:13.700719 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3585305f-274f-416c-b59d-2dc474f54341-kube-api-access-p27l4 podName:3585305f-274f-416c-b59d-2dc474f54341 nodeName:}" failed. No retries permitted until 2025-10-10 16:54:15.700699372 +0000 UTC m=+1349.209023487 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-p27l4" (UniqueName: "kubernetes.io/projected/3585305f-274f-416c-b59d-2dc474f54341-kube-api-access-p27l4") pod "keystonecab5-account-delete-jnkq7" (UID: "3585305f-274f-416c-b59d-2dc474f54341") : failed to fetch token: serviceaccounts "galera-openstack" not found Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.705622 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bd0e459-efb9-463c-a8fc-d08a3194f3d9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1bd0e459-efb9-463c-a8fc-d08a3194f3d9" (UID: "1bd0e459-efb9-463c-a8fc-d08a3194f3d9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.731054 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a05167f-cd58-4f9f-806b-8d71271320d2-config-data" (OuterVolumeSpecName: "config-data") pod "3a05167f-cd58-4f9f-806b-8d71271320d2" (UID: "3a05167f-cd58-4f9f-806b-8d71271320d2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.732045 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e424a8e6-64c8-4572-8706-33026a2cc44d-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "e424a8e6-64c8-4572-8706-33026a2cc44d" (UID: "e424a8e6-64c8-4572-8706-33026a2cc44d"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.745139 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "f9c4cd8a-6aed-4826-b23b-328645f5801f" (UID: "f9c4cd8a-6aed-4826-b23b-328645f5801f"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.750890 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69aa641a-13ff-4f65-b2ea-7fee3ad42134-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "69aa641a-13ff-4f65-b2ea-7fee3ad42134" (UID: "69aa641a-13ff-4f65-b2ea-7fee3ad42134"). InnerVolumeSpecName "kube-state-metrics-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.767797 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "f9c4cd8a-6aed-4826-b23b-328645f5801f" (UID: "f9c4cd8a-6aed-4826-b23b-328645f5801f"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.779217 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/785f8ce9-5280-44fe-891c-8162f2fdcd7a-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "785f8ce9-5280-44fe-891c-8162f2fdcd7a" (UID: "785f8ce9-5280-44fe-891c-8162f2fdcd7a"). InnerVolumeSpecName "vencrypt-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.784517 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e424a8e6-64c8-4572-8706-33026a2cc44d-config-data" (OuterVolumeSpecName: "config-data") pod "e424a8e6-64c8-4572-8706-33026a2cc44d" (UID: "e424a8e6-64c8-4572-8706-33026a2cc44d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.787350 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a05167f-cd58-4f9f-806b-8d71271320d2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3a05167f-cd58-4f9f-806b-8d71271320d2" (UID: "3a05167f-cd58-4f9f-806b-8d71271320d2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.799723 4799 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.799747 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a05167f-cd58-4f9f-806b-8d71271320d2-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.799785 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e424a8e6-64c8-4572-8706-33026a2cc44d-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.799794 4799 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/785f8ce9-5280-44fe-891c-8162f2fdcd7a-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.799803 4799 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9c4cd8a-6aed-4826-b23b-328645f5801f-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.799811 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a05167f-cd58-4f9f-806b-8d71271320d2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.799820 4799 reconciler_common.go:293] "Volume detached for volume 
\"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/69aa641a-13ff-4f65-b2ea-7fee3ad42134-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.799829 4799 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e424a8e6-64c8-4572-8706-33026a2cc44d-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.799857 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1bd0e459-efb9-463c-a8fc-d08a3194f3d9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.823210 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "7ab7b7c1-e89f-4562-882b-4f517f90f8c8" (UID: "7ab7b7c1-e89f-4562-882b-4f517f90f8c8"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.828249 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bd0e459-efb9-463c-a8fc-d08a3194f3d9-config-data" (OuterVolumeSpecName: "config-data") pod "1bd0e459-efb9-463c-a8fc-d08a3194f3d9" (UID: "1bd0e459-efb9-463c-a8fc-d08a3194f3d9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.846113 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "7ab7b7c1-e89f-4562-882b-4f517f90f8c8" (UID: "7ab7b7c1-e89f-4562-882b-4f517f90f8c8"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.868229 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7ab7b7c1-e89f-4562-882b-4f517f90f8c8" (UID: "7ab7b7c1-e89f-4562-882b-4f517f90f8c8"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.901923 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.901952 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1bd0e459-efb9-463c-a8fc-d08a3194f3d9-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.901962 4799 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: I1010 16:54:13.901970 4799 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ab7b7c1-e89f-4562-882b-4f517f90f8c8-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:13 crc kubenswrapper[4799]: E1010 16:54:13.907825 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ef0cad99b2efacd5bcd212cd155d86551ed4cc35bedc046210eca5e8e009b86f is running failed: container process not found" containerID="ef0cad99b2efacd5bcd212cd155d86551ed4cc35bedc046210eca5e8e009b86f" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 10 16:54:13 crc kubenswrapper[4799]: E1010 16:54:13.910998 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ef0cad99b2efacd5bcd212cd155d86551ed4cc35bedc046210eca5e8e009b86f is running failed: container process not found" containerID="ef0cad99b2efacd5bcd212cd155d86551ed4cc35bedc046210eca5e8e009b86f" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 10 16:54:13 crc kubenswrapper[4799]: E1010 16:54:13.911255 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ef0cad99b2efacd5bcd212cd155d86551ed4cc35bedc046210eca5e8e009b86f is running failed: container process not found" containerID="ef0cad99b2efacd5bcd212cd155d86551ed4cc35bedc046210eca5e8e009b86f" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 10 16:54:13 crc kubenswrapper[4799]: E1010 16:54:13.911534 4799 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ef0cad99b2efacd5bcd212cd155d86551ed4cc35bedc046210eca5e8e009b86f is running failed: container process not found" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="7ae9763d-31dd-44c7-bf35-11a896a4f785" containerName="nova-scheduler-scheduler" Oct 10 16:54:13 crc kubenswrapper[4799]: E1010 16:54:13.925090 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ff0b33623ee2e909045d84098d1c8b4f4ee31b12318171307a8ee09a9499c92d is running failed: container process not found" containerID="ff0b33623ee2e909045d84098d1c8b4f4ee31b12318171307a8ee09a9499c92d" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 10 16:54:13 crc kubenswrapper[4799]: E1010 16:54:13.925663 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc 
error: code = NotFound desc = container is not created or running: checking if PID of ff0b33623ee2e909045d84098d1c8b4f4ee31b12318171307a8ee09a9499c92d is running failed: container process not found" containerID="ff0b33623ee2e909045d84098d1c8b4f4ee31b12318171307a8ee09a9499c92d" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 10 16:54:13 crc kubenswrapper[4799]: E1010 16:54:13.926131 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ff0b33623ee2e909045d84098d1c8b4f4ee31b12318171307a8ee09a9499c92d is running failed: container process not found" containerID="ff0b33623ee2e909045d84098d1c8b4f4ee31b12318171307a8ee09a9499c92d" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 10 16:54:13 crc kubenswrapper[4799]: E1010 16:54:13.926422 4799 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ff0b33623ee2e909045d84098d1c8b4f4ee31b12318171307a8ee09a9499c92d is running failed: container process not found" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="971dd170-cc55-481f-b76d-820102f811cd" containerName="nova-cell1-conductor-conductor" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.056720 4799 scope.go:117] "RemoveContainer" containerID="2a76b9a7b4fe69e531aad8d796bff209d24a3b141da26b70b841b3b26b793ad9" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.094698 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.107393 4799 scope.go:117] "RemoveContainer" containerID="44db3563c5cb7bf8e2e66a9895632efdaaa14dc9f4496418fa8fa16fcb55a2cc" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.108373 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.133403 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-754947f5d7-z2rks"] Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.139190 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-worker-754947f5d7-z2rks"] Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.152254 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell0f8a8-account-delete-6lbfn" event={"ID":"acf4a111-174e-42e8-8e71-d5bd053d5de2","Type":"ContainerStarted","Data":"d564a59e74011f00908e5d028874c8e790afb8ab03756bf88623b435ac7d513a"} Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.152338 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/novacell0f8a8-account-delete-6lbfn" podUID="acf4a111-174e-42e8-8e71-d5bd053d5de2" containerName="mariadb-account-delete" containerID="cri-o://d564a59e74011f00908e5d028874c8e790afb8ab03756bf88623b435ac7d513a" gracePeriod=30 Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.152720 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.163622 4799 generic.go:334] "Generic (PLEG): container finished" podID="2db42625-4f7b-479c-a580-c94d6cafb3fe" containerID="95e8c5c7eeb44313269abe5e0811c66db445161e27df4b78e13b1117ddf8ecc1" exitCode=0 Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.163678 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2db42625-4f7b-479c-a580-c94d6cafb3fe","Type":"ContainerDied","Data":"95e8c5c7eeb44313269abe5e0811c66db445161e27df4b78e13b1117ddf8ecc1"} Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.163704 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2db42625-4f7b-479c-a580-c94d6cafb3fe","Type":"ContainerDied","Data":"71fd6fd407296cc5885a7ecf01a417b8a732cb629421ca9799042013ff6cf68d"} Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.163715 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="71fd6fd407296cc5885a7ecf01a417b8a732cb629421ca9799042013ff6cf68d" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.173243 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.173243 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5e6870d5-faea-46d9-bebb-4d237b802910","Type":"ContainerDied","Data":"27fae14afbe6e49565b8b95bbdd2deb56421b8e68ed1a22a36f290975acc0d06"} Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.179116 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.179135 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"985d1485-7054-475b-8e60-85db5dc5afa3","Type":"ContainerDied","Data":"84483c379ac0b0a6a31e4b9cb3e92368a20abf5e0b33eaffc729e36b4500d232"} Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.196501 4799 scope.go:117] "RemoveContainer" containerID="ffeaf50de5582926c54ffdb7618e55611dd1a565aacf1bd0b7bbf41db579bdee" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.204517 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"31fc68f8-af18-42b7-a94c-90a22afea5f1","Type":"ContainerDied","Data":"1729d6e07b94453ac709f5c5f7a9355fe6c0f4500c9e56089fdbb9d43d7b42e8"} Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.204644 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.206668 4799 generic.go:334] "Generic (PLEG): container finished" podID="971dd170-cc55-481f-b76d-820102f811cd" containerID="ff0b33623ee2e909045d84098d1c8b4f4ee31b12318171307a8ee09a9499c92d" exitCode=0 Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.206801 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"971dd170-cc55-481f-b76d-820102f811cd","Type":"ContainerDied","Data":"ff0b33623ee2e909045d84098d1c8b4f4ee31b12318171307a8ee09a9499c92d"} Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.207649 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/novacell0f8a8-account-delete-6lbfn" podStartSLOduration=7.207610414 podStartE2EDuration="7.207610414s" podCreationTimestamp="2025-10-10 16:54:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:54:14.194262385 +0000 UTC m=+1347.702586500" watchObservedRunningTime="2025-10-10 16:54:14.207610414 +0000 UTC m=+1347.715934539" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.210731 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/31fc68f8-af18-42b7-a94c-90a22afea5f1-public-tls-certs\") pod \"31fc68f8-af18-42b7-a94c-90a22afea5f1\" (UID: \"31fc68f8-af18-42b7-a94c-90a22afea5f1\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.210830 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2p989\" (UniqueName: \"kubernetes.io/projected/31fc68f8-af18-42b7-a94c-90a22afea5f1-kube-api-access-2p989\") pod \"31fc68f8-af18-42b7-a94c-90a22afea5f1\" (UID: \"31fc68f8-af18-42b7-a94c-90a22afea5f1\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.210885 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e6870d5-faea-46d9-bebb-4d237b802910-nova-metadata-tls-certs\") pod \"5e6870d5-faea-46d9-bebb-4d237b802910\" (UID: \"5e6870d5-faea-46d9-bebb-4d237b802910\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.210922 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/31fc68f8-af18-42b7-a94c-90a22afea5f1-httpd-run\") pod \"31fc68f8-af18-42b7-a94c-90a22afea5f1\" (UID: \"31fc68f8-af18-42b7-a94c-90a22afea5f1\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.210950 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31fc68f8-af18-42b7-a94c-90a22afea5f1-config-data\") pod \"31fc68f8-af18-42b7-a94c-90a22afea5f1\" (UID: \"31fc68f8-af18-42b7-a94c-90a22afea5f1\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.210973 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31fc68f8-af18-42b7-a94c-90a22afea5f1-combined-ca-bundle\") pod \"31fc68f8-af18-42b7-a94c-90a22afea5f1\" (UID: \"31fc68f8-af18-42b7-a94c-90a22afea5f1\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.210999 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/5e6870d5-faea-46d9-bebb-4d237b802910-combined-ca-bundle\") pod \"5e6870d5-faea-46d9-bebb-4d237b802910\" (UID: \"5e6870d5-faea-46d9-bebb-4d237b802910\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.211022 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e6870d5-faea-46d9-bebb-4d237b802910-config-data\") pod \"5e6870d5-faea-46d9-bebb-4d237b802910\" (UID: \"5e6870d5-faea-46d9-bebb-4d237b802910\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.211050 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"31fc68f8-af18-42b7-a94c-90a22afea5f1\" (UID: \"31fc68f8-af18-42b7-a94c-90a22afea5f1\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.211078 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e424a8e6-64c8-4572-8706-33026a2cc44d","Type":"ContainerDied","Data":"0c7274b1845353423dc9ca09628f12d4f64ce4d85b22e824833d76018af77dc1"} Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.211111 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31fc68f8-af18-42b7-a94c-90a22afea5f1-scripts\") pod \"31fc68f8-af18-42b7-a94c-90a22afea5f1\" (UID: \"31fc68f8-af18-42b7-a94c-90a22afea5f1\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.211198 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lgzck\" (UniqueName: \"kubernetes.io/projected/5e6870d5-faea-46d9-bebb-4d237b802910-kube-api-access-lgzck\") pod \"5e6870d5-faea-46d9-bebb-4d237b802910\" (UID: \"5e6870d5-faea-46d9-bebb-4d237b802910\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.211250 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/31fc68f8-af18-42b7-a94c-90a22afea5f1-logs\") pod \"31fc68f8-af18-42b7-a94c-90a22afea5f1\" (UID: \"31fc68f8-af18-42b7-a94c-90a22afea5f1\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.211554 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e6870d5-faea-46d9-bebb-4d237b802910-logs\") pod \"5e6870d5-faea-46d9-bebb-4d237b802910\" (UID: \"5e6870d5-faea-46d9-bebb-4d237b802910\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.212930 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e6870d5-faea-46d9-bebb-4d237b802910-logs" (OuterVolumeSpecName: "logs") pod "5e6870d5-faea-46d9-bebb-4d237b802910" (UID: "5e6870d5-faea-46d9-bebb-4d237b802910"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.211205 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.215165 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31fc68f8-af18-42b7-a94c-90a22afea5f1-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "31fc68f8-af18-42b7-a94c-90a22afea5f1" (UID: "31fc68f8-af18-42b7-a94c-90a22afea5f1"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.215627 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31fc68f8-af18-42b7-a94c-90a22afea5f1-logs" (OuterVolumeSpecName: "logs") pod "31fc68f8-af18-42b7-a94c-90a22afea5f1" (UID: "31fc68f8-af18-42b7-a94c-90a22afea5f1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.218802 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31fc68f8-af18-42b7-a94c-90a22afea5f1-scripts" (OuterVolumeSpecName: "scripts") pod "31fc68f8-af18-42b7-a94c-90a22afea5f1" (UID: "31fc68f8-af18-42b7-a94c-90a22afea5f1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.219201 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e6870d5-faea-46d9-bebb-4d237b802910-kube-api-access-lgzck" (OuterVolumeSpecName: "kube-api-access-lgzck") pod "5e6870d5-faea-46d9-bebb-4d237b802910" (UID: "5e6870d5-faea-46d9-bebb-4d237b802910"). InnerVolumeSpecName "kube-api-access-lgzck". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.222578 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "31fc68f8-af18-42b7-a94c-90a22afea5f1" (UID: "31fc68f8-af18-42b7-a94c-90a22afea5f1"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.222593 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31fc68f8-af18-42b7-a94c-90a22afea5f1-kube-api-access-2p989" (OuterVolumeSpecName: "kube-api-access-2p989") pod "31fc68f8-af18-42b7-a94c-90a22afea5f1" (UID: "31fc68f8-af18-42b7-a94c-90a22afea5f1"). InnerVolumeSpecName "kube-api-access-2p989". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.224362 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-56d84d574d-x5sbm" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.225055 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutronfdbc-account-delete-b8x6d" podUID="445b5551-e072-43ca-a6e2-8f7fe726bb42" containerName="mariadb-account-delete" containerID="cri-o://84892136290e5ee51f0b78717f3f57f778e954c4b91dd0b7d98046a5d798a718" gracePeriod=30 Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.225015 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutronfdbc-account-delete-b8x6d" event={"ID":"445b5551-e072-43ca-a6e2-8f7fe726bb42","Type":"ContainerStarted","Data":"84892136290e5ee51f0b78717f3f57f778e954c4b91dd0b7d98046a5d798a718"} Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.239659 4799 generic.go:334] "Generic (PLEG): container finished" podID="7ae9763d-31dd-44c7-bf35-11a896a4f785" containerID="ef0cad99b2efacd5bcd212cd155d86551ed4cc35bedc046210eca5e8e009b86f" exitCode=0 Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.240053 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7ae9763d-31dd-44c7-bf35-11a896a4f785","Type":"ContainerDied","Data":"ef0cad99b2efacd5bcd212cd155d86551ed4cc35bedc046210eca5e8e009b86f"} Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.240138 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7ae9763d-31dd-44c7-bf35-11a896a4f785","Type":"ContainerDied","Data":"f8c08f14c82200271971cc8055f474c1a963f8ef65bd87b7e1189773741abb8f"} Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.240212 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f8c08f14c82200271971cc8055f474c1a963f8ef65bd87b7e1189773741abb8f" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.240356 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.253959 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31fc68f8-af18-42b7-a94c-90a22afea5f1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "31fc68f8-af18-42b7-a94c-90a22afea5f1" (UID: "31fc68f8-af18-42b7-a94c-90a22afea5f1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.257642 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.259387 4799 generic.go:334] "Generic (PLEG): container finished" podID="ac766919-d788-40da-879a-627919926594" containerID="3395c2ca55d83e7b3885a78bfc0d6276c0ac7455bc1ef7cc5df2ccda1fecad2d" exitCode=0 Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.259537 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-56d84d574d-x5sbm" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.259598 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-56d84d574d-x5sbm" event={"ID":"ac766919-d788-40da-879a-627919926594","Type":"ContainerDied","Data":"3395c2ca55d83e7b3885a78bfc0d6276c0ac7455bc1ef7cc5df2ccda1fecad2d"} Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.260409 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-56d84d574d-x5sbm" event={"ID":"ac766919-d788-40da-879a-627919926594","Type":"ContainerDied","Data":"c532cde8cbfd5bdb70765bccab4d581443eb1828222a4e880f8d0cab6492ff21"} Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.263712 4799 scope.go:117] "RemoveContainer" containerID="a8fdb5ed39a199e1efb2bc1f77b2de74dd205c6dcb6fea5e9f71d53f89199fcd" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.267646 4799 generic.go:334] "Generic (PLEG): container finished" podID="7dc78f94-acb0-4411-b1a2-14dd6500674b" containerID="a5a1b6e00a35ec28b0a11cef63bf27aa74edf00ead5c5dff888593622c9a0138" exitCode=0 Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.267775 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican0700-account-delete-smncx" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.268255 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-6565b9cf48-rl77d" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.268399 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7dc78f94-acb0-4411-b1a2-14dd6500674b","Type":"ContainerDied","Data":"a5a1b6e00a35ec28b0a11cef63bf27aa74edf00ead5c5dff888593622c9a0138"} Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.268425 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7dc78f94-acb0-4411-b1a2-14dd6500674b","Type":"ContainerDied","Data":"09c101bdfa8200db69fbedd9879fffc56304b95ad83b7a1bac433f5c3fed197c"} Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.268435 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="09c101bdfa8200db69fbedd9879fffc56304b95ad83b7a1bac433f5c3fed197c" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.268571 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.268617 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.268645 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystonecab5-account-delete-jnkq7" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.268680 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cindera843-account-delete-dptkx" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.268710 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.268859 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-5b85b5697b-9wc6c" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.276440 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.291571 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e6870d5-faea-46d9-bebb-4d237b802910-config-data" (OuterVolumeSpecName: "config-data") pod "5e6870d5-faea-46d9-bebb-4d237b802910" (UID: "5e6870d5-faea-46d9-bebb-4d237b802910"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.304003 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e6870d5-faea-46d9-bebb-4d237b802910-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5e6870d5-faea-46d9-bebb-4d237b802910" (UID: "5e6870d5-faea-46d9-bebb-4d237b802910"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.313616 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/985d1485-7054-475b-8e60-85db5dc5afa3-kolla-config\") pod \"985d1485-7054-475b-8e60-85db5dc5afa3\" (UID: \"985d1485-7054-475b-8e60-85db5dc5afa3\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.313664 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/985d1485-7054-475b-8e60-85db5dc5afa3-memcached-tls-certs\") pod \"985d1485-7054-475b-8e60-85db5dc5afa3\" (UID: \"985d1485-7054-475b-8e60-85db5dc5afa3\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.313702 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tp8sb\" (UniqueName: \"kubernetes.io/projected/985d1485-7054-475b-8e60-85db5dc5afa3-kube-api-access-tp8sb\") pod \"985d1485-7054-475b-8e60-85db5dc5afa3\" (UID: \"985d1485-7054-475b-8e60-85db5dc5afa3\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.313774 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/985d1485-7054-475b-8e60-85db5dc5afa3-config-data\") pod \"985d1485-7054-475b-8e60-85db5dc5afa3\" (UID: \"985d1485-7054-475b-8e60-85db5dc5afa3\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.313834 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/985d1485-7054-475b-8e60-85db5dc5afa3-combined-ca-bundle\") pod \"985d1485-7054-475b-8e60-85db5dc5afa3\" (UID: \"985d1485-7054-475b-8e60-85db5dc5afa3\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.314274 4799 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/31fc68f8-af18-42b7-a94c-90a22afea5f1-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.314293 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31fc68f8-af18-42b7-a94c-90a22afea5f1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.314303 4799 reconciler_common.go:293] 
"Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e6870d5-faea-46d9-bebb-4d237b802910-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.314312 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e6870d5-faea-46d9-bebb-4d237b802910-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.314330 4799 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.314339 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31fc68f8-af18-42b7-a94c-90a22afea5f1-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.314347 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lgzck\" (UniqueName: \"kubernetes.io/projected/5e6870d5-faea-46d9-bebb-4d237b802910-kube-api-access-lgzck\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.314358 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/31fc68f8-af18-42b7-a94c-90a22afea5f1-logs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.314351 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/985d1485-7054-475b-8e60-85db5dc5afa3-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "985d1485-7054-475b-8e60-85db5dc5afa3" (UID: "985d1485-7054-475b-8e60-85db5dc5afa3"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.314366 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e6870d5-faea-46d9-bebb-4d237b802910-logs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.314414 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2p989\" (UniqueName: \"kubernetes.io/projected/31fc68f8-af18-42b7-a94c-90a22afea5f1-kube-api-access-2p989\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.314939 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/985d1485-7054-475b-8e60-85db5dc5afa3-config-data" (OuterVolumeSpecName: "config-data") pod "985d1485-7054-475b-8e60-85db5dc5afa3" (UID: "985d1485-7054-475b-8e60-85db5dc5afa3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.317880 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutronfdbc-account-delete-b8x6d" podStartSLOduration=7.317859359 podStartE2EDuration="7.317859359s" podCreationTimestamp="2025-10-10 16:54:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 16:54:14.285712517 +0000 UTC m=+1347.794036632" watchObservedRunningTime="2025-10-10 16:54:14.317859359 +0000 UTC m=+1347.826183464" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.334928 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.342024 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.354887 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e6870d5-faea-46d9-bebb-4d237b802910-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "5e6870d5-faea-46d9-bebb-4d237b802910" (UID: "5e6870d5-faea-46d9-bebb-4d237b802910"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.365645 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31fc68f8-af18-42b7-a94c-90a22afea5f1-config-data" (OuterVolumeSpecName: "config-data") pod "31fc68f8-af18-42b7-a94c-90a22afea5f1" (UID: "31fc68f8-af18-42b7-a94c-90a22afea5f1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.366713 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/985d1485-7054-475b-8e60-85db5dc5afa3-kube-api-access-tp8sb" (OuterVolumeSpecName: "kube-api-access-tp8sb") pod "985d1485-7054-475b-8e60-85db5dc5afa3" (UID: "985d1485-7054-475b-8e60-85db5dc5afa3"). InnerVolumeSpecName "kube-api-access-tp8sb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.385203 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31fc68f8-af18-42b7-a94c-90a22afea5f1-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "31fc68f8-af18-42b7-a94c-90a22afea5f1" (UID: "31fc68f8-af18-42b7-a94c-90a22afea5f1"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.396698 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/985d1485-7054-475b-8e60-85db5dc5afa3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "985d1485-7054-475b-8e60-85db5dc5afa3" (UID: "985d1485-7054-475b-8e60-85db5dc5afa3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.416815 4799 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.418970 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2db42625-4f7b-479c-a580-c94d6cafb3fe-config-data\") pod \"2db42625-4f7b-479c-a580-c94d6cafb3fe\" (UID: \"2db42625-4f7b-479c-a580-c94d6cafb3fe\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.418998 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ae9763d-31dd-44c7-bf35-11a896a4f785-config-data\") pod \"7ae9763d-31dd-44c7-bf35-11a896a4f785\" (UID: \"7ae9763d-31dd-44c7-bf35-11a896a4f785\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.419020 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac766919-d788-40da-879a-627919926594-config-data\") pod \"ac766919-d788-40da-879a-627919926594\" (UID: \"ac766919-d788-40da-879a-627919926594\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.419039 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/971dd170-cc55-481f-b76d-820102f811cd-combined-ca-bundle\") pod \"971dd170-cc55-481f-b76d-820102f811cd\" (UID: \"971dd170-cc55-481f-b76d-820102f811cd\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.419057 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2db42625-4f7b-479c-a580-c94d6cafb3fe-public-tls-certs\") pod \"2db42625-4f7b-479c-a580-c94d6cafb3fe\" (UID: \"2db42625-4f7b-479c-a580-c94d6cafb3fe\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.419080 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2db42625-4f7b-479c-a580-c94d6cafb3fe-combined-ca-bundle\") pod \"2db42625-4f7b-479c-a580-c94d6cafb3fe\" (UID: \"2db42625-4f7b-479c-a580-c94d6cafb3fe\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.419118 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8xx82\" (UniqueName: \"kubernetes.io/projected/7dc78f94-acb0-4411-b1a2-14dd6500674b-kube-api-access-8xx82\") pod \"7dc78f94-acb0-4411-b1a2-14dd6500674b\" (UID: \"7dc78f94-acb0-4411-b1a2-14dd6500674b\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.419139 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ac766919-d788-40da-879a-627919926594-logs\") pod \"ac766919-d788-40da-879a-627919926594\" (UID: \"ac766919-d788-40da-879a-627919926594\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.419157 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fnf4n\" (UniqueName: \"kubernetes.io/projected/7ae9763d-31dd-44c7-bf35-11a896a4f785-kube-api-access-fnf4n\") pod \"7ae9763d-31dd-44c7-bf35-11a896a4f785\" (UID: \"7ae9763d-31dd-44c7-bf35-11a896a4f785\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.419171 4799 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7dc78f94-acb0-4411-b1a2-14dd6500674b-scripts\") pod \"7dc78f94-acb0-4411-b1a2-14dd6500674b\" (UID: \"7dc78f94-acb0-4411-b1a2-14dd6500674b\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.419187 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ae9763d-31dd-44c7-bf35-11a896a4f785-combined-ca-bundle\") pod \"7ae9763d-31dd-44c7-bf35-11a896a4f785\" (UID: \"7ae9763d-31dd-44c7-bf35-11a896a4f785\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.419202 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kpmn4\" (UniqueName: \"kubernetes.io/projected/ac766919-d788-40da-879a-627919926594-kube-api-access-kpmn4\") pod \"ac766919-d788-40da-879a-627919926594\" (UID: \"ac766919-d788-40da-879a-627919926594\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.419228 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac766919-d788-40da-879a-627919926594-combined-ca-bundle\") pod \"ac766919-d788-40da-879a-627919926594\" (UID: \"ac766919-d788-40da-879a-627919926594\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.419243 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2p2w9\" (UniqueName: \"kubernetes.io/projected/2db42625-4f7b-479c-a580-c94d6cafb3fe-kube-api-access-2p2w9\") pod \"2db42625-4f7b-479c-a580-c94d6cafb3fe\" (UID: \"2db42625-4f7b-479c-a580-c94d6cafb3fe\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.419265 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7dc78f94-acb0-4411-b1a2-14dd6500674b-config-data-custom\") pod \"7dc78f94-acb0-4411-b1a2-14dd6500674b\" (UID: \"7dc78f94-acb0-4411-b1a2-14dd6500674b\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.419282 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac766919-d788-40da-879a-627919926594-public-tls-certs\") pod \"ac766919-d788-40da-879a-627919926594\" (UID: \"ac766919-d788-40da-879a-627919926594\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.419303 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2db42625-4f7b-479c-a580-c94d6cafb3fe-internal-tls-certs\") pod \"2db42625-4f7b-479c-a580-c94d6cafb3fe\" (UID: \"2db42625-4f7b-479c-a580-c94d6cafb3fe\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.419325 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7dc78f94-acb0-4411-b1a2-14dd6500674b-etc-machine-id\") pod \"7dc78f94-acb0-4411-b1a2-14dd6500674b\" (UID: \"7dc78f94-acb0-4411-b1a2-14dd6500674b\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.419342 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7dc78f94-acb0-4411-b1a2-14dd6500674b-config-data\") pod \"7dc78f94-acb0-4411-b1a2-14dd6500674b\" (UID: \"7dc78f94-acb0-4411-b1a2-14dd6500674b\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.419358 4799 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac766919-d788-40da-879a-627919926594-internal-tls-certs\") pod \"ac766919-d788-40da-879a-627919926594\" (UID: \"ac766919-d788-40da-879a-627919926594\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.419383 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2db42625-4f7b-479c-a580-c94d6cafb3fe-logs\") pod \"2db42625-4f7b-479c-a580-c94d6cafb3fe\" (UID: \"2db42625-4f7b-479c-a580-c94d6cafb3fe\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.419404 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ac766919-d788-40da-879a-627919926594-config-data-custom\") pod \"ac766919-d788-40da-879a-627919926594\" (UID: \"ac766919-d788-40da-879a-627919926594\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.419430 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7f2f6\" (UniqueName: \"kubernetes.io/projected/971dd170-cc55-481f-b76d-820102f811cd-kube-api-access-7f2f6\") pod \"971dd170-cc55-481f-b76d-820102f811cd\" (UID: \"971dd170-cc55-481f-b76d-820102f811cd\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.419445 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/971dd170-cc55-481f-b76d-820102f811cd-config-data\") pod \"971dd170-cc55-481f-b76d-820102f811cd\" (UID: \"971dd170-cc55-481f-b76d-820102f811cd\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.419460 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7dc78f94-acb0-4411-b1a2-14dd6500674b-combined-ca-bundle\") pod \"7dc78f94-acb0-4411-b1a2-14dd6500674b\" (UID: \"7dc78f94-acb0-4411-b1a2-14dd6500674b\") " Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.419749 4799 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/985d1485-7054-475b-8e60-85db5dc5afa3-kolla-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.419775 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tp8sb\" (UniqueName: \"kubernetes.io/projected/985d1485-7054-475b-8e60-85db5dc5afa3-kube-api-access-tp8sb\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.419786 4799 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/31fc68f8-af18-42b7-a94c-90a22afea5f1-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.419796 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/985d1485-7054-475b-8e60-85db5dc5afa3-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.419805 4799 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e6870d5-faea-46d9-bebb-4d237b802910-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.419814 4799 reconciler_common.go:293] "Volume detached for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/985d1485-7054-475b-8e60-85db5dc5afa3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.419823 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31fc68f8-af18-42b7-a94c-90a22afea5f1-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.419831 4799 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.421953 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2db42625-4f7b-479c-a580-c94d6cafb3fe-logs" (OuterVolumeSpecName: "logs") pod "2db42625-4f7b-479c-a580-c94d6cafb3fe" (UID: "2db42625-4f7b-479c-a580-c94d6cafb3fe"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.428103 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac766919-d788-40da-879a-627919926594-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ac766919-d788-40da-879a-627919926594" (UID: "ac766919-d788-40da-879a-627919926594"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.435813 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/971dd170-cc55-481f-b76d-820102f811cd-kube-api-access-7f2f6" (OuterVolumeSpecName: "kube-api-access-7f2f6") pod "971dd170-cc55-481f-b76d-820102f811cd" (UID: "971dd170-cc55-481f-b76d-820102f811cd"). InnerVolumeSpecName "kube-api-access-7f2f6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.438961 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7dc78f94-acb0-4411-b1a2-14dd6500674b-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "7dc78f94-acb0-4411-b1a2-14dd6500674b" (UID: "7dc78f94-acb0-4411-b1a2-14dd6500674b"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.439279 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac766919-d788-40da-879a-627919926594-kube-api-access-kpmn4" (OuterVolumeSpecName: "kube-api-access-kpmn4") pod "ac766919-d788-40da-879a-627919926594" (UID: "ac766919-d788-40da-879a-627919926594"). InnerVolumeSpecName "kube-api-access-kpmn4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.452260 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac766919-d788-40da-879a-627919926594-logs" (OuterVolumeSpecName: "logs") pod "ac766919-d788-40da-879a-627919926594" (UID: "ac766919-d788-40da-879a-627919926594"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.452389 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7dc78f94-acb0-4411-b1a2-14dd6500674b-scripts" (OuterVolumeSpecName: "scripts") pod "7dc78f94-acb0-4411-b1a2-14dd6500674b" (UID: "7dc78f94-acb0-4411-b1a2-14dd6500674b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.453837 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ae9763d-31dd-44c7-bf35-11a896a4f785-kube-api-access-fnf4n" (OuterVolumeSpecName: "kube-api-access-fnf4n") pod "7ae9763d-31dd-44c7-bf35-11a896a4f785" (UID: "7ae9763d-31dd-44c7-bf35-11a896a4f785"). InnerVolumeSpecName "kube-api-access-fnf4n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.453898 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7dc78f94-acb0-4411-b1a2-14dd6500674b-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "7dc78f94-acb0-4411-b1a2-14dd6500674b" (UID: "7dc78f94-acb0-4411-b1a2-14dd6500674b"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.482997 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.483251 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7dc78f94-acb0-4411-b1a2-14dd6500674b-kube-api-access-8xx82" (OuterVolumeSpecName: "kube-api-access-8xx82") pod "7dc78f94-acb0-4411-b1a2-14dd6500674b" (UID: "7dc78f94-acb0-4411-b1a2-14dd6500674b"). InnerVolumeSpecName "kube-api-access-8xx82". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.506127 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2db42625-4f7b-479c-a580-c94d6cafb3fe-kube-api-access-2p2w9" (OuterVolumeSpecName: "kube-api-access-2p2w9") pod "2db42625-4f7b-479c-a580-c94d6cafb3fe" (UID: "2db42625-4f7b-479c-a580-c94d6cafb3fe"). InnerVolumeSpecName "kube-api-access-2p2w9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.519059 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.520715 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ae9763d-31dd-44c7-bf35-11a896a4f785-config-data" (OuterVolumeSpecName: "config-data") pod "7ae9763d-31dd-44c7-bf35-11a896a4f785" (UID: "7ae9763d-31dd-44c7-bf35-11a896a4f785"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.523849 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ae9763d-31dd-44c7-bf35-11a896a4f785-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.523873 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8xx82\" (UniqueName: \"kubernetes.io/projected/7dc78f94-acb0-4411-b1a2-14dd6500674b-kube-api-access-8xx82\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.523890 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ac766919-d788-40da-879a-627919926594-logs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.523901 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fnf4n\" (UniqueName: \"kubernetes.io/projected/7ae9763d-31dd-44c7-bf35-11a896a4f785-kube-api-access-fnf4n\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.523910 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7dc78f94-acb0-4411-b1a2-14dd6500674b-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.523919 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kpmn4\" (UniqueName: \"kubernetes.io/projected/ac766919-d788-40da-879a-627919926594-kube-api-access-kpmn4\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.523932 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2p2w9\" (UniqueName: \"kubernetes.io/projected/2db42625-4f7b-479c-a580-c94d6cafb3fe-kube-api-access-2p2w9\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.523942 4799 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7dc78f94-acb0-4411-b1a2-14dd6500674b-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.523951 4799 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7dc78f94-acb0-4411-b1a2-14dd6500674b-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.523959 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2db42625-4f7b-479c-a580-c94d6cafb3fe-logs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.523972 4799 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ac766919-d788-40da-879a-627919926594-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.523981 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7f2f6\" (UniqueName: \"kubernetes.io/projected/971dd170-cc55-481f-b76d-820102f811cd-kube-api-access-7f2f6\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.573899 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ae9763d-31dd-44c7-bf35-11a896a4f785-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod 
"7ae9763d-31dd-44c7-bf35-11a896a4f785" (UID: "7ae9763d-31dd-44c7-bf35-11a896a4f785"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.587350 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac766919-d788-40da-879a-627919926594-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ac766919-d788-40da-879a-627919926594" (UID: "ac766919-d788-40da-879a-627919926594"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.595787 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2db42625-4f7b-479c-a580-c94d6cafb3fe-config-data" (OuterVolumeSpecName: "config-data") pod "2db42625-4f7b-479c-a580-c94d6cafb3fe" (UID: "2db42625-4f7b-479c-a580-c94d6cafb3fe"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.605152 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2db42625-4f7b-479c-a580-c94d6cafb3fe-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "2db42625-4f7b-479c-a580-c94d6cafb3fe" (UID: "2db42625-4f7b-479c-a580-c94d6cafb3fe"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.627274 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2db42625-4f7b-479c-a580-c94d6cafb3fe-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2db42625-4f7b-479c-a580-c94d6cafb3fe" (UID: "2db42625-4f7b-479c-a580-c94d6cafb3fe"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.629406 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2db42625-4f7b-479c-a580-c94d6cafb3fe-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.629510 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ae9763d-31dd-44c7-bf35-11a896a4f785-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.629523 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac766919-d788-40da-879a-627919926594-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.629532 4799 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2db42625-4f7b-479c-a580-c94d6cafb3fe-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.629541 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2db42625-4f7b-479c-a580-c94d6cafb3fe-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.645046 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/985d1485-7054-475b-8e60-85db5dc5afa3-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "985d1485-7054-475b-8e60-85db5dc5afa3" (UID: "985d1485-7054-475b-8e60-85db5dc5afa3"). InnerVolumeSpecName "memcached-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.645668 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/971dd170-cc55-481f-b76d-820102f811cd-config-data" (OuterVolumeSpecName: "config-data") pod "971dd170-cc55-481f-b76d-820102f811cd" (UID: "971dd170-cc55-481f-b76d-820102f811cd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.647236 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac766919-d788-40da-879a-627919926594-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "ac766919-d788-40da-879a-627919926594" (UID: "ac766919-d788-40da-879a-627919926594"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.647514 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2db42625-4f7b-479c-a580-c94d6cafb3fe-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "2db42625-4f7b-479c-a580-c94d6cafb3fe" (UID: "2db42625-4f7b-479c-a580-c94d6cafb3fe"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.647595 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/971dd170-cc55-481f-b76d-820102f811cd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "971dd170-cc55-481f-b76d-820102f811cd" (UID: "971dd170-cc55-481f-b76d-820102f811cd"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.666095 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac766919-d788-40da-879a-627919926594-config-data" (OuterVolumeSpecName: "config-data") pod "ac766919-d788-40da-879a-627919926594" (UID: "ac766919-d788-40da-879a-627919926594"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.676725 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac766919-d788-40da-879a-627919926594-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "ac766919-d788-40da-879a-627919926594" (UID: "ac766919-d788-40da-879a-627919926594"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.678975 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7dc78f94-acb0-4411-b1a2-14dd6500674b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7dc78f94-acb0-4411-b1a2-14dd6500674b" (UID: "7dc78f94-acb0-4411-b1a2-14dd6500674b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.679465 4799 scope.go:117] "RemoveContainer" containerID="7ff035a8a6498fce1542054aa6ef55bd158eb92c46cb410ac3528b2a07a6250d" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.679588 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystonecab5-account-delete-jnkq7" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.692781 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican0700-account-delete-smncx"] Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.700774 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican0700-account-delete-smncx"] Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.716592 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.718792 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.733951 4799 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/985d1485-7054-475b-8e60-85db5dc5afa3-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.733988 4799 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2db42625-4f7b-479c-a580-c94d6cafb3fe-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.733998 4799 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac766919-d788-40da-879a-627919926594-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.734006 4799 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac766919-d788-40da-879a-627919926594-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 
16:54:14.734015 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/971dd170-cc55-481f-b76d-820102f811cd-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.734023 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7dc78f94-acb0-4411-b1a2-14dd6500674b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.734031 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac766919-d788-40da-879a-627919926594-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.734039 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/971dd170-cc55-481f-b76d-820102f811cd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: E1010 16:54:14.734103 4799 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Oct 10 16:54:14 crc kubenswrapper[4799]: E1010 16:54:14.734153 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/9fd6f03f-abea-4c29-8060-0705bb0af2c7-config-data podName:9fd6f03f-abea-4c29-8060-0705bb0af2c7 nodeName:}" failed. No retries permitted until 2025-10-10 16:54:22.734134589 +0000 UTC m=+1356.242458704 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/9fd6f03f-abea-4c29-8060-0705bb0af2c7-config-data") pod "rabbitmq-server-0" (UID: "9fd6f03f-abea-4c29-8060-0705bb0af2c7") : configmap "rabbitmq-config-data" not found Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.730628 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cindera843-account-delete-dptkx"] Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.754115 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cindera843-account-delete-dptkx"] Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.754134 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-6565b9cf48-rl77d"] Oct 10 16:54:14 crc kubenswrapper[4799]: E1010 16:54:14.756140 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="35d51a78c7ee3dde16f77dfec5a6f5f69c8e3d2b0eccd75b5f3e3226dc047eeb" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.760332 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-6565b9cf48-rl77d"] Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.760917 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7dc78f94-acb0-4411-b1a2-14dd6500674b-config-data" (OuterVolumeSpecName: "config-data") pod "7dc78f94-acb0-4411-b1a2-14dd6500674b" (UID: "7dc78f94-acb0-4411-b1a2-14dd6500674b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:14 crc kubenswrapper[4799]: E1010 16:54:14.761082 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="35d51a78c7ee3dde16f77dfec5a6f5f69c8e3d2b0eccd75b5f3e3226dc047eeb" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 10 16:54:14 crc kubenswrapper[4799]: E1010 16:54:14.762873 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="35d51a78c7ee3dde16f77dfec5a6f5f69c8e3d2b0eccd75b5f3e3226dc047eeb" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 10 16:54:14 crc kubenswrapper[4799]: E1010 16:54:14.762923 4799 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="60be0e86-f2dd-4575-b3c8-0131575b1cd8" containerName="nova-cell0-conductor-conductor" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.776428 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-5b85b5697b-9wc6c"] Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.783670 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-keystone-listener-5b85b5697b-9wc6c"] Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.804911 4799 scope.go:117] "RemoveContainer" containerID="a8fdb5ed39a199e1efb2bc1f77b2de74dd205c6dcb6fea5e9f71d53f89199fcd" Oct 10 16:54:14 crc kubenswrapper[4799]: E1010 16:54:14.805626 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8fdb5ed39a199e1efb2bc1f77b2de74dd205c6dcb6fea5e9f71d53f89199fcd\": container with ID starting with a8fdb5ed39a199e1efb2bc1f77b2de74dd205c6dcb6fea5e9f71d53f89199fcd not found: ID does not exist" containerID="a8fdb5ed39a199e1efb2bc1f77b2de74dd205c6dcb6fea5e9f71d53f89199fcd" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.805698 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8fdb5ed39a199e1efb2bc1f77b2de74dd205c6dcb6fea5e9f71d53f89199fcd"} err="failed to get container status \"a8fdb5ed39a199e1efb2bc1f77b2de74dd205c6dcb6fea5e9f71d53f89199fcd\": rpc error: code = NotFound desc = could not find container \"a8fdb5ed39a199e1efb2bc1f77b2de74dd205c6dcb6fea5e9f71d53f89199fcd\": container with ID starting with a8fdb5ed39a199e1efb2bc1f77b2de74dd205c6dcb6fea5e9f71d53f89199fcd not found: ID does not exist" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.805735 4799 scope.go:117] "RemoveContainer" containerID="7ff035a8a6498fce1542054aa6ef55bd158eb92c46cb410ac3528b2a07a6250d" Oct 10 16:54:14 crc kubenswrapper[4799]: E1010 16:54:14.807767 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ff035a8a6498fce1542054aa6ef55bd158eb92c46cb410ac3528b2a07a6250d\": container with ID starting with 7ff035a8a6498fce1542054aa6ef55bd158eb92c46cb410ac3528b2a07a6250d not found: ID does not exist" containerID="7ff035a8a6498fce1542054aa6ef55bd158eb92c46cb410ac3528b2a07a6250d" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.807797 4799 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ff035a8a6498fce1542054aa6ef55bd158eb92c46cb410ac3528b2a07a6250d"} err="failed to get container status \"7ff035a8a6498fce1542054aa6ef55bd158eb92c46cb410ac3528b2a07a6250d\": rpc error: code = NotFound desc = could not find container \"7ff035a8a6498fce1542054aa6ef55bd158eb92c46cb410ac3528b2a07a6250d\": container with ID starting with 7ff035a8a6498fce1542054aa6ef55bd158eb92c46cb410ac3528b2a07a6250d not found: ID does not exist" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.807811 4799 scope.go:117] "RemoveContainer" containerID="9a73e41efcf012c81cfb3fdb00ec877a3a4f57b043b7fb464fdebcd73d9d80d1" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.817115 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.836556 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7dc78f94-acb0-4411-b1a2-14dd6500674b-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.840286 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.855404 4799 scope.go:117] "RemoveContainer" containerID="d6ec86e7f860ab8ca24a46400675b4f8e0135552b7120d2f42340e7afc614296" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.858010 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.865731 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.882694 4799 scope.go:117] "RemoveContainer" containerID="aaea200524f506182ac0c5dffe0ab093f1a9490f2edc8d9d614d7b6635f1619c" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.882985 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-69f7ddf877-mclzd" podUID="78820835-eb2d-40d8-a497-e9a351a9cef9" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.166:9696/\": dial tcp 10.217.0.166:9696: connect: connection refused" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.889796 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.907404 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.916491 4799 scope.go:117] "RemoveContainer" containerID="d7c80cad377e5693f3e27682ebd24a34de4bbf7f43f72423036babd6bf753968" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.918131 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.920234 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-69dc9744df-smbqh" podUID="34f6a30f-81f3-4240-8a4e-d7f1220801ab" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.169:8080/healthcheck\": context deadline exceeded" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.920273 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-69dc9744df-smbqh" podUID="34f6a30f-81f3-4240-8a4e-d7f1220801ab" 
containerName="proxy-server" probeResult="failure" output="Get \"https://10.217.0.169:8080/healthcheck\": context deadline exceeded" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.926723 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.931857 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/memcached-0"] Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.939896 4799 scope.go:117] "RemoveContainer" containerID="d2b3ab1b197b085ea5a23bbdabb78c44e9c002b3cd5536ddb8dc1fcd93bae475" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.945678 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-56d84d574d-x5sbm"] Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.952156 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-56d84d574d-x5sbm"] Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.960766 4799 scope.go:117] "RemoveContainer" containerID="f233fe566e513cc4d04821964bcde90cce13e4323a97a80af9c4e16bc8ddb102" Oct 10 16:54:14 crc kubenswrapper[4799]: I1010 16:54:14.979585 4799 scope.go:117] "RemoveContainer" containerID="45b55f581534a90bac80ffd0b27bca1fc0d2639dbcc1d9165ca16243e681541e" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.015208 4799 scope.go:117] "RemoveContainer" containerID="f9d2d1faeec7a5eede440474335541991431514b0a33516124505bcbefe52453" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.038554 4799 scope.go:117] "RemoveContainer" containerID="3395c2ca55d83e7b3885a78bfc0d6276c0ac7455bc1ef7cc5df2ccda1fecad2d" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.039248 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-config-data-generated\") pod \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.039318 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-operator-scripts\") pod \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.039537 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-kolla-config\") pod \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.039793 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-config-data-default\") pod \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.039824 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.040002 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"secrets\" (UniqueName: \"kubernetes.io/secret/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-secrets\") pod \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.040099 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-galera-tls-certs\") pod \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.040187 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q4v4t\" (UniqueName: \"kubernetes.io/projected/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-kube-api-access-q4v4t\") pod \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.040220 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-combined-ca-bundle\") pod \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\" (UID: \"fe3f0980-0eb7-4267-953a-3fcfa08a22b3\") " Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.040638 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fe3f0980-0eb7-4267-953a-3fcfa08a22b3" (UID: "fe3f0980-0eb7-4267-953a-3fcfa08a22b3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.040962 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "fe3f0980-0eb7-4267-953a-3fcfa08a22b3" (UID: "fe3f0980-0eb7-4267-953a-3fcfa08a22b3"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.041877 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "fe3f0980-0eb7-4267-953a-3fcfa08a22b3" (UID: "fe3f0980-0eb7-4267-953a-3fcfa08a22b3"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.042886 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "fe3f0980-0eb7-4267-953a-3fcfa08a22b3" (UID: "fe3f0980-0eb7-4267-953a-3fcfa08a22b3"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.049288 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-kube-api-access-q4v4t" (OuterVolumeSpecName: "kube-api-access-q4v4t") pod "fe3f0980-0eb7-4267-953a-3fcfa08a22b3" (UID: "fe3f0980-0eb7-4267-953a-3fcfa08a22b3"). InnerVolumeSpecName "kube-api-access-q4v4t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.051230 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-secrets" (OuterVolumeSpecName: "secrets") pod "fe3f0980-0eb7-4267-953a-3fcfa08a22b3" (UID: "fe3f0980-0eb7-4267-953a-3fcfa08a22b3"). InnerVolumeSpecName "secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.056335 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "mysql-db") pod "fe3f0980-0eb7-4267-953a-3fcfa08a22b3" (UID: "fe3f0980-0eb7-4267-953a-3fcfa08a22b3"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.074399 4799 scope.go:117] "RemoveContainer" containerID="5af6a65cf06c9580d2979a8455e9bbfb9189ae51ad9185bd52c4a3fc972febbc" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.084030 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fe3f0980-0eb7-4267-953a-3fcfa08a22b3" (UID: "fe3f0980-0eb7-4267-953a-3fcfa08a22b3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.097744 4799 scope.go:117] "RemoveContainer" containerID="3395c2ca55d83e7b3885a78bfc0d6276c0ac7455bc1ef7cc5df2ccda1fecad2d" Oct 10 16:54:15 crc kubenswrapper[4799]: E1010 16:54:15.098155 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3395c2ca55d83e7b3885a78bfc0d6276c0ac7455bc1ef7cc5df2ccda1fecad2d\": container with ID starting with 3395c2ca55d83e7b3885a78bfc0d6276c0ac7455bc1ef7cc5df2ccda1fecad2d not found: ID does not exist" containerID="3395c2ca55d83e7b3885a78bfc0d6276c0ac7455bc1ef7cc5df2ccda1fecad2d" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.098190 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3395c2ca55d83e7b3885a78bfc0d6276c0ac7455bc1ef7cc5df2ccda1fecad2d"} err="failed to get container status \"3395c2ca55d83e7b3885a78bfc0d6276c0ac7455bc1ef7cc5df2ccda1fecad2d\": rpc error: code = NotFound desc = could not find container \"3395c2ca55d83e7b3885a78bfc0d6276c0ac7455bc1ef7cc5df2ccda1fecad2d\": container with ID starting with 3395c2ca55d83e7b3885a78bfc0d6276c0ac7455bc1ef7cc5df2ccda1fecad2d not found: ID does not exist" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.098214 4799 scope.go:117] "RemoveContainer" containerID="5af6a65cf06c9580d2979a8455e9bbfb9189ae51ad9185bd52c4a3fc972febbc" Oct 10 16:54:15 crc kubenswrapper[4799]: E1010 16:54:15.098514 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5af6a65cf06c9580d2979a8455e9bbfb9189ae51ad9185bd52c4a3fc972febbc\": container with ID starting with 5af6a65cf06c9580d2979a8455e9bbfb9189ae51ad9185bd52c4a3fc972febbc not found: ID does not exist" containerID="5af6a65cf06c9580d2979a8455e9bbfb9189ae51ad9185bd52c4a3fc972febbc" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.098580 4799 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"5af6a65cf06c9580d2979a8455e9bbfb9189ae51ad9185bd52c4a3fc972febbc"} err="failed to get container status \"5af6a65cf06c9580d2979a8455e9bbfb9189ae51ad9185bd52c4a3fc972febbc\": rpc error: code = NotFound desc = could not find container \"5af6a65cf06c9580d2979a8455e9bbfb9189ae51ad9185bd52c4a3fc972febbc\": container with ID starting with 5af6a65cf06c9580d2979a8455e9bbfb9189ae51ad9185bd52c4a3fc972febbc not found: ID does not exist" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.101971 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "fe3f0980-0eb7-4267-953a-3fcfa08a22b3" (UID: "fe3f0980-0eb7-4267-953a-3fcfa08a22b3"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.141537 4799 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-config-data-generated\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.141570 4799 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-operator-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.141582 4799 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-kolla-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.141593 4799 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-config-data-default\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.141618 4799 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.141630 4799 reconciler_common.go:293] "Volume detached for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-secrets\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.141640 4799 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.141650 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q4v4t\" (UniqueName: \"kubernetes.io/projected/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-kube-api-access-q4v4t\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.141659 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe3f0980-0eb7-4267-953a-3fcfa08a22b3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.160160 4799 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node 
"crc" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.244291 4799 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.249033 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.249098 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.249147 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.250141 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"145ba828d4b654e155342b2053228303da0bf7c989b77f4342b3cbafaea6b6c8"} pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.250219 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" containerID="cri-o://145ba828d4b654e155342b2053228303da0bf7c989b77f4342b3cbafaea6b6c8" gracePeriod=600 Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.282681 4799 generic.go:334] "Generic (PLEG): container finished" podID="fe3f0980-0eb7-4267-953a-3fcfa08a22b3" containerID="fad55af2475f84915150c67ad522384e34d6b8041a129873b036bf99434ad863" exitCode=0 Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.282766 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"fe3f0980-0eb7-4267-953a-3fcfa08a22b3","Type":"ContainerDied","Data":"fad55af2475f84915150c67ad522384e34d6b8041a129873b036bf99434ad863"} Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.282797 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"fe3f0980-0eb7-4267-953a-3fcfa08a22b3","Type":"ContainerDied","Data":"5eaca428f78ae2e2d16bc7508311a4fa042608c0c33fe3fc2756c0df348f7aa2"} Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.282815 4799 scope.go:117] "RemoveContainer" containerID="fad55af2475f84915150c67ad522384e34d6b8041a129873b036bf99434ad863" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.282940 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.314351 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.314383 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"971dd170-cc55-481f-b76d-820102f811cd","Type":"ContainerDied","Data":"4c5f15e3c16097cc6eeac97326e01d5fd2c7aa95ecbb95452de322a6b6a25a4d"} Oct 10 16:54:15 crc kubenswrapper[4799]: E1010 16:54:15.321357 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1 is running failed: container process not found" containerID="24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 10 16:54:15 crc kubenswrapper[4799]: E1010 16:54:15.321856 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1 is running failed: container process not found" containerID="24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 10 16:54:15 crc kubenswrapper[4799]: E1010 16:54:15.322311 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1 is running failed: container process not found" containerID="24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 10 16:54:15 crc kubenswrapper[4799]: E1010 16:54:15.322364 4799 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-dtplc" podUID="e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6" containerName="ovsdb-server" Oct 10 16:54:15 crc kubenswrapper[4799]: E1010 16:54:15.323699 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="387afba31b4e67a9ba9f7f2877d3f3af184a7c60b3843119336ceb5759893e62" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.323768 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.323794 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.323752 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystonecab5-account-delete-jnkq7" Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.324433 4799 util.go:48] "No ready sandbox for pod can be found. 
Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.324433 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Oct 10 16:54:15 crc kubenswrapper[4799]: E1010 16:54:15.328723 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="387afba31b4e67a9ba9f7f2877d3f3af184a7c60b3843119336ceb5759893e62" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Oct 10 16:54:15 crc kubenswrapper[4799]: E1010 16:54:15.330780 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="387afba31b4e67a9ba9f7f2877d3f3af184a7c60b3843119336ceb5759893e62" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Oct 10 16:54:15 crc kubenswrapper[4799]: E1010 16:54:15.330810 4799 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-dtplc" podUID="e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6" containerName="ovs-vswitchd"
Oct 10 16:54:15 crc kubenswrapper[4799]: E1010 16:54:15.385100 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ff8624f34fcb1a15fffee56784a5608f01adefaa3172b0477e52de09e0786400" cmd=["/usr/local/bin/container-scripts/status_check.sh"]
Oct 10 16:54:15 crc kubenswrapper[4799]: E1010 16:54:15.386805 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ff8624f34fcb1a15fffee56784a5608f01adefaa3172b0477e52de09e0786400" cmd=["/usr/local/bin/container-scripts/status_check.sh"]
Oct 10 16:54:15 crc kubenswrapper[4799]: E1010 16:54:15.389485 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ff8624f34fcb1a15fffee56784a5608f01adefaa3172b0477e52de09e0786400" cmd=["/usr/local/bin/container-scripts/status_check.sh"]
Oct 10 16:54:15 crc kubenswrapper[4799]: E1010 16:54:15.389558 4799 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="fdfeebc0-d50f-42f8-a461-b0aea7ba6a11" containerName="ovn-northd"
Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.418445 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bd0e459-efb9-463c-a8fc-d08a3194f3d9" path="/var/lib/kubelet/pods/1bd0e459-efb9-463c-a8fc-d08a3194f3d9/volumes"
Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.419023 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f2d77fd-b861-4589-bdb5-ad606deb3360" path="/var/lib/kubelet/pods/2f2d77fd-b861-4589-bdb5-ad606deb3360/volumes"
Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.419534 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31fc68f8-af18-42b7-a94c-90a22afea5f1" path="/var/lib/kubelet/pods/31fc68f8-af18-42b7-a94c-90a22afea5f1/volumes"
Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.420798 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a05167f-cd58-4f9f-806b-8d71271320d2" path="/var/lib/kubelet/pods/3a05167f-cd58-4f9f-806b-8d71271320d2/volumes"
Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.421262 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b59d111-5df2-4b9f-9d02-7a3f9e19d02c" path="/var/lib/kubelet/pods/3b59d111-5df2-4b9f-9d02-7a3f9e19d02c/volumes"
Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.421730 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e6870d5-faea-46d9-bebb-4d237b802910" path="/var/lib/kubelet/pods/5e6870d5-faea-46d9-bebb-4d237b802910/volumes"
Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.422656 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69aa641a-13ff-4f65-b2ea-7fee3ad42134" path="/var/lib/kubelet/pods/69aa641a-13ff-4f65-b2ea-7fee3ad42134/volumes"
Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.423108 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="785f8ce9-5280-44fe-891c-8162f2fdcd7a" path="/var/lib/kubelet/pods/785f8ce9-5280-44fe-891c-8162f2fdcd7a/volumes"
Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.423570 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ab7b7c1-e89f-4562-882b-4f517f90f8c8" path="/var/lib/kubelet/pods/7ab7b7c1-e89f-4562-882b-4f517f90f8c8/volumes"
Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.428068 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="985d1485-7054-475b-8e60-85db5dc5afa3" path="/var/lib/kubelet/pods/985d1485-7054-475b-8e60-85db5dc5afa3/volumes"
Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.430027 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac766919-d788-40da-879a-627919926594" path="/var/lib/kubelet/pods/ac766919-d788-40da-879a-627919926594/volumes"
Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.430597 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e424a8e6-64c8-4572-8706-33026a2cc44d" path="/var/lib/kubelet/pods/e424a8e6-64c8-4572-8706-33026a2cc44d/volumes"
Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.432476 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9c4cd8a-6aed-4826-b23b-328645f5801f" path="/var/lib/kubelet/pods/f9c4cd8a-6aed-4826-b23b-328645f5801f/volumes"
Oct 10 16:54:15 crc kubenswrapper[4799]: E1010 16:54:15.451615 4799 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found
Oct 10 16:54:15 crc kubenswrapper[4799]: E1010 16:54:15.451684 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7858ee88-c7b9-4fb7-b825-569154134201-config-data podName:7858ee88-c7b9-4fb7-b825-569154134201 nodeName:}" failed. No retries permitted until 2025-10-10 16:54:23.451667237 +0000 UTC m=+1356.959991352 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/7858ee88-c7b9-4fb7-b825-569154134201-config-data") pod "rabbitmq-cell1-server-0" (UID: "7858ee88-c7b9-4fb7-b825-569154134201") : configmap "rabbitmq-cell1-config-data" not found
Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.466780 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.480090 4799 scope.go:117] "RemoveContainer" containerID="ea589f2e4da954eafaee6ac906e5b45cec97bf54ed54d2037ffdc855f4a6f323"
Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.550875 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.556930 4799 scope.go:117] "RemoveContainer" containerID="fad55af2475f84915150c67ad522384e34d6b8041a129873b036bf99434ad863"
Oct 10 16:54:15 crc kubenswrapper[4799]: E1010 16:54:15.557511 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fad55af2475f84915150c67ad522384e34d6b8041a129873b036bf99434ad863\": container with ID starting with fad55af2475f84915150c67ad522384e34d6b8041a129873b036bf99434ad863 not found: ID does not exist" containerID="fad55af2475f84915150c67ad522384e34d6b8041a129873b036bf99434ad863"
Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.557547 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fad55af2475f84915150c67ad522384e34d6b8041a129873b036bf99434ad863"} err="failed to get container status \"fad55af2475f84915150c67ad522384e34d6b8041a129873b036bf99434ad863\": rpc error: code = NotFound desc = could not find container \"fad55af2475f84915150c67ad522384e34d6b8041a129873b036bf99434ad863\": container with ID starting with fad55af2475f84915150c67ad522384e34d6b8041a129873b036bf99434ad863 not found: ID does not exist"
Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.557571 4799 scope.go:117] "RemoveContainer" containerID="ea589f2e4da954eafaee6ac906e5b45cec97bf54ed54d2037ffdc855f4a6f323"
Oct 10 16:54:15 crc kubenswrapper[4799]: E1010 16:54:15.557840 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea589f2e4da954eafaee6ac906e5b45cec97bf54ed54d2037ffdc855f4a6f323\": container with ID starting with ea589f2e4da954eafaee6ac906e5b45cec97bf54ed54d2037ffdc855f4a6f323 not found: ID does not exist" containerID="ea589f2e4da954eafaee6ac906e5b45cec97bf54ed54d2037ffdc855f4a6f323"
Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.557888 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea589f2e4da954eafaee6ac906e5b45cec97bf54ed54d2037ffdc855f4a6f323"} err="failed to get container status \"ea589f2e4da954eafaee6ac906e5b45cec97bf54ed54d2037ffdc855f4a6f323\": rpc error: code = NotFound desc = could not find container \"ea589f2e4da954eafaee6ac906e5b45cec97bf54ed54d2037ffdc855f4a6f323\": container with ID starting with ea589f2e4da954eafaee6ac906e5b45cec97bf54ed54d2037ffdc855f4a6f323 not found: ID does not exist"
Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.557906 4799 scope.go:117] "RemoveContainer" containerID="ff0b33623ee2e909045d84098d1c8b4f4ee31b12318171307a8ee09a9499c92d"
Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.595639 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.603632 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.608857 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.616214 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.623016 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"]
Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.629974 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"]
Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.634954 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"]
Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.640284 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-galera-0"]
Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.656066 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystonecab5-account-delete-jnkq7"]
Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.664376 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystonecab5-account-delete-jnkq7"]
Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.759637 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="baff4453-a6a2-4264-82b7-3ce7c22734f6" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.207:3000/\": dial tcp 10.217.0.207:3000: connect: connection refused"
Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.760457 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p27l4\" (UniqueName: \"kubernetes.io/projected/3585305f-274f-416c-b59d-2dc474f54341-kube-api-access-p27l4\") on node \"crc\" DevicePath \"\""
Oct 10 16:54:15 crc kubenswrapper[4799]: I1010 16:54:15.912352 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-657bb59659-swzhl"
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.063674 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-config-data\") pod \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\" (UID: \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.063782 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-52fg8\" (UniqueName: \"kubernetes.io/projected/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-kube-api-access-52fg8\") pod \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\" (UID: \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.063806 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-fernet-keys\") pod \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\" (UID: \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.063823 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-public-tls-certs\") pod \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\" (UID: \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.063900 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-credential-keys\") pod \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\" (UID: \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.063919 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-combined-ca-bundle\") pod \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\" (UID: \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.063986 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-scripts\") pod \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\" (UID: \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.064016 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-internal-tls-certs\") pod \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\" (UID: \"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.069386 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-scripts" (OuterVolumeSpecName: "scripts") pod "eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1" (UID: "eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.070948 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1" (UID: "eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.071226 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1" (UID: "eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.071745 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-kube-api-access-52fg8" (OuterVolumeSpecName: "kube-api-access-52fg8") pod "eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1" (UID: "eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1"). InnerVolumeSpecName "kube-api-access-52fg8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.089873 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-config-data" (OuterVolumeSpecName: "config-data") pod "eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1" (UID: "eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.118688 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1" (UID: "eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.122893 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1" (UID: "eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.166746 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-config-data\") on node \"crc\" DevicePath \"\""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.166792 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-52fg8\" (UniqueName: \"kubernetes.io/projected/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-kube-api-access-52fg8\") on node \"crc\" DevicePath \"\""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.166801 4799 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-fernet-keys\") on node \"crc\" DevicePath \"\""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.166809 4799 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-public-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.166817 4799 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-credential-keys\") on node \"crc\" DevicePath \"\""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.166826 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.166835 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-scripts\") on node \"crc\" DevicePath \"\""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.169349 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1" (UID: "eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.269833 4799 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.339647 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_fdfeebc0-d50f-42f8-a461-b0aea7ba6a11/ovn-northd/0.log"
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.340000 4799 generic.go:334] "Generic (PLEG): container finished" podID="fdfeebc0-d50f-42f8-a461-b0aea7ba6a11" containerID="ff8624f34fcb1a15fffee56784a5608f01adefaa3172b0477e52de09e0786400" exitCode=139
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.340077 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11","Type":"ContainerDied","Data":"ff8624f34fcb1a15fffee56784a5608f01adefaa3172b0477e52de09e0786400"}
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.340122 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11","Type":"ContainerDied","Data":"367957c4450034f9a361064a669daefa18feb7cc6133316ac9035859a9695dc3"}
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.340135 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="367957c4450034f9a361064a669daefa18feb7cc6133316ac9035859a9695dc3"
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.343201 4799 generic.go:334] "Generic (PLEG): container finished" podID="7858ee88-c7b9-4fb7-b825-569154134201" containerID="5618f2fc1181b1ea35b16860a7cfaa8ed80f4249c7cd93a63b30a487631e90ec" exitCode=0
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.343256 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7858ee88-c7b9-4fb7-b825-569154134201","Type":"ContainerDied","Data":"5618f2fc1181b1ea35b16860a7cfaa8ed80f4249c7cd93a63b30a487631e90ec"}
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.345580 4799 generic.go:334] "Generic (PLEG): container finished" podID="eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1" containerID="791ae33161eebdb140cb4872e47266b15abe32970fcb198663d953365fc9278c" exitCode=0
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.345710 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-657bb59659-swzhl"
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.345747 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-657bb59659-swzhl" event={"ID":"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1","Type":"ContainerDied","Data":"791ae33161eebdb140cb4872e47266b15abe32970fcb198663d953365fc9278c"}
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.345818 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-657bb59659-swzhl" event={"ID":"eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1","Type":"ContainerDied","Data":"f8a7e55488073c557402150956988c39ba3b5d6889e82d7f6be3ccf70be386d7"}
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.345845 4799 scope.go:117] "RemoveContainer" containerID="791ae33161eebdb140cb4872e47266b15abe32970fcb198663d953365fc9278c"
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.353418 4799 generic.go:334] "Generic (PLEG): container finished" podID="9fd6f03f-abea-4c29-8060-0705bb0af2c7" containerID="22fa1f105dd6a9317c7bfadf3f75a8ae31d05888ae18ed6871e94cd9824e1b96" exitCode=0
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.353501 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9fd6f03f-abea-4c29-8060-0705bb0af2c7","Type":"ContainerDied","Data":"22fa1f105dd6a9317c7bfadf3f75a8ae31d05888ae18ed6871e94cd9824e1b96"}
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.356570 4799 generic.go:334] "Generic (PLEG): container finished" podID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerID="145ba828d4b654e155342b2053228303da0bf7c989b77f4342b3cbafaea6b6c8" exitCode=0
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.356626 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerDied","Data":"145ba828d4b654e155342b2053228303da0bf7c989b77f4342b3cbafaea6b6c8"}
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.356664 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"eb9efabd31e0bc119cd431c6228f2fbf6db48806c09df2881081667d9ddd75e0"}
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.430084 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_fdfeebc0-d50f-42f8-a461-b0aea7ba6a11/ovn-northd/0.log"
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.430212 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.443236 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-657bb59659-swzhl"]
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.466745 4799 scope.go:117] "RemoveContainer" containerID="791ae33161eebdb140cb4872e47266b15abe32970fcb198663d953365fc9278c"
Oct 10 16:54:16 crc kubenswrapper[4799]: E1010 16:54:16.467402 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"791ae33161eebdb140cb4872e47266b15abe32970fcb198663d953365fc9278c\": container with ID starting with 791ae33161eebdb140cb4872e47266b15abe32970fcb198663d953365fc9278c not found: ID does not exist" containerID="791ae33161eebdb140cb4872e47266b15abe32970fcb198663d953365fc9278c"
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.467449 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"791ae33161eebdb140cb4872e47266b15abe32970fcb198663d953365fc9278c"} err="failed to get container status \"791ae33161eebdb140cb4872e47266b15abe32970fcb198663d953365fc9278c\": rpc error: code = NotFound desc = could not find container \"791ae33161eebdb140cb4872e47266b15abe32970fcb198663d953365fc9278c\": container with ID starting with 791ae33161eebdb140cb4872e47266b15abe32970fcb198663d953365fc9278c not found: ID does not exist"
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.467479 4799 scope.go:117] "RemoveContainer" containerID="7660328ebc7154335d94320ea1d630296da5d0b7a601ee21c41b533b20ba0a49"
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.467557 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-657bb59659-swzhl"]
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.468114 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.576846 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-config\") pod \"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11\" (UID: \"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.576881 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-ovn-northd-tls-certs\") pod \"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11\" (UID: \"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.576907 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9fd6f03f-abea-4c29-8060-0705bb0af2c7-rabbitmq-plugins\") pod \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.576926 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9fd6f03f-abea-4c29-8060-0705bb0af2c7-rabbitmq-tls\") pod \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.576952 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-ovn-rundir\") pod \"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11\" (UID: \"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.576974 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fx9nd\" (UniqueName: \"kubernetes.io/projected/9fd6f03f-abea-4c29-8060-0705bb0af2c7-kube-api-access-fx9nd\") pod \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.577002 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9fd6f03f-abea-4c29-8060-0705bb0af2c7-pod-info\") pod \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.577021 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9fd6f03f-abea-4c29-8060-0705bb0af2c7-plugins-conf\") pod \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.577068 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9fd6f03f-abea-4c29-8060-0705bb0af2c7-rabbitmq-erlang-cookie\") pod \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.577082 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9fd6f03f-abea-4c29-8060-0705bb0af2c7-server-conf\") pod \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.577160 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9fd6f03f-abea-4c29-8060-0705bb0af2c7-config-data\") pod \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.577178 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9fd6f03f-abea-4c29-8060-0705bb0af2c7-rabbitmq-confd\") pod \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.577203 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.577220 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-metrics-certs-tls-certs\") pod \"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11\" (UID: \"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.577235 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-scripts\") pod \"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11\" (UID: \"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.577260 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9fd6f03f-abea-4c29-8060-0705bb0af2c7-erlang-cookie-secret\") pod \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\" (UID: \"9fd6f03f-abea-4c29-8060-0705bb0af2c7\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.577286 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-combined-ca-bundle\") pod \"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11\" (UID: \"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.577303 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnr4x\" (UniqueName: \"kubernetes.io/projected/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-kube-api-access-mnr4x\") pod \"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11\" (UID: \"fdfeebc0-d50f-42f8-a461-b0aea7ba6a11\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.578562 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9fd6f03f-abea-4c29-8060-0705bb0af2c7-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "9fd6f03f-abea-4c29-8060-0705bb0af2c7" (UID: "9fd6f03f-abea-4c29-8060-0705bb0af2c7"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.579013 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-config" (OuterVolumeSpecName: "config") pod "fdfeebc0-d50f-42f8-a461-b0aea7ba6a11" (UID: "fdfeebc0-d50f-42f8-a461-b0aea7ba6a11"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.583003 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-kube-api-access-mnr4x" (OuterVolumeSpecName: "kube-api-access-mnr4x") pod "fdfeebc0-d50f-42f8-a461-b0aea7ba6a11" (UID: "fdfeebc0-d50f-42f8-a461-b0aea7ba6a11"). InnerVolumeSpecName "kube-api-access-mnr4x". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.590092 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "fdfeebc0-d50f-42f8-a461-b0aea7ba6a11" (UID: "fdfeebc0-d50f-42f8-a461-b0aea7ba6a11"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.590643 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9fd6f03f-abea-4c29-8060-0705bb0af2c7-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "9fd6f03f-abea-4c29-8060-0705bb0af2c7" (UID: "9fd6f03f-abea-4c29-8060-0705bb0af2c7"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.592193 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-scripts" (OuterVolumeSpecName: "scripts") pod "fdfeebc0-d50f-42f8-a461-b0aea7ba6a11" (UID: "fdfeebc0-d50f-42f8-a461-b0aea7ba6a11"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.592589 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9fd6f03f-abea-4c29-8060-0705bb0af2c7-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "9fd6f03f-abea-4c29-8060-0705bb0af2c7" (UID: "9fd6f03f-abea-4c29-8060-0705bb0af2c7"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.593529 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9fd6f03f-abea-4c29-8060-0705bb0af2c7-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "9fd6f03f-abea-4c29-8060-0705bb0af2c7" (UID: "9fd6f03f-abea-4c29-8060-0705bb0af2c7"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.593795 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/9fd6f03f-abea-4c29-8060-0705bb0af2c7-pod-info" (OuterVolumeSpecName: "pod-info") pod "9fd6f03f-abea-4c29-8060-0705bb0af2c7" (UID: "9fd6f03f-abea-4c29-8060-0705bb0af2c7"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.596162 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "persistence") pod "9fd6f03f-abea-4c29-8060-0705bb0af2c7" (UID: "9fd6f03f-abea-4c29-8060-0705bb0af2c7"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.596183 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9fd6f03f-abea-4c29-8060-0705bb0af2c7-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "9fd6f03f-abea-4c29-8060-0705bb0af2c7" (UID: "9fd6f03f-abea-4c29-8060-0705bb0af2c7"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.597846 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9fd6f03f-abea-4c29-8060-0705bb0af2c7-kube-api-access-fx9nd" (OuterVolumeSpecName: "kube-api-access-fx9nd") pod "9fd6f03f-abea-4c29-8060-0705bb0af2c7" (UID: "9fd6f03f-abea-4c29-8060-0705bb0af2c7"). InnerVolumeSpecName "kube-api-access-fx9nd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.642743 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9fd6f03f-abea-4c29-8060-0705bb0af2c7-config-data" (OuterVolumeSpecName: "config-data") pod "9fd6f03f-abea-4c29-8060-0705bb0af2c7" (UID: "9fd6f03f-abea-4c29-8060-0705bb0af2c7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.643418 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fdfeebc0-d50f-42f8-a461-b0aea7ba6a11" (UID: "fdfeebc0-d50f-42f8-a461-b0aea7ba6a11"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.644285 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9fd6f03f-abea-4c29-8060-0705bb0af2c7-server-conf" (OuterVolumeSpecName: "server-conf") pod "9fd6f03f-abea-4c29-8060-0705bb0af2c7" (UID: "9fd6f03f-abea-4c29-8060-0705bb0af2c7"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.654672 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "fdfeebc0-d50f-42f8-a461-b0aea7ba6a11" (UID: "fdfeebc0-d50f-42f8-a461-b0aea7ba6a11"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.654899 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "fdfeebc0-d50f-42f8-a461-b0aea7ba6a11" (UID: "fdfeebc0-d50f-42f8-a461-b0aea7ba6a11"). InnerVolumeSpecName "ovn-northd-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.673486 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.678352 4799 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.678441 4799 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9fd6f03f-abea-4c29-8060-0705bb0af2c7-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.678496 4799 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9fd6f03f-abea-4c29-8060-0705bb0af2c7-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.678548 4799 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-ovn-rundir\") on node \"crc\" DevicePath \"\""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.678600 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fx9nd\" (UniqueName: \"kubernetes.io/projected/9fd6f03f-abea-4c29-8060-0705bb0af2c7-kube-api-access-fx9nd\") on node \"crc\" DevicePath \"\""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.678667 4799 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9fd6f03f-abea-4c29-8060-0705bb0af2c7-pod-info\") on node \"crc\" DevicePath \"\""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.678719 4799 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9fd6f03f-abea-4c29-8060-0705bb0af2c7-plugins-conf\") on node \"crc\" DevicePath \"\""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.678790 4799 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9fd6f03f-abea-4c29-8060-0705bb0af2c7-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.678844 4799 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9fd6f03f-abea-4c29-8060-0705bb0af2c7-server-conf\") on node \"crc\" DevicePath \"\""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.678894 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9fd6f03f-abea-4c29-8060-0705bb0af2c7-config-data\") on node \"crc\" DevicePath \"\""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.678966 4799 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.679027 4799 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.679081 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-scripts\") on node \"crc\" DevicePath \"\""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.679138 4799 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9fd6f03f-abea-4c29-8060-0705bb0af2c7-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.679193 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.679248 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnr4x\" (UniqueName: \"kubernetes.io/projected/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-kube-api-access-mnr4x\") on node \"crc\" DevicePath \"\""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.679302 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11-config\") on node \"crc\" DevicePath \"\""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.687055 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-cell1-novncproxy-0" podUID="785f8ce9-5280-44fe-891c-8162f2fdcd7a" containerName="nova-cell1-novncproxy-novncproxy" probeResult="failure" output="Get \"https://10.217.0.201:6080/vnc_lite.html\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.689414 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9fd6f03f-abea-4c29-8060-0705bb0af2c7-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "9fd6f03f-abea-4c29-8060-0705bb0af2c7" (UID: "9fd6f03f-abea-4c29-8060-0705bb0af2c7"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.698790 4799 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc"
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.780394 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7858ee88-c7b9-4fb7-b825-569154134201-pod-info\") pod \"7858ee88-c7b9-4fb7-b825-569154134201\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.780439 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7858ee88-c7b9-4fb7-b825-569154134201-plugins-conf\") pod \"7858ee88-c7b9-4fb7-b825-569154134201\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.780458 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4rpnw\" (UniqueName: \"kubernetes.io/projected/7858ee88-c7b9-4fb7-b825-569154134201-kube-api-access-4rpnw\") pod \"7858ee88-c7b9-4fb7-b825-569154134201\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.780503 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7858ee88-c7b9-4fb7-b825-569154134201-rabbitmq-plugins\") pod \"7858ee88-c7b9-4fb7-b825-569154134201\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.780572 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7858ee88-c7b9-4fb7-b825-569154134201-erlang-cookie-secret\") pod \"7858ee88-c7b9-4fb7-b825-569154134201\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.780592 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7858ee88-c7b9-4fb7-b825-569154134201-rabbitmq-erlang-cookie\") pod \"7858ee88-c7b9-4fb7-b825-569154134201\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.780636 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7858ee88-c7b9-4fb7-b825-569154134201-server-conf\") pod \"7858ee88-c7b9-4fb7-b825-569154134201\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.780689 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7858ee88-c7b9-4fb7-b825-569154134201-config-data\") pod \"7858ee88-c7b9-4fb7-b825-569154134201\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.780997 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"7858ee88-c7b9-4fb7-b825-569154134201\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.781057 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7858ee88-c7b9-4fb7-b825-569154134201-rabbitmq-confd\") pod \"7858ee88-c7b9-4fb7-b825-569154134201\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.781078 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7858ee88-c7b9-4fb7-b825-569154134201-rabbitmq-tls\") pod \"7858ee88-c7b9-4fb7-b825-569154134201\" (UID: \"7858ee88-c7b9-4fb7-b825-569154134201\") "
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.781178 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7858ee88-c7b9-4fb7-b825-569154134201-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "7858ee88-c7b9-4fb7-b825-569154134201" (UID: "7858ee88-c7b9-4fb7-b825-569154134201"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.781318 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7858ee88-c7b9-4fb7-b825-569154134201-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "7858ee88-c7b9-4fb7-b825-569154134201" (UID: "7858ee88-c7b9-4fb7-b825-569154134201"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.781356 4799 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9fd6f03f-abea-4c29-8060-0705bb0af2c7-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.781371 4799 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.781380 4799 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7858ee88-c7b9-4fb7-b825-569154134201-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.781415 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7858ee88-c7b9-4fb7-b825-569154134201-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "7858ee88-c7b9-4fb7-b825-569154134201" (UID: "7858ee88-c7b9-4fb7-b825-569154134201"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.784348 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7858ee88-c7b9-4fb7-b825-569154134201-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "7858ee88-c7b9-4fb7-b825-569154134201" (UID: "7858ee88-c7b9-4fb7-b825-569154134201"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.784366 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7858ee88-c7b9-4fb7-b825-569154134201-kube-api-access-4rpnw" (OuterVolumeSpecName: "kube-api-access-4rpnw") pod "7858ee88-c7b9-4fb7-b825-569154134201" (UID: "7858ee88-c7b9-4fb7-b825-569154134201"). InnerVolumeSpecName "kube-api-access-4rpnw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.784393 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/7858ee88-c7b9-4fb7-b825-569154134201-pod-info" (OuterVolumeSpecName: "pod-info") pod "7858ee88-c7b9-4fb7-b825-569154134201" (UID: "7858ee88-c7b9-4fb7-b825-569154134201"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.784407 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7858ee88-c7b9-4fb7-b825-569154134201-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "7858ee88-c7b9-4fb7-b825-569154134201" (UID: "7858ee88-c7b9-4fb7-b825-569154134201"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.784518 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "persistence") pod "7858ee88-c7b9-4fb7-b825-569154134201" (UID: "7858ee88-c7b9-4fb7-b825-569154134201"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.804067 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7858ee88-c7b9-4fb7-b825-569154134201-config-data" (OuterVolumeSpecName: "config-data") pod "7858ee88-c7b9-4fb7-b825-569154134201" (UID: "7858ee88-c7b9-4fb7-b825-569154134201"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.821708 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7858ee88-c7b9-4fb7-b825-569154134201-server-conf" (OuterVolumeSpecName: "server-conf") pod "7858ee88-c7b9-4fb7-b825-569154134201" (UID: "7858ee88-c7b9-4fb7-b825-569154134201"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.883549 4799 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7858ee88-c7b9-4fb7-b825-569154134201-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.883578 4799 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7858ee88-c7b9-4fb7-b825-569154134201-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.883590 4799 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7858ee88-c7b9-4fb7-b825-569154134201-server-conf\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.883599 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7858ee88-c7b9-4fb7-b825-569154134201-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.883629 4799 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.883639 4799 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7858ee88-c7b9-4fb7-b825-569154134201-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.883647 4799 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7858ee88-c7b9-4fb7-b825-569154134201-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.883655 4799 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7858ee88-c7b9-4fb7-b825-569154134201-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.883670 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4rpnw\" (UniqueName: \"kubernetes.io/projected/7858ee88-c7b9-4fb7-b825-569154134201-kube-api-access-4rpnw\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.883678 4799 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7858ee88-c7b9-4fb7-b825-569154134201-pod-info\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.900153 4799 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Oct 10 16:54:16 crc kubenswrapper[4799]: I1010 16:54:16.985022 4799 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:17 crc kubenswrapper[4799]: I1010 16:54:17.375734 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9fd6f03f-abea-4c29-8060-0705bb0af2c7","Type":"ContainerDied","Data":"25e32e81cf2a1229f1f01e83272289d9d568d9b1b40bb8d385017d4faaff08d5"} Oct 10 16:54:17 crc kubenswrapper[4799]: I1010 
16:54:17.376341 4799 scope.go:117] "RemoveContainer" containerID="22fa1f105dd6a9317c7bfadf3f75a8ae31d05888ae18ed6871e94cd9824e1b96" Oct 10 16:54:17 crc kubenswrapper[4799]: I1010 16:54:17.376266 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 10 16:54:17 crc kubenswrapper[4799]: I1010 16:54:17.383436 4799 generic.go:334] "Generic (PLEG): container finished" podID="60be0e86-f2dd-4575-b3c8-0131575b1cd8" containerID="35d51a78c7ee3dde16f77dfec5a6f5f69c8e3d2b0eccd75b5f3e3226dc047eeb" exitCode=0 Oct 10 16:54:17 crc kubenswrapper[4799]: I1010 16:54:17.383479 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"60be0e86-f2dd-4575-b3c8-0131575b1cd8","Type":"ContainerDied","Data":"35d51a78c7ee3dde16f77dfec5a6f5f69c8e3d2b0eccd75b5f3e3226dc047eeb"} Oct 10 16:54:17 crc kubenswrapper[4799]: I1010 16:54:17.441156 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 10 16:54:17 crc kubenswrapper[4799]: I1010 16:54:17.445521 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2db42625-4f7b-479c-a580-c94d6cafb3fe" path="/var/lib/kubelet/pods/2db42625-4f7b-479c-a580-c94d6cafb3fe/volumes" Oct 10 16:54:17 crc kubenswrapper[4799]: I1010 16:54:17.446041 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3585305f-274f-416c-b59d-2dc474f54341" path="/var/lib/kubelet/pods/3585305f-274f-416c-b59d-2dc474f54341/volumes" Oct 10 16:54:17 crc kubenswrapper[4799]: I1010 16:54:17.446382 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ae9763d-31dd-44c7-bf35-11a896a4f785" path="/var/lib/kubelet/pods/7ae9763d-31dd-44c7-bf35-11a896a4f785/volumes" Oct 10 16:54:17 crc kubenswrapper[4799]: I1010 16:54:17.457617 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Oct 10 16:54:17 crc kubenswrapper[4799]: I1010 16:54:17.493361 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7dc78f94-acb0-4411-b1a2-14dd6500674b" path="/var/lib/kubelet/pods/7dc78f94-acb0-4411-b1a2-14dd6500674b/volumes" Oct 10 16:54:17 crc kubenswrapper[4799]: I1010 16:54:17.508113 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="971dd170-cc55-481f-b76d-820102f811cd" path="/var/lib/kubelet/pods/971dd170-cc55-481f-b76d-820102f811cd/volumes" Oct 10 16:54:17 crc kubenswrapper[4799]: I1010 16:54:17.508685 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1" path="/var/lib/kubelet/pods/eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1/volumes" Oct 10 16:54:17 crc kubenswrapper[4799]: I1010 16:54:17.509986 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe3f0980-0eb7-4267-953a-3fcfa08a22b3" path="/var/lib/kubelet/pods/fe3f0980-0eb7-4267-953a-3fcfa08a22b3/volumes" Oct 10 16:54:17 crc kubenswrapper[4799]: I1010 16:54:17.511638 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 10 16:54:17 crc kubenswrapper[4799]: I1010 16:54:17.511674 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 10 16:54:17 crc kubenswrapper[4799]: I1010 16:54:17.511704 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7858ee88-c7b9-4fb7-b825-569154134201","Type":"ContainerDied","Data":"d86026ff1c29ab1594cd0dff83494d16f80431f3dbdbbe06584ec4d4ea404aea"} Oct 10 16:54:17 crc kubenswrapper[4799]: I1010 16:54:17.522491 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 10 16:54:17 crc kubenswrapper[4799]: I1010 16:54:17.528669 4799 scope.go:117] "RemoveContainer" containerID="adfdbb90972668f2d71dad450618269e6685fe2f84e1846228c2c17d1cd7c04c" Oct 10 16:54:17 crc kubenswrapper[4799]: I1010 16:54:17.543825 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 10 16:54:17 crc kubenswrapper[4799]: I1010 16:54:17.551999 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Oct 10 16:54:17 crc kubenswrapper[4799]: I1010 16:54:17.560586 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-northd-0"] Oct 10 16:54:17 crc kubenswrapper[4799]: I1010 16:54:17.595069 4799 scope.go:117] "RemoveContainer" containerID="5618f2fc1181b1ea35b16860a7cfaa8ed80f4249c7cd93a63b30a487631e90ec" Oct 10 16:54:17 crc kubenswrapper[4799]: I1010 16:54:17.609103 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 10 16:54:17 crc kubenswrapper[4799]: I1010 16:54:17.623215 4799 scope.go:117] "RemoveContainer" containerID="530054c73abfb931af9932880c554d60aaa19e406d6b80e4c78cfa9e40a7c9a7" Oct 10 16:54:17 crc kubenswrapper[4799]: I1010 16:54:17.717687 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60be0e86-f2dd-4575-b3c8-0131575b1cd8-config-data\") pod \"60be0e86-f2dd-4575-b3c8-0131575b1cd8\" (UID: \"60be0e86-f2dd-4575-b3c8-0131575b1cd8\") " Oct 10 16:54:17 crc kubenswrapper[4799]: I1010 16:54:17.718019 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60be0e86-f2dd-4575-b3c8-0131575b1cd8-combined-ca-bundle\") pod \"60be0e86-f2dd-4575-b3c8-0131575b1cd8\" (UID: \"60be0e86-f2dd-4575-b3c8-0131575b1cd8\") " Oct 10 16:54:17 crc kubenswrapper[4799]: I1010 16:54:17.718133 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nsq2j\" (UniqueName: \"kubernetes.io/projected/60be0e86-f2dd-4575-b3c8-0131575b1cd8-kube-api-access-nsq2j\") pod \"60be0e86-f2dd-4575-b3c8-0131575b1cd8\" (UID: \"60be0e86-f2dd-4575-b3c8-0131575b1cd8\") " Oct 10 16:54:17 crc kubenswrapper[4799]: I1010 16:54:17.724577 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60be0e86-f2dd-4575-b3c8-0131575b1cd8-kube-api-access-nsq2j" (OuterVolumeSpecName: "kube-api-access-nsq2j") pod "60be0e86-f2dd-4575-b3c8-0131575b1cd8" (UID: "60be0e86-f2dd-4575-b3c8-0131575b1cd8"). InnerVolumeSpecName "kube-api-access-nsq2j". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:17 crc kubenswrapper[4799]: I1010 16:54:17.738726 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60be0e86-f2dd-4575-b3c8-0131575b1cd8-config-data" (OuterVolumeSpecName: "config-data") pod "60be0e86-f2dd-4575-b3c8-0131575b1cd8" (UID: "60be0e86-f2dd-4575-b3c8-0131575b1cd8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:17 crc kubenswrapper[4799]: I1010 16:54:17.738865 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60be0e86-f2dd-4575-b3c8-0131575b1cd8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "60be0e86-f2dd-4575-b3c8-0131575b1cd8" (UID: "60be0e86-f2dd-4575-b3c8-0131575b1cd8"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:17 crc kubenswrapper[4799]: I1010 16:54:17.820376 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60be0e86-f2dd-4575-b3c8-0131575b1cd8-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:17 crc kubenswrapper[4799]: I1010 16:54:17.820413 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60be0e86-f2dd-4575-b3c8-0131575b1cd8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:17 crc kubenswrapper[4799]: I1010 16:54:17.820423 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nsq2j\" (UniqueName: \"kubernetes.io/projected/60be0e86-f2dd-4575-b3c8-0131575b1cd8-kube-api-access-nsq2j\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:18 crc kubenswrapper[4799]: I1010 16:54:18.468193 4799 generic.go:334] "Generic (PLEG): container finished" podID="baff4453-a6a2-4264-82b7-3ce7c22734f6" containerID="a4337b4876c0c4b3cbed0413f818cccf0ccead577c676587b002d48a2705e440" exitCode=0 Oct 10 16:54:18 crc kubenswrapper[4799]: I1010 16:54:18.468362 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"baff4453-a6a2-4264-82b7-3ce7c22734f6","Type":"ContainerDied","Data":"a4337b4876c0c4b3cbed0413f818cccf0ccead577c676587b002d48a2705e440"} Oct 10 16:54:18 crc kubenswrapper[4799]: I1010 16:54:18.471861 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"60be0e86-f2dd-4575-b3c8-0131575b1cd8","Type":"ContainerDied","Data":"b8dd1b5626b63450e4276f12d6277470cef7ff847854fedeafff007b81b42d87"} Oct 10 16:54:18 crc kubenswrapper[4799]: I1010 16:54:18.471900 4799 scope.go:117] "RemoveContainer" containerID="35d51a78c7ee3dde16f77dfec5a6f5f69c8e3d2b0eccd75b5f3e3226dc047eeb" Oct 10 16:54:18 crc kubenswrapper[4799]: I1010 16:54:18.472005 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 10 16:54:18 crc kubenswrapper[4799]: I1010 16:54:18.592602 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 10 16:54:18 crc kubenswrapper[4799]: I1010 16:54:18.599036 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 10 16:54:18 crc kubenswrapper[4799]: I1010 16:54:18.608037 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 10 16:54:18 crc kubenswrapper[4799]: I1010 16:54:18.733243 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l4zpv\" (UniqueName: \"kubernetes.io/projected/baff4453-a6a2-4264-82b7-3ce7c22734f6-kube-api-access-l4zpv\") pod \"baff4453-a6a2-4264-82b7-3ce7c22734f6\" (UID: \"baff4453-a6a2-4264-82b7-3ce7c22734f6\") " Oct 10 16:54:18 crc kubenswrapper[4799]: I1010 16:54:18.733300 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/baff4453-a6a2-4264-82b7-3ce7c22734f6-run-httpd\") pod \"baff4453-a6a2-4264-82b7-3ce7c22734f6\" (UID: \"baff4453-a6a2-4264-82b7-3ce7c22734f6\") " Oct 10 16:54:18 crc kubenswrapper[4799]: I1010 16:54:18.733352 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/baff4453-a6a2-4264-82b7-3ce7c22734f6-ceilometer-tls-certs\") pod \"baff4453-a6a2-4264-82b7-3ce7c22734f6\" (UID: \"baff4453-a6a2-4264-82b7-3ce7c22734f6\") " Oct 10 16:54:18 crc kubenswrapper[4799]: I1010 16:54:18.733412 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/baff4453-a6a2-4264-82b7-3ce7c22734f6-config-data\") pod \"baff4453-a6a2-4264-82b7-3ce7c22734f6\" (UID: \"baff4453-a6a2-4264-82b7-3ce7c22734f6\") " Oct 10 16:54:18 crc kubenswrapper[4799]: I1010 16:54:18.733471 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/baff4453-a6a2-4264-82b7-3ce7c22734f6-sg-core-conf-yaml\") pod \"baff4453-a6a2-4264-82b7-3ce7c22734f6\" (UID: \"baff4453-a6a2-4264-82b7-3ce7c22734f6\") " Oct 10 16:54:18 crc kubenswrapper[4799]: I1010 16:54:18.733571 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baff4453-a6a2-4264-82b7-3ce7c22734f6-combined-ca-bundle\") pod \"baff4453-a6a2-4264-82b7-3ce7c22734f6\" (UID: \"baff4453-a6a2-4264-82b7-3ce7c22734f6\") " Oct 10 16:54:18 crc kubenswrapper[4799]: I1010 16:54:18.733599 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/baff4453-a6a2-4264-82b7-3ce7c22734f6-log-httpd\") pod \"baff4453-a6a2-4264-82b7-3ce7c22734f6\" (UID: \"baff4453-a6a2-4264-82b7-3ce7c22734f6\") " Oct 10 16:54:18 crc kubenswrapper[4799]: I1010 16:54:18.733679 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/baff4453-a6a2-4264-82b7-3ce7c22734f6-scripts\") pod \"baff4453-a6a2-4264-82b7-3ce7c22734f6\" (UID: \"baff4453-a6a2-4264-82b7-3ce7c22734f6\") " Oct 10 16:54:18 crc kubenswrapper[4799]: I1010 16:54:18.733773 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/baff4453-a6a2-4264-82b7-3ce7c22734f6-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "baff4453-a6a2-4264-82b7-3ce7c22734f6" (UID: "baff4453-a6a2-4264-82b7-3ce7c22734f6"). 
InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:54:18 crc kubenswrapper[4799]: I1010 16:54:18.734111 4799 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/baff4453-a6a2-4264-82b7-3ce7c22734f6-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:18 crc kubenswrapper[4799]: I1010 16:54:18.734388 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/baff4453-a6a2-4264-82b7-3ce7c22734f6-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "baff4453-a6a2-4264-82b7-3ce7c22734f6" (UID: "baff4453-a6a2-4264-82b7-3ce7c22734f6"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:54:18 crc kubenswrapper[4799]: I1010 16:54:18.737425 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/baff4453-a6a2-4264-82b7-3ce7c22734f6-kube-api-access-l4zpv" (OuterVolumeSpecName: "kube-api-access-l4zpv") pod "baff4453-a6a2-4264-82b7-3ce7c22734f6" (UID: "baff4453-a6a2-4264-82b7-3ce7c22734f6"). InnerVolumeSpecName "kube-api-access-l4zpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:18 crc kubenswrapper[4799]: I1010 16:54:18.742599 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/baff4453-a6a2-4264-82b7-3ce7c22734f6-scripts" (OuterVolumeSpecName: "scripts") pod "baff4453-a6a2-4264-82b7-3ce7c22734f6" (UID: "baff4453-a6a2-4264-82b7-3ce7c22734f6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:18 crc kubenswrapper[4799]: I1010 16:54:18.760120 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/baff4453-a6a2-4264-82b7-3ce7c22734f6-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "baff4453-a6a2-4264-82b7-3ce7c22734f6" (UID: "baff4453-a6a2-4264-82b7-3ce7c22734f6"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:18 crc kubenswrapper[4799]: I1010 16:54:18.784004 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/baff4453-a6a2-4264-82b7-3ce7c22734f6-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "baff4453-a6a2-4264-82b7-3ce7c22734f6" (UID: "baff4453-a6a2-4264-82b7-3ce7c22734f6"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:18 crc kubenswrapper[4799]: I1010 16:54:18.810403 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/baff4453-a6a2-4264-82b7-3ce7c22734f6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "baff4453-a6a2-4264-82b7-3ce7c22734f6" (UID: "baff4453-a6a2-4264-82b7-3ce7c22734f6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:18 crc kubenswrapper[4799]: I1010 16:54:18.815157 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/baff4453-a6a2-4264-82b7-3ce7c22734f6-config-data" (OuterVolumeSpecName: "config-data") pod "baff4453-a6a2-4264-82b7-3ce7c22734f6" (UID: "baff4453-a6a2-4264-82b7-3ce7c22734f6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:18 crc kubenswrapper[4799]: I1010 16:54:18.834983 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/baff4453-a6a2-4264-82b7-3ce7c22734f6-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:18 crc kubenswrapper[4799]: I1010 16:54:18.835014 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l4zpv\" (UniqueName: \"kubernetes.io/projected/baff4453-a6a2-4264-82b7-3ce7c22734f6-kube-api-access-l4zpv\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:18 crc kubenswrapper[4799]: I1010 16:54:18.835025 4799 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/baff4453-a6a2-4264-82b7-3ce7c22734f6-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:18 crc kubenswrapper[4799]: I1010 16:54:18.835036 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/baff4453-a6a2-4264-82b7-3ce7c22734f6-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:18 crc kubenswrapper[4799]: I1010 16:54:18.835068 4799 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/baff4453-a6a2-4264-82b7-3ce7c22734f6-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:18 crc kubenswrapper[4799]: I1010 16:54:18.835081 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baff4453-a6a2-4264-82b7-3ce7c22734f6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:18 crc kubenswrapper[4799]: I1010 16:54:18.835090 4799 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/baff4453-a6a2-4264-82b7-3ce7c22734f6-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:19 crc kubenswrapper[4799]: I1010 16:54:19.425394 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60be0e86-f2dd-4575-b3c8-0131575b1cd8" path="/var/lib/kubelet/pods/60be0e86-f2dd-4575-b3c8-0131575b1cd8/volumes" Oct 10 16:54:19 crc kubenswrapper[4799]: I1010 16:54:19.427567 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7858ee88-c7b9-4fb7-b825-569154134201" path="/var/lib/kubelet/pods/7858ee88-c7b9-4fb7-b825-569154134201/volumes" Oct 10 16:54:19 crc kubenswrapper[4799]: I1010 16:54:19.429722 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9fd6f03f-abea-4c29-8060-0705bb0af2c7" path="/var/lib/kubelet/pods/9fd6f03f-abea-4c29-8060-0705bb0af2c7/volumes" Oct 10 16:54:19 crc kubenswrapper[4799]: I1010 16:54:19.432082 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fdfeebc0-d50f-42f8-a461-b0aea7ba6a11" path="/var/lib/kubelet/pods/fdfeebc0-d50f-42f8-a461-b0aea7ba6a11/volumes" Oct 10 16:54:19 crc kubenswrapper[4799]: I1010 16:54:19.486548 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"baff4453-a6a2-4264-82b7-3ce7c22734f6","Type":"ContainerDied","Data":"731cc8c826413a7ac0c70dcfff809c93104a8ad1d625a251ce7c0bce4ae4651e"} Oct 10 16:54:19 crc kubenswrapper[4799]: I1010 16:54:19.486621 4799 scope.go:117] "RemoveContainer" containerID="f6be7ae91d5c505d0b533031b37bb272b1ce7b62110e052a7fafbcdc9d348b24" Oct 10 16:54:19 crc kubenswrapper[4799]: I1010 16:54:19.486632 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 10 16:54:19 crc kubenswrapper[4799]: I1010 16:54:19.523880 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:54:19 crc kubenswrapper[4799]: I1010 16:54:19.523994 4799 scope.go:117] "RemoveContainer" containerID="c9ae3f60b475964718a4b4571c9e1633350b005dde6fdf9cbe4423f5ab16bd34" Oct 10 16:54:19 crc kubenswrapper[4799]: I1010 16:54:19.528644 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 10 16:54:19 crc kubenswrapper[4799]: I1010 16:54:19.556879 4799 scope.go:117] "RemoveContainer" containerID="a4337b4876c0c4b3cbed0413f818cccf0ccead577c676587b002d48a2705e440" Oct 10 16:54:19 crc kubenswrapper[4799]: I1010 16:54:19.585192 4799 scope.go:117] "RemoveContainer" containerID="50bad220d7bfe5f874d25169f5fbdb3b65af8607c4752caa21f314f09fc45b07" Oct 10 16:54:20 crc kubenswrapper[4799]: E1010 16:54:20.321645 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1 is running failed: container process not found" containerID="24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 10 16:54:20 crc kubenswrapper[4799]: E1010 16:54:20.321982 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1 is running failed: container process not found" containerID="24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 10 16:54:20 crc kubenswrapper[4799]: E1010 16:54:20.322298 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1 is running failed: container process not found" containerID="24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 10 16:54:20 crc kubenswrapper[4799]: E1010 16:54:20.322327 4799 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-dtplc" podUID="e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6" containerName="ovsdb-server" Oct 10 16:54:20 crc kubenswrapper[4799]: E1010 16:54:20.327828 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="387afba31b4e67a9ba9f7f2877d3f3af184a7c60b3843119336ceb5759893e62" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 10 16:54:20 crc kubenswrapper[4799]: E1010 16:54:20.329609 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="387afba31b4e67a9ba9f7f2877d3f3af184a7c60b3843119336ceb5759893e62" 
cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 10 16:54:20 crc kubenswrapper[4799]: E1010 16:54:20.331489 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="387afba31b4e67a9ba9f7f2877d3f3af184a7c60b3843119336ceb5759893e62" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 10 16:54:20 crc kubenswrapper[4799]: E1010 16:54:20.331546 4799 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-dtplc" podUID="e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6" containerName="ovs-vswitchd" Oct 10 16:54:21 crc kubenswrapper[4799]: I1010 16:54:21.178151 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="9fd6f03f-abea-4c29-8060-0705bb0af2c7" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.106:5671: i/o timeout" Oct 10 16:54:21 crc kubenswrapper[4799]: I1010 16:54:21.421289 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="baff4453-a6a2-4264-82b7-3ce7c22734f6" path="/var/lib/kubelet/pods/baff4453-a6a2-4264-82b7-3ce7c22734f6/volumes" Oct 10 16:54:25 crc kubenswrapper[4799]: E1010 16:54:25.323041 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1 is running failed: container process not found" containerID="24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 10 16:54:25 crc kubenswrapper[4799]: E1010 16:54:25.324049 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1 is running failed: container process not found" containerID="24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 10 16:54:25 crc kubenswrapper[4799]: E1010 16:54:25.324594 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1 is running failed: container process not found" containerID="24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 10 16:54:25 crc kubenswrapper[4799]: E1010 16:54:25.324644 4799 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-dtplc" podUID="e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6" containerName="ovsdb-server" Oct 10 16:54:25 crc kubenswrapper[4799]: E1010 16:54:25.326313 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="387afba31b4e67a9ba9f7f2877d3f3af184a7c60b3843119336ceb5759893e62" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 10 16:54:25 crc kubenswrapper[4799]: E1010 16:54:25.328177 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="387afba31b4e67a9ba9f7f2877d3f3af184a7c60b3843119336ceb5759893e62" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 10 16:54:25 crc kubenswrapper[4799]: E1010 16:54:25.332194 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="387afba31b4e67a9ba9f7f2877d3f3af184a7c60b3843119336ceb5759893e62" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 10 16:54:25 crc kubenswrapper[4799]: E1010 16:54:25.332280 4799 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-dtplc" podUID="e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6" containerName="ovs-vswitchd" Oct 10 16:54:25 crc kubenswrapper[4799]: I1010 16:54:25.396661 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-69f7ddf877-mclzd" Oct 10 16:54:25 crc kubenswrapper[4799]: I1010 16:54:25.542563 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-httpd-config\") pod \"78820835-eb2d-40d8-a497-e9a351a9cef9\" (UID: \"78820835-eb2d-40d8-a497-e9a351a9cef9\") " Oct 10 16:54:25 crc kubenswrapper[4799]: I1010 16:54:25.542702 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-ovndb-tls-certs\") pod \"78820835-eb2d-40d8-a497-e9a351a9cef9\" (UID: \"78820835-eb2d-40d8-a497-e9a351a9cef9\") " Oct 10 16:54:25 crc kubenswrapper[4799]: I1010 16:54:25.542784 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-public-tls-certs\") pod \"78820835-eb2d-40d8-a497-e9a351a9cef9\" (UID: \"78820835-eb2d-40d8-a497-e9a351a9cef9\") " Oct 10 16:54:25 crc kubenswrapper[4799]: I1010 16:54:25.542852 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-config\") pod \"78820835-eb2d-40d8-a497-e9a351a9cef9\" (UID: \"78820835-eb2d-40d8-a497-e9a351a9cef9\") " Oct 10 16:54:25 crc kubenswrapper[4799]: I1010 16:54:25.542915 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pczq6\" (UniqueName: \"kubernetes.io/projected/78820835-eb2d-40d8-a497-e9a351a9cef9-kube-api-access-pczq6\") pod \"78820835-eb2d-40d8-a497-e9a351a9cef9\" (UID: \"78820835-eb2d-40d8-a497-e9a351a9cef9\") " Oct 10 16:54:25 crc kubenswrapper[4799]: I1010 16:54:25.542990 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-internal-tls-certs\") pod 
\"78820835-eb2d-40d8-a497-e9a351a9cef9\" (UID: \"78820835-eb2d-40d8-a497-e9a351a9cef9\") " Oct 10 16:54:25 crc kubenswrapper[4799]: I1010 16:54:25.543065 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-combined-ca-bundle\") pod \"78820835-eb2d-40d8-a497-e9a351a9cef9\" (UID: \"78820835-eb2d-40d8-a497-e9a351a9cef9\") " Oct 10 16:54:25 crc kubenswrapper[4799]: I1010 16:54:25.549079 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "78820835-eb2d-40d8-a497-e9a351a9cef9" (UID: "78820835-eb2d-40d8-a497-e9a351a9cef9"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:25 crc kubenswrapper[4799]: I1010 16:54:25.550624 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78820835-eb2d-40d8-a497-e9a351a9cef9-kube-api-access-pczq6" (OuterVolumeSpecName: "kube-api-access-pczq6") pod "78820835-eb2d-40d8-a497-e9a351a9cef9" (UID: "78820835-eb2d-40d8-a497-e9a351a9cef9"). InnerVolumeSpecName "kube-api-access-pczq6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:25 crc kubenswrapper[4799]: I1010 16:54:25.565933 4799 generic.go:334] "Generic (PLEG): container finished" podID="78820835-eb2d-40d8-a497-e9a351a9cef9" containerID="89ea0df023f8cd2efabe4a60d20707c43edc3f235ad2613e512cece899859399" exitCode=0 Oct 10 16:54:25 crc kubenswrapper[4799]: I1010 16:54:25.565989 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-69f7ddf877-mclzd" event={"ID":"78820835-eb2d-40d8-a497-e9a351a9cef9","Type":"ContainerDied","Data":"89ea0df023f8cd2efabe4a60d20707c43edc3f235ad2613e512cece899859399"} Oct 10 16:54:25 crc kubenswrapper[4799]: I1010 16:54:25.566038 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-69f7ddf877-mclzd" event={"ID":"78820835-eb2d-40d8-a497-e9a351a9cef9","Type":"ContainerDied","Data":"d9ca68973d27f952ebfee715da6c21a44b21d8c5cf3ff3dc873bc028ecb31da7"} Oct 10 16:54:25 crc kubenswrapper[4799]: I1010 16:54:25.566061 4799 scope.go:117] "RemoveContainer" containerID="304a677749d95012ce2795e133dc4470b68b20824bdcbe3901e9d128f4e5ec4b" Oct 10 16:54:25 crc kubenswrapper[4799]: I1010 16:54:25.566259 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-69f7ddf877-mclzd" Oct 10 16:54:25 crc kubenswrapper[4799]: I1010 16:54:25.606625 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "78820835-eb2d-40d8-a497-e9a351a9cef9" (UID: "78820835-eb2d-40d8-a497-e9a351a9cef9"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:25 crc kubenswrapper[4799]: I1010 16:54:25.608877 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "78820835-eb2d-40d8-a497-e9a351a9cef9" (UID: "78820835-eb2d-40d8-a497-e9a351a9cef9"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:25 crc kubenswrapper[4799]: I1010 16:54:25.612427 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "78820835-eb2d-40d8-a497-e9a351a9cef9" (UID: "78820835-eb2d-40d8-a497-e9a351a9cef9"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:25 crc kubenswrapper[4799]: I1010 16:54:25.623644 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-config" (OuterVolumeSpecName: "config") pod "78820835-eb2d-40d8-a497-e9a351a9cef9" (UID: "78820835-eb2d-40d8-a497-e9a351a9cef9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:25 crc kubenswrapper[4799]: I1010 16:54:25.631905 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "78820835-eb2d-40d8-a497-e9a351a9cef9" (UID: "78820835-eb2d-40d8-a497-e9a351a9cef9"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 16:54:25 crc kubenswrapper[4799]: I1010 16:54:25.635196 4799 scope.go:117] "RemoveContainer" containerID="89ea0df023f8cd2efabe4a60d20707c43edc3f235ad2613e512cece899859399" Oct 10 16:54:25 crc kubenswrapper[4799]: I1010 16:54:25.645231 4799 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:25 crc kubenswrapper[4799]: I1010 16:54:25.645265 4799 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:25 crc kubenswrapper[4799]: I1010 16:54:25.645278 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:25 crc kubenswrapper[4799]: I1010 16:54:25.645294 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pczq6\" (UniqueName: \"kubernetes.io/projected/78820835-eb2d-40d8-a497-e9a351a9cef9-kube-api-access-pczq6\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:25 crc kubenswrapper[4799]: I1010 16:54:25.645308 4799 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:25 crc kubenswrapper[4799]: I1010 16:54:25.645320 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:25 crc kubenswrapper[4799]: I1010 16:54:25.645332 4799 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/78820835-eb2d-40d8-a497-e9a351a9cef9-httpd-config\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:25 crc kubenswrapper[4799]: I1010 16:54:25.657113 4799 scope.go:117] "RemoveContainer" 
containerID="304a677749d95012ce2795e133dc4470b68b20824bdcbe3901e9d128f4e5ec4b" Oct 10 16:54:25 crc kubenswrapper[4799]: E1010 16:54:25.657544 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"304a677749d95012ce2795e133dc4470b68b20824bdcbe3901e9d128f4e5ec4b\": container with ID starting with 304a677749d95012ce2795e133dc4470b68b20824bdcbe3901e9d128f4e5ec4b not found: ID does not exist" containerID="304a677749d95012ce2795e133dc4470b68b20824bdcbe3901e9d128f4e5ec4b" Oct 10 16:54:25 crc kubenswrapper[4799]: I1010 16:54:25.657572 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"304a677749d95012ce2795e133dc4470b68b20824bdcbe3901e9d128f4e5ec4b"} err="failed to get container status \"304a677749d95012ce2795e133dc4470b68b20824bdcbe3901e9d128f4e5ec4b\": rpc error: code = NotFound desc = could not find container \"304a677749d95012ce2795e133dc4470b68b20824bdcbe3901e9d128f4e5ec4b\": container with ID starting with 304a677749d95012ce2795e133dc4470b68b20824bdcbe3901e9d128f4e5ec4b not found: ID does not exist" Oct 10 16:54:25 crc kubenswrapper[4799]: I1010 16:54:25.657593 4799 scope.go:117] "RemoveContainer" containerID="89ea0df023f8cd2efabe4a60d20707c43edc3f235ad2613e512cece899859399" Oct 10 16:54:25 crc kubenswrapper[4799]: E1010 16:54:25.657970 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"89ea0df023f8cd2efabe4a60d20707c43edc3f235ad2613e512cece899859399\": container with ID starting with 89ea0df023f8cd2efabe4a60d20707c43edc3f235ad2613e512cece899859399 not found: ID does not exist" containerID="89ea0df023f8cd2efabe4a60d20707c43edc3f235ad2613e512cece899859399" Oct 10 16:54:25 crc kubenswrapper[4799]: I1010 16:54:25.657989 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89ea0df023f8cd2efabe4a60d20707c43edc3f235ad2613e512cece899859399"} err="failed to get container status \"89ea0df023f8cd2efabe4a60d20707c43edc3f235ad2613e512cece899859399\": rpc error: code = NotFound desc = could not find container \"89ea0df023f8cd2efabe4a60d20707c43edc3f235ad2613e512cece899859399\": container with ID starting with 89ea0df023f8cd2efabe4a60d20707c43edc3f235ad2613e512cece899859399 not found: ID does not exist" Oct 10 16:54:25 crc kubenswrapper[4799]: I1010 16:54:25.916486 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-69f7ddf877-mclzd"] Oct 10 16:54:25 crc kubenswrapper[4799]: I1010 16:54:25.923109 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-69f7ddf877-mclzd"] Oct 10 16:54:27 crc kubenswrapper[4799]: I1010 16:54:27.419587 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78820835-eb2d-40d8-a497-e9a351a9cef9" path="/var/lib/kubelet/pods/78820835-eb2d-40d8-a497-e9a351a9cef9/volumes" Oct 10 16:54:30 crc kubenswrapper[4799]: E1010 16:54:30.322069 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1 is running failed: container process not found" containerID="24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 10 16:54:30 crc kubenswrapper[4799]: E1010 16:54:30.323885 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc 
error: code = NotFound desc = container is not created or running: checking if PID of 24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1 is running failed: container process not found" containerID="24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 10 16:54:30 crc kubenswrapper[4799]: E1010 16:54:30.324030 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="387afba31b4e67a9ba9f7f2877d3f3af184a7c60b3843119336ceb5759893e62" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 10 16:54:30 crc kubenswrapper[4799]: E1010 16:54:30.324584 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1 is running failed: container process not found" containerID="24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 10 16:54:30 crc kubenswrapper[4799]: E1010 16:54:30.324675 4799 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-dtplc" podUID="e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6" containerName="ovsdb-server" Oct 10 16:54:30 crc kubenswrapper[4799]: E1010 16:54:30.326605 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="387afba31b4e67a9ba9f7f2877d3f3af184a7c60b3843119336ceb5759893e62" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 10 16:54:30 crc kubenswrapper[4799]: E1010 16:54:30.329050 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="387afba31b4e67a9ba9f7f2877d3f3af184a7c60b3843119336ceb5759893e62" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 10 16:54:30 crc kubenswrapper[4799]: E1010 16:54:30.329136 4799 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-dtplc" podUID="e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6" containerName="ovs-vswitchd" Oct 10 16:54:35 crc kubenswrapper[4799]: E1010 16:54:35.321348 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1 is running failed: container process not found" containerID="24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 10 16:54:35 crc kubenswrapper[4799]: E1010 16:54:35.323083 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 
24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1 is running failed: container process not found" containerID="24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 10 16:54:35 crc kubenswrapper[4799]: E1010 16:54:35.323145 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="387afba31b4e67a9ba9f7f2877d3f3af184a7c60b3843119336ceb5759893e62" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 10 16:54:35 crc kubenswrapper[4799]: E1010 16:54:35.323823 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1 is running failed: container process not found" containerID="24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 10 16:54:35 crc kubenswrapper[4799]: E1010 16:54:35.323903 4799 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-dtplc" podUID="e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6" containerName="ovsdb-server" Oct 10 16:54:35 crc kubenswrapper[4799]: E1010 16:54:35.325189 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="387afba31b4e67a9ba9f7f2877d3f3af184a7c60b3843119336ceb5759893e62" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 10 16:54:35 crc kubenswrapper[4799]: E1010 16:54:35.330601 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="387afba31b4e67a9ba9f7f2877d3f3af184a7c60b3843119336ceb5759893e62" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 10 16:54:35 crc kubenswrapper[4799]: E1010 16:54:35.330693 4799 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-dtplc" podUID="e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6" containerName="ovs-vswitchd" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.518980 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-dtplc_e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6/ovs-vswitchd/0.log" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.520679 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-dtplc" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.563359 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-var-log\") pod \"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6\" (UID: \"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6\") " Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.563505 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9sclh\" (UniqueName: \"kubernetes.io/projected/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-kube-api-access-9sclh\") pod \"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6\" (UID: \"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6\") " Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.563538 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-var-run\") pod \"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6\" (UID: \"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6\") " Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.563483 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-var-log" (OuterVolumeSpecName: "var-log") pod "e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6" (UID: "e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.563583 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-scripts\") pod \"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6\" (UID: \"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6\") " Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.563626 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-var-lib\") pod \"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6\" (UID: \"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6\") " Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.563640 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-etc-ovs\") pod \"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6\" (UID: \"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6\") " Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.563675 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-var-run" (OuterVolumeSpecName: "var-run") pod "e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6" (UID: "e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.563769 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-var-lib" (OuterVolumeSpecName: "var-lib") pod "e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6" (UID: "e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6"). InnerVolumeSpecName "var-lib". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.563869 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-etc-ovs" (OuterVolumeSpecName: "etc-ovs") pod "e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6" (UID: "e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6"). InnerVolumeSpecName "etc-ovs". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.564117 4799 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-var-log\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.564143 4799 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-var-run\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.564155 4799 reconciler_common.go:293] "Volume detached for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-var-lib\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.564167 4799 reconciler_common.go:293] "Volume detached for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-etc-ovs\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.564805 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-scripts" (OuterVolumeSpecName: "scripts") pod "e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6" (UID: "e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.569790 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-kube-api-access-9sclh" (OuterVolumeSpecName: "kube-api-access-9sclh") pod "e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6" (UID: "e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6"). InnerVolumeSpecName "kube-api-access-9sclh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.666005 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9sclh\" (UniqueName: \"kubernetes.io/projected/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-kube-api-access-9sclh\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.666045 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.755414 4799 generic.go:334] "Generic (PLEG): container finished" podID="68ea0968-070a-41d4-b023-31557446c4dc" containerID="d7211c49780feb5fa0e4a94a5ced7f5a84311b8cae847b8935e7948aa4a99e2c" exitCode=137 Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.755458 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"68ea0968-070a-41d4-b023-31557446c4dc","Type":"ContainerDied","Data":"d7211c49780feb5fa0e4a94a5ced7f5a84311b8cae847b8935e7948aa4a99e2c"} Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.758563 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-dtplc_e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6/ovs-vswitchd/0.log" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.759494 4799 generic.go:334] "Generic (PLEG): container finished" podID="e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6" containerID="387afba31b4e67a9ba9f7f2877d3f3af184a7c60b3843119336ceb5759893e62" exitCode=137 Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.759524 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-dtplc" event={"ID":"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6","Type":"ContainerDied","Data":"387afba31b4e67a9ba9f7f2877d3f3af184a7c60b3843119336ceb5759893e62"} Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.759551 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-dtplc" event={"ID":"e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6","Type":"ContainerDied","Data":"2ac6e83071a616986284e4825a2b05c663588602a39cf3bd68ac0ca47c7c0741"} Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.759571 4799 scope.go:117] "RemoveContainer" containerID="387afba31b4e67a9ba9f7f2877d3f3af184a7c60b3843119336ceb5759893e62" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.759588 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-dtplc" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.827335 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.831801 4799 scope.go:117] "RemoveContainer" containerID="24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.833033 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-dtplc"] Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.843280 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-ovs-dtplc"] Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.864857 4799 scope.go:117] "RemoveContainer" containerID="65d844d41d22e8a89359b0ab4a69c944a58fa895f58a1f8c5e6ad153c601d704" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.867913 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/68ea0968-070a-41d4-b023-31557446c4dc-cache\") pod \"68ea0968-070a-41d4-b023-31557446c4dc\" (UID: \"68ea0968-070a-41d4-b023-31557446c4dc\") " Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.867959 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/68ea0968-070a-41d4-b023-31557446c4dc-lock\") pod \"68ea0968-070a-41d4-b023-31557446c4dc\" (UID: \"68ea0968-070a-41d4-b023-31557446c4dc\") " Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.868034 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/68ea0968-070a-41d4-b023-31557446c4dc-etc-swift\") pod \"68ea0968-070a-41d4-b023-31557446c4dc\" (UID: \"68ea0968-070a-41d4-b023-31557446c4dc\") " Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.868059 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"68ea0968-070a-41d4-b023-31557446c4dc\" (UID: \"68ea0968-070a-41d4-b023-31557446c4dc\") " Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.868152 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cc9s9\" (UniqueName: \"kubernetes.io/projected/68ea0968-070a-41d4-b023-31557446c4dc-kube-api-access-cc9s9\") pod \"68ea0968-070a-41d4-b023-31557446c4dc\" (UID: \"68ea0968-070a-41d4-b023-31557446c4dc\") " Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.868802 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/68ea0968-070a-41d4-b023-31557446c4dc-cache" (OuterVolumeSpecName: "cache") pod "68ea0968-070a-41d4-b023-31557446c4dc" (UID: "68ea0968-070a-41d4-b023-31557446c4dc"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.868938 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/68ea0968-070a-41d4-b023-31557446c4dc-lock" (OuterVolumeSpecName: "lock") pod "68ea0968-070a-41d4-b023-31557446c4dc" (UID: "68ea0968-070a-41d4-b023-31557446c4dc"). InnerVolumeSpecName "lock". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.872172 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "swift") pod "68ea0968-070a-41d4-b023-31557446c4dc" (UID: "68ea0968-070a-41d4-b023-31557446c4dc"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.872331 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68ea0968-070a-41d4-b023-31557446c4dc-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "68ea0968-070a-41d4-b023-31557446c4dc" (UID: "68ea0968-070a-41d4-b023-31557446c4dc"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.872401 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68ea0968-070a-41d4-b023-31557446c4dc-kube-api-access-cc9s9" (OuterVolumeSpecName: "kube-api-access-cc9s9") pod "68ea0968-070a-41d4-b023-31557446c4dc" (UID: "68ea0968-070a-41d4-b023-31557446c4dc"). InnerVolumeSpecName "kube-api-access-cc9s9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.926183 4799 scope.go:117] "RemoveContainer" containerID="387afba31b4e67a9ba9f7f2877d3f3af184a7c60b3843119336ceb5759893e62" Oct 10 16:54:38 crc kubenswrapper[4799]: E1010 16:54:38.926619 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"387afba31b4e67a9ba9f7f2877d3f3af184a7c60b3843119336ceb5759893e62\": container with ID starting with 387afba31b4e67a9ba9f7f2877d3f3af184a7c60b3843119336ceb5759893e62 not found: ID does not exist" containerID="387afba31b4e67a9ba9f7f2877d3f3af184a7c60b3843119336ceb5759893e62" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.926663 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"387afba31b4e67a9ba9f7f2877d3f3af184a7c60b3843119336ceb5759893e62"} err="failed to get container status \"387afba31b4e67a9ba9f7f2877d3f3af184a7c60b3843119336ceb5759893e62\": rpc error: code = NotFound desc = could not find container \"387afba31b4e67a9ba9f7f2877d3f3af184a7c60b3843119336ceb5759893e62\": container with ID starting with 387afba31b4e67a9ba9f7f2877d3f3af184a7c60b3843119336ceb5759893e62 not found: ID does not exist" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.926688 4799 scope.go:117] "RemoveContainer" containerID="24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1" Oct 10 16:54:38 crc kubenswrapper[4799]: E1010 16:54:38.927031 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1\": container with ID starting with 24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1 not found: ID does not exist" containerID="24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.927066 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1"} err="failed to get container status 
\"24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1\": rpc error: code = NotFound desc = could not find container \"24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1\": container with ID starting with 24f52eab75b89d5e7b9cf09d4b2c644e6fdfdfe3ddfc83b09a9363aa7efda1d1 not found: ID does not exist" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.927085 4799 scope.go:117] "RemoveContainer" containerID="65d844d41d22e8a89359b0ab4a69c944a58fa895f58a1f8c5e6ad153c601d704" Oct 10 16:54:38 crc kubenswrapper[4799]: E1010 16:54:38.927321 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65d844d41d22e8a89359b0ab4a69c944a58fa895f58a1f8c5e6ad153c601d704\": container with ID starting with 65d844d41d22e8a89359b0ab4a69c944a58fa895f58a1f8c5e6ad153c601d704 not found: ID does not exist" containerID="65d844d41d22e8a89359b0ab4a69c944a58fa895f58a1f8c5e6ad153c601d704" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.927346 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65d844d41d22e8a89359b0ab4a69c944a58fa895f58a1f8c5e6ad153c601d704"} err="failed to get container status \"65d844d41d22e8a89359b0ab4a69c944a58fa895f58a1f8c5e6ad153c601d704\": rpc error: code = NotFound desc = could not find container \"65d844d41d22e8a89359b0ab4a69c944a58fa895f58a1f8c5e6ad153c601d704\": container with ID starting with 65d844d41d22e8a89359b0ab4a69c944a58fa895f58a1f8c5e6ad153c601d704 not found: ID does not exist" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.969835 4799 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/68ea0968-070a-41d4-b023-31557446c4dc-cache\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.969868 4799 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/68ea0968-070a-41d4-b023-31557446c4dc-lock\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.969882 4799 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/68ea0968-070a-41d4-b023-31557446c4dc-etc-swift\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.969922 4799 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.969936 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cc9s9\" (UniqueName: \"kubernetes.io/projected/68ea0968-070a-41d4-b023-31557446c4dc-kube-api-access-cc9s9\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:38 crc kubenswrapper[4799]: I1010 16:54:38.988807 4799 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Oct 10 16:54:39 crc kubenswrapper[4799]: I1010 16:54:39.071614 4799 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:39 crc kubenswrapper[4799]: I1010 16:54:39.464494 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6" 
path="/var/lib/kubelet/pods/e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6/volumes" Oct 10 16:54:39 crc kubenswrapper[4799]: I1010 16:54:39.789530 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"68ea0968-070a-41d4-b023-31557446c4dc","Type":"ContainerDied","Data":"9855e476445a0ac9b17564ca4265acfe54663f1f39acf56d013baebb701fa764"} Oct 10 16:54:39 crc kubenswrapper[4799]: I1010 16:54:39.789619 4799 scope.go:117] "RemoveContainer" containerID="d7211c49780feb5fa0e4a94a5ced7f5a84311b8cae847b8935e7948aa4a99e2c" Oct 10 16:54:39 crc kubenswrapper[4799]: I1010 16:54:39.789690 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Oct 10 16:54:39 crc kubenswrapper[4799]: I1010 16:54:39.840292 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Oct 10 16:54:39 crc kubenswrapper[4799]: I1010 16:54:39.847044 4799 scope.go:117] "RemoveContainer" containerID="94b0e5fe4497d52c34e39558472e6848a5c209b522dd73f975bdb4dc0e01da73" Oct 10 16:54:39 crc kubenswrapper[4799]: I1010 16:54:39.853631 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-storage-0"] Oct 10 16:54:39 crc kubenswrapper[4799]: I1010 16:54:39.883573 4799 scope.go:117] "RemoveContainer" containerID="432fefd63b99c8d28abb812f0362dcacaa5d81c188e06c3668e637ba465daf44" Oct 10 16:54:39 crc kubenswrapper[4799]: I1010 16:54:39.908984 4799 scope.go:117] "RemoveContainer" containerID="c4d301a6fc96fe120eff75102918fb4f4f64a0c1d74276e5a9d732cecede51be" Oct 10 16:54:39 crc kubenswrapper[4799]: I1010 16:54:39.944154 4799 scope.go:117] "RemoveContainer" containerID="a0a44ae2f612b300ed982c9b9af495c2acaaf967a0729c5e19eda110019db7eb" Oct 10 16:54:39 crc kubenswrapper[4799]: I1010 16:54:39.974498 4799 scope.go:117] "RemoveContainer" containerID="d4aecb0e485406b0a1fb96b8e50caa65a29728439d08b5b6330706ef802ddeb2" Oct 10 16:54:40 crc kubenswrapper[4799]: I1010 16:54:40.004187 4799 scope.go:117] "RemoveContainer" containerID="c3e06a4a05023171ceb2e34c51e209015c056f45bfa0faadf50fd4785e2e4d80" Oct 10 16:54:40 crc kubenswrapper[4799]: I1010 16:54:40.029476 4799 scope.go:117] "RemoveContainer" containerID="196848f6327818ee365268243d9ccad984ffe2d581cc81fbfd2de38d6676c1a0" Oct 10 16:54:40 crc kubenswrapper[4799]: I1010 16:54:40.057101 4799 scope.go:117] "RemoveContainer" containerID="892c2480a25d808a995817609b9bbb27b39738b861f9e1834be4106363fa31e8" Oct 10 16:54:40 crc kubenswrapper[4799]: I1010 16:54:40.079308 4799 scope.go:117] "RemoveContainer" containerID="d0489a361daf254795ccae9ddca687c512362e7b439ec64189bd20c8ab4310b0" Oct 10 16:54:40 crc kubenswrapper[4799]: I1010 16:54:40.114355 4799 scope.go:117] "RemoveContainer" containerID="5b34f901fc61925f0938ed04472b26863bfffe70a6291e77a0980d1be5dc5aa4" Oct 10 16:54:40 crc kubenswrapper[4799]: I1010 16:54:40.144122 4799 scope.go:117] "RemoveContainer" containerID="3283576ff42b8ebc10b8fec21ffc203d5257c048d20b76e1f90800f9758835db" Oct 10 16:54:40 crc kubenswrapper[4799]: I1010 16:54:40.167604 4799 scope.go:117] "RemoveContainer" containerID="e611df1a7b5dee2c47fcfa489e23af4e0028a72aa26eea7950fe0ec36316b663" Oct 10 16:54:40 crc kubenswrapper[4799]: I1010 16:54:40.194590 4799 scope.go:117] "RemoveContainer" containerID="2e2a8373854753a4479c039fcd2e9fbdfba1493d4e774ff602b6e261202c606e" Oct 10 16:54:40 crc kubenswrapper[4799]: I1010 16:54:40.224112 4799 scope.go:117] "RemoveContainer" containerID="5dff4f861c205e245abbd15cc4d3d0d0becdc5a50f4d9bd4f1427cabc2fa0347" Oct 10 16:54:41 
crc kubenswrapper[4799]: I1010 16:54:41.419382 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68ea0968-070a-41d4-b023-31557446c4dc" path="/var/lib/kubelet/pods/68ea0968-070a-41d4-b023-31557446c4dc/volumes" Oct 10 16:54:41 crc kubenswrapper[4799]: I1010 16:54:41.915723 4799 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod37642fb0-1d93-4e14-a176-fea38410097f"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod37642fb0-1d93-4e14-a176-fea38410097f] : Timed out while waiting for systemd to remove kubepods-besteffort-pod37642fb0_1d93_4e14_a176_fea38410097f.slice" Oct 10 16:54:41 crc kubenswrapper[4799]: E1010 16:54:41.915855 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort pod37642fb0-1d93-4e14-a176-fea38410097f] : unable to destroy cgroup paths for cgroup [kubepods besteffort pod37642fb0-1d93-4e14-a176-fea38410097f] : Timed out while waiting for systemd to remove kubepods-besteffort-pod37642fb0_1d93_4e14_a176_fea38410097f.slice" pod="openstack/ovsdbserver-nb-0" podUID="37642fb0-1d93-4e14-a176-fea38410097f" Oct 10 16:54:41 crc kubenswrapper[4799]: I1010 16:54:41.938837 4799 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod361ecbc5-676b-42af-9eb3-fb761f842265"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod361ecbc5-676b-42af-9eb3-fb761f842265] : Timed out while waiting for systemd to remove kubepods-besteffort-pod361ecbc5_676b_42af_9eb3_fb761f842265.slice" Oct 10 16:54:41 crc kubenswrapper[4799]: E1010 16:54:41.938890 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort pod361ecbc5-676b-42af-9eb3-fb761f842265] : unable to destroy cgroup paths for cgroup [kubepods besteffort pod361ecbc5-676b-42af-9eb3-fb761f842265] : Timed out while waiting for systemd to remove kubepods-besteffort-pod361ecbc5_676b_42af_9eb3_fb761f842265.slice" pod="openstack/dnsmasq-dns-64986d45b9-khcqw" podUID="361ecbc5-676b-42af-9eb3-fb761f842265" Oct 10 16:54:42 crc kubenswrapper[4799]: I1010 16:54:42.827931 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-64986d45b9-khcqw" Oct 10 16:54:42 crc kubenswrapper[4799]: I1010 16:54:42.828045 4799 util.go:48] "No ready sandbox for pod can be found. 
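The "Timed out while waiting for systemd to remove ...slice" failures above arise because the kubelet asks systemd to delete the pod's slice and then waits, with a deadline, for the cgroup to actually disappear; when the deadline passes the pod sync is marked failed and retried, which is why the pods are still cleaned up a few seconds later. A minimal sketch of such a bounded wait, assuming a cgroupfs path and timeout chosen purely for illustration:

    package main

    import (
            "errors"
            "fmt"
            "os"
            "time"
    )

    // waitForCgroupRemoval polls until the cgroup directory is gone or the
    // deadline passes, mirroring the timeout failure mode logged above.
    func waitForCgroupRemoval(path string, timeout time.Duration) error {
            deadline := time.Now().Add(timeout)
            for time.Now().Before(deadline) {
                    if _, err := os.Stat(path); errors.Is(err, os.ErrNotExist) {
                            return nil // systemd finished tearing the slice down
                    }
                    time.Sleep(100 * time.Millisecond)
            }
            return fmt.Errorf("timed out waiting for systemd to remove %s", path)
    }

    func main() {
            err := waitForCgroupRemoval("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice", 2*time.Second)
            fmt.Println(err) // a failure here would simply be retried on the next pod sync
    }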
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 10 16:54:42 crc kubenswrapper[4799]: I1010 16:54:42.871466 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 10 16:54:42 crc kubenswrapper[4799]: I1010 16:54:42.885424 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 10 16:54:42 crc kubenswrapper[4799]: I1010 16:54:42.901297 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-64986d45b9-khcqw"] Oct 10 16:54:42 crc kubenswrapper[4799]: I1010 16:54:42.908704 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-64986d45b9-khcqw"] Oct 10 16:54:43 crc kubenswrapper[4799]: I1010 16:54:43.418102 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="361ecbc5-676b-42af-9eb3-fb761f842265" path="/var/lib/kubelet/pods/361ecbc5-676b-42af-9eb3-fb761f842265/volumes" Oct 10 16:54:43 crc kubenswrapper[4799]: I1010 16:54:43.419342 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37642fb0-1d93-4e14-a176-fea38410097f" path="/var/lib/kubelet/pods/37642fb0-1d93-4e14-a176-fea38410097f/volumes" Oct 10 16:54:43 crc kubenswrapper[4799]: I1010 16:54:43.484645 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novaapieef9-account-delete-qqxp5" Oct 10 16:54:43 crc kubenswrapper[4799]: I1010 16:54:43.491116 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placementcbdc-account-delete-9cz6z" Oct 10 16:54:43 crc kubenswrapper[4799]: I1010 16:54:43.543667 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nh5cr\" (UniqueName: \"kubernetes.io/projected/f1d90c74-271d-45af-9c91-87250b178ca6-kube-api-access-nh5cr\") pod \"f1d90c74-271d-45af-9c91-87250b178ca6\" (UID: \"f1d90c74-271d-45af-9c91-87250b178ca6\") " Oct 10 16:54:43 crc kubenswrapper[4799]: I1010 16:54:43.543865 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hrd64\" (UniqueName: \"kubernetes.io/projected/4cc746af-c99b-4cb9-acde-dc3b97e424a3-kube-api-access-hrd64\") pod \"4cc746af-c99b-4cb9-acde-dc3b97e424a3\" (UID: \"4cc746af-c99b-4cb9-acde-dc3b97e424a3\") " Oct 10 16:54:43 crc kubenswrapper[4799]: I1010 16:54:43.550566 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1d90c74-271d-45af-9c91-87250b178ca6-kube-api-access-nh5cr" (OuterVolumeSpecName: "kube-api-access-nh5cr") pod "f1d90c74-271d-45af-9c91-87250b178ca6" (UID: "f1d90c74-271d-45af-9c91-87250b178ca6"). InnerVolumeSpecName "kube-api-access-nh5cr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:43 crc kubenswrapper[4799]: I1010 16:54:43.556205 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4cc746af-c99b-4cb9-acde-dc3b97e424a3-kube-api-access-hrd64" (OuterVolumeSpecName: "kube-api-access-hrd64") pod "4cc746af-c99b-4cb9-acde-dc3b97e424a3" (UID: "4cc746af-c99b-4cb9-acde-dc3b97e424a3"). InnerVolumeSpecName "kube-api-access-hrd64". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:43 crc kubenswrapper[4799]: I1010 16:54:43.645395 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hrd64\" (UniqueName: \"kubernetes.io/projected/4cc746af-c99b-4cb9-acde-dc3b97e424a3-kube-api-access-hrd64\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:43 crc kubenswrapper[4799]: I1010 16:54:43.645429 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nh5cr\" (UniqueName: \"kubernetes.io/projected/f1d90c74-271d-45af-9c91-87250b178ca6-kube-api-access-nh5cr\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:43 crc kubenswrapper[4799]: I1010 16:54:43.842443 4799 generic.go:334] "Generic (PLEG): container finished" podID="4cc746af-c99b-4cb9-acde-dc3b97e424a3" containerID="8be05ed436116475cdf9a313be0e5fb3e3d22f288f745f295d88ac7f0de2c8cd" exitCode=137 Oct 10 16:54:43 crc kubenswrapper[4799]: I1010 16:54:43.842531 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novaapieef9-account-delete-qqxp5" Oct 10 16:54:43 crc kubenswrapper[4799]: I1010 16:54:43.842555 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapieef9-account-delete-qqxp5" event={"ID":"4cc746af-c99b-4cb9-acde-dc3b97e424a3","Type":"ContainerDied","Data":"8be05ed436116475cdf9a313be0e5fb3e3d22f288f745f295d88ac7f0de2c8cd"} Oct 10 16:54:43 crc kubenswrapper[4799]: I1010 16:54:43.842608 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapieef9-account-delete-qqxp5" event={"ID":"4cc746af-c99b-4cb9-acde-dc3b97e424a3","Type":"ContainerDied","Data":"424cbe41e418a491958f6d1bf8fecff1d6c225aefb5591d9e73fc218652a05da"} Oct 10 16:54:43 crc kubenswrapper[4799]: I1010 16:54:43.842641 4799 scope.go:117] "RemoveContainer" containerID="8be05ed436116475cdf9a313be0e5fb3e3d22f288f745f295d88ac7f0de2c8cd" Oct 10 16:54:43 crc kubenswrapper[4799]: I1010 16:54:43.846321 4799 generic.go:334] "Generic (PLEG): container finished" podID="f1d90c74-271d-45af-9c91-87250b178ca6" containerID="c9af17389b65f1506044e0ff931f039bde8034005b3476a949f660fb65fd52bf" exitCode=137 Oct 10 16:54:43 crc kubenswrapper[4799]: I1010 16:54:43.846380 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placementcbdc-account-delete-9cz6z" event={"ID":"f1d90c74-271d-45af-9c91-87250b178ca6","Type":"ContainerDied","Data":"c9af17389b65f1506044e0ff931f039bde8034005b3476a949f660fb65fd52bf"} Oct 10 16:54:43 crc kubenswrapper[4799]: I1010 16:54:43.846419 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placementcbdc-account-delete-9cz6z" event={"ID":"f1d90c74-271d-45af-9c91-87250b178ca6","Type":"ContainerDied","Data":"3a6320145f34cd1ebd220511da24ae57b951f894bb881370e6aa1050296204e7"} Oct 10 16:54:43 crc kubenswrapper[4799]: I1010 16:54:43.846471 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placementcbdc-account-delete-9cz6z" Oct 10 16:54:43 crc kubenswrapper[4799]: I1010 16:54:43.894008 4799 scope.go:117] "RemoveContainer" containerID="8be05ed436116475cdf9a313be0e5fb3e3d22f288f745f295d88ac7f0de2c8cd" Oct 10 16:54:43 crc kubenswrapper[4799]: E1010 16:54:43.894743 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8be05ed436116475cdf9a313be0e5fb3e3d22f288f745f295d88ac7f0de2c8cd\": container with ID starting with 8be05ed436116475cdf9a313be0e5fb3e3d22f288f745f295d88ac7f0de2c8cd not found: ID does not exist" containerID="8be05ed436116475cdf9a313be0e5fb3e3d22f288f745f295d88ac7f0de2c8cd" Oct 10 16:54:43 crc kubenswrapper[4799]: I1010 16:54:43.894802 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8be05ed436116475cdf9a313be0e5fb3e3d22f288f745f295d88ac7f0de2c8cd"} err="failed to get container status \"8be05ed436116475cdf9a313be0e5fb3e3d22f288f745f295d88ac7f0de2c8cd\": rpc error: code = NotFound desc = could not find container \"8be05ed436116475cdf9a313be0e5fb3e3d22f288f745f295d88ac7f0de2c8cd\": container with ID starting with 8be05ed436116475cdf9a313be0e5fb3e3d22f288f745f295d88ac7f0de2c8cd not found: ID does not exist" Oct 10 16:54:43 crc kubenswrapper[4799]: I1010 16:54:43.894826 4799 scope.go:117] "RemoveContainer" containerID="c9af17389b65f1506044e0ff931f039bde8034005b3476a949f660fb65fd52bf" Oct 10 16:54:43 crc kubenswrapper[4799]: I1010 16:54:43.915500 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapieef9-account-delete-qqxp5"] Oct 10 16:54:43 crc kubenswrapper[4799]: I1010 16:54:43.923178 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/novaapieef9-account-delete-qqxp5"] Oct 10 16:54:43 crc kubenswrapper[4799]: I1010 16:54:43.924703 4799 scope.go:117] "RemoveContainer" containerID="c9af17389b65f1506044e0ff931f039bde8034005b3476a949f660fb65fd52bf" Oct 10 16:54:43 crc kubenswrapper[4799]: E1010 16:54:43.925338 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9af17389b65f1506044e0ff931f039bde8034005b3476a949f660fb65fd52bf\": container with ID starting with c9af17389b65f1506044e0ff931f039bde8034005b3476a949f660fb65fd52bf not found: ID does not exist" containerID="c9af17389b65f1506044e0ff931f039bde8034005b3476a949f660fb65fd52bf" Oct 10 16:54:43 crc kubenswrapper[4799]: I1010 16:54:43.925408 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9af17389b65f1506044e0ff931f039bde8034005b3476a949f660fb65fd52bf"} err="failed to get container status \"c9af17389b65f1506044e0ff931f039bde8034005b3476a949f660fb65fd52bf\": rpc error: code = NotFound desc = could not find container \"c9af17389b65f1506044e0ff931f039bde8034005b3476a949f660fb65fd52bf\": container with ID starting with c9af17389b65f1506044e0ff931f039bde8034005b3476a949f660fb65fd52bf not found: ID does not exist" Oct 10 16:54:43 crc kubenswrapper[4799]: I1010 16:54:43.930151 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placementcbdc-account-delete-9cz6z"] Oct 10 16:54:43 crc kubenswrapper[4799]: I1010 16:54:43.937663 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placementcbdc-account-delete-9cz6z"] Oct 10 16:54:44 crc kubenswrapper[4799]: I1010 16:54:44.247750 4799 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" 
cgroupName=["kubepods","besteffort","pode424a8e6-64c8-4572-8706-33026a2cc44d"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pode424a8e6-64c8-4572-8706-33026a2cc44d] : Timed out while waiting for systemd to remove kubepods-besteffort-pode424a8e6_64c8_4572_8706_33026a2cc44d.slice" Oct 10 16:54:44 crc kubenswrapper[4799]: I1010 16:54:44.598973 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novacell0f8a8-account-delete-6lbfn" Oct 10 16:54:44 crc kubenswrapper[4799]: I1010 16:54:44.638112 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutronfdbc-account-delete-b8x6d" Oct 10 16:54:44 crc kubenswrapper[4799]: I1010 16:54:44.663450 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-49cds\" (UniqueName: \"kubernetes.io/projected/acf4a111-174e-42e8-8e71-d5bd053d5de2-kube-api-access-49cds\") pod \"acf4a111-174e-42e8-8e71-d5bd053d5de2\" (UID: \"acf4a111-174e-42e8-8e71-d5bd053d5de2\") " Oct 10 16:54:44 crc kubenswrapper[4799]: I1010 16:54:44.673418 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/acf4a111-174e-42e8-8e71-d5bd053d5de2-kube-api-access-49cds" (OuterVolumeSpecName: "kube-api-access-49cds") pod "acf4a111-174e-42e8-8e71-d5bd053d5de2" (UID: "acf4a111-174e-42e8-8e71-d5bd053d5de2"). InnerVolumeSpecName "kube-api-access-49cds". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:44 crc kubenswrapper[4799]: I1010 16:54:44.764675 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lk7sz\" (UniqueName: \"kubernetes.io/projected/445b5551-e072-43ca-a6e2-8f7fe726bb42-kube-api-access-lk7sz\") pod \"445b5551-e072-43ca-a6e2-8f7fe726bb42\" (UID: \"445b5551-e072-43ca-a6e2-8f7fe726bb42\") " Oct 10 16:54:44 crc kubenswrapper[4799]: I1010 16:54:44.765020 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-49cds\" (UniqueName: \"kubernetes.io/projected/acf4a111-174e-42e8-8e71-d5bd053d5de2-kube-api-access-49cds\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:44 crc kubenswrapper[4799]: I1010 16:54:44.768253 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/445b5551-e072-43ca-a6e2-8f7fe726bb42-kube-api-access-lk7sz" (OuterVolumeSpecName: "kube-api-access-lk7sz") pod "445b5551-e072-43ca-a6e2-8f7fe726bb42" (UID: "445b5551-e072-43ca-a6e2-8f7fe726bb42"). InnerVolumeSpecName "kube-api-access-lk7sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:54:44 crc kubenswrapper[4799]: I1010 16:54:44.859498 4799 generic.go:334] "Generic (PLEG): container finished" podID="acf4a111-174e-42e8-8e71-d5bd053d5de2" containerID="d564a59e74011f00908e5d028874c8e790afb8ab03756bf88623b435ac7d513a" exitCode=137 Oct 10 16:54:44 crc kubenswrapper[4799]: I1010 16:54:44.859574 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/novacell0f8a8-account-delete-6lbfn" Oct 10 16:54:44 crc kubenswrapper[4799]: I1010 16:54:44.859580 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell0f8a8-account-delete-6lbfn" event={"ID":"acf4a111-174e-42e8-8e71-d5bd053d5de2","Type":"ContainerDied","Data":"d564a59e74011f00908e5d028874c8e790afb8ab03756bf88623b435ac7d513a"} Oct 10 16:54:44 crc kubenswrapper[4799]: I1010 16:54:44.859691 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell0f8a8-account-delete-6lbfn" event={"ID":"acf4a111-174e-42e8-8e71-d5bd053d5de2","Type":"ContainerDied","Data":"0f9ddaa49add5f4f7fed87c0f54b9a4678ac17565e30745a3c27d5ad8ce19580"} Oct 10 16:54:44 crc kubenswrapper[4799]: I1010 16:54:44.859713 4799 scope.go:117] "RemoveContainer" containerID="d564a59e74011f00908e5d028874c8e790afb8ab03756bf88623b435ac7d513a" Oct 10 16:54:44 crc kubenswrapper[4799]: I1010 16:54:44.866312 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lk7sz\" (UniqueName: \"kubernetes.io/projected/445b5551-e072-43ca-a6e2-8f7fe726bb42-kube-api-access-lk7sz\") on node \"crc\" DevicePath \"\"" Oct 10 16:54:44 crc kubenswrapper[4799]: I1010 16:54:44.872381 4799 generic.go:334] "Generic (PLEG): container finished" podID="445b5551-e072-43ca-a6e2-8f7fe726bb42" containerID="84892136290e5ee51f0b78717f3f57f778e954c4b91dd0b7d98046a5d798a718" exitCode=137 Oct 10 16:54:44 crc kubenswrapper[4799]: I1010 16:54:44.872419 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutronfdbc-account-delete-b8x6d" event={"ID":"445b5551-e072-43ca-a6e2-8f7fe726bb42","Type":"ContainerDied","Data":"84892136290e5ee51f0b78717f3f57f778e954c4b91dd0b7d98046a5d798a718"} Oct 10 16:54:44 crc kubenswrapper[4799]: I1010 16:54:44.872449 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutronfdbc-account-delete-b8x6d" event={"ID":"445b5551-e072-43ca-a6e2-8f7fe726bb42","Type":"ContainerDied","Data":"bbcada6d0c09a1870bcf967da0d23cdde5bb081cbf84851a3f26dc9e5d64534b"} Oct 10 16:54:44 crc kubenswrapper[4799]: I1010 16:54:44.872475 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutronfdbc-account-delete-b8x6d" Oct 10 16:54:44 crc kubenswrapper[4799]: I1010 16:54:44.879333 4799 scope.go:117] "RemoveContainer" containerID="d564a59e74011f00908e5d028874c8e790afb8ab03756bf88623b435ac7d513a" Oct 10 16:54:44 crc kubenswrapper[4799]: E1010 16:54:44.880118 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d564a59e74011f00908e5d028874c8e790afb8ab03756bf88623b435ac7d513a\": container with ID starting with d564a59e74011f00908e5d028874c8e790afb8ab03756bf88623b435ac7d513a not found: ID does not exist" containerID="d564a59e74011f00908e5d028874c8e790afb8ab03756bf88623b435ac7d513a" Oct 10 16:54:44 crc kubenswrapper[4799]: I1010 16:54:44.880186 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d564a59e74011f00908e5d028874c8e790afb8ab03756bf88623b435ac7d513a"} err="failed to get container status \"d564a59e74011f00908e5d028874c8e790afb8ab03756bf88623b435ac7d513a\": rpc error: code = NotFound desc = could not find container \"d564a59e74011f00908e5d028874c8e790afb8ab03756bf88623b435ac7d513a\": container with ID starting with d564a59e74011f00908e5d028874c8e790afb8ab03756bf88623b435ac7d513a not found: ID does not exist" Oct 10 16:54:44 crc kubenswrapper[4799]: I1010 16:54:44.880227 4799 scope.go:117] "RemoveContainer" containerID="84892136290e5ee51f0b78717f3f57f778e954c4b91dd0b7d98046a5d798a718" Oct 10 16:54:44 crc kubenswrapper[4799]: I1010 16:54:44.909915 4799 scope.go:117] "RemoveContainer" containerID="84892136290e5ee51f0b78717f3f57f778e954c4b91dd0b7d98046a5d798a718" Oct 10 16:54:44 crc kubenswrapper[4799]: E1010 16:54:44.910535 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84892136290e5ee51f0b78717f3f57f778e954c4b91dd0b7d98046a5d798a718\": container with ID starting with 84892136290e5ee51f0b78717f3f57f778e954c4b91dd0b7d98046a5d798a718 not found: ID does not exist" containerID="84892136290e5ee51f0b78717f3f57f778e954c4b91dd0b7d98046a5d798a718" Oct 10 16:54:44 crc kubenswrapper[4799]: I1010 16:54:44.910590 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84892136290e5ee51f0b78717f3f57f778e954c4b91dd0b7d98046a5d798a718"} err="failed to get container status \"84892136290e5ee51f0b78717f3f57f778e954c4b91dd0b7d98046a5d798a718\": rpc error: code = NotFound desc = could not find container \"84892136290e5ee51f0b78717f3f57f778e954c4b91dd0b7d98046a5d798a718\": container with ID starting with 84892136290e5ee51f0b78717f3f57f778e954c4b91dd0b7d98046a5d798a718 not found: ID does not exist" Oct 10 16:54:44 crc kubenswrapper[4799]: I1010 16:54:44.913522 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell0f8a8-account-delete-6lbfn"] Oct 10 16:54:44 crc kubenswrapper[4799]: I1010 16:54:44.931167 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/novacell0f8a8-account-delete-6lbfn"] Oct 10 16:54:44 crc kubenswrapper[4799]: I1010 16:54:44.942002 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutronfdbc-account-delete-b8x6d"] Oct 10 16:54:44 crc kubenswrapper[4799]: I1010 16:54:44.950591 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutronfdbc-account-delete-b8x6d"] Oct 10 16:54:45 crc kubenswrapper[4799]: I1010 16:54:45.417615 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="445b5551-e072-43ca-a6e2-8f7fe726bb42" path="/var/lib/kubelet/pods/445b5551-e072-43ca-a6e2-8f7fe726bb42/volumes" Oct 10 16:54:45 crc kubenswrapper[4799]: I1010 16:54:45.418621 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4cc746af-c99b-4cb9-acde-dc3b97e424a3" path="/var/lib/kubelet/pods/4cc746af-c99b-4cb9-acde-dc3b97e424a3/volumes" Oct 10 16:54:45 crc kubenswrapper[4799]: I1010 16:54:45.419566 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="acf4a111-174e-42e8-8e71-d5bd053d5de2" path="/var/lib/kubelet/pods/acf4a111-174e-42e8-8e71-d5bd053d5de2/volumes" Oct 10 16:54:45 crc kubenswrapper[4799]: I1010 16:54:45.420478 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1d90c74-271d-45af-9c91-87250b178ca6" path="/var/lib/kubelet/pods/f1d90c74-271d-45af-9c91-87250b178ca6/volumes" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.918289 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5xwlk"] Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919193 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="object-updater" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919210 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="object-updater" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919225 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a28d2a6-5dfc-414b-9eed-2f412cfc7063" containerName="mysql-bootstrap" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919235 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a28d2a6-5dfc-414b-9eed-2f412cfc7063" containerName="mysql-bootstrap" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919251 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1d90c74-271d-45af-9c91-87250b178ca6" containerName="mariadb-account-delete" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919261 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1d90c74-271d-45af-9c91-87250b178ca6" containerName="mariadb-account-delete" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919272 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60be0e86-f2dd-4575-b3c8-0131575b1cd8" containerName="nova-cell0-conductor-conductor" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919282 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="60be0e86-f2dd-4575-b3c8-0131575b1cd8" containerName="nova-cell0-conductor-conductor" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919299 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="baff4453-a6a2-4264-82b7-3ce7c22734f6" containerName="proxy-httpd" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919306 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="baff4453-a6a2-4264-82b7-3ce7c22734f6" containerName="proxy-httpd" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919316 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a05167f-cd58-4f9f-806b-8d71271320d2" containerName="barbican-worker" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919324 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a05167f-cd58-4f9f-806b-8d71271320d2" containerName="barbican-worker" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919334 4799 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="7858ee88-c7b9-4fb7-b825-569154134201" containerName="rabbitmq" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919340 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7858ee88-c7b9-4fb7-b825-569154134201" containerName="rabbitmq" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919351 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fd6f03f-abea-4c29-8060-0705bb0af2c7" containerName="rabbitmq" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919358 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fd6f03f-abea-4c29-8060-0705bb0af2c7" containerName="rabbitmq" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919368 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acf4a111-174e-42e8-8e71-d5bd053d5de2" containerName="mariadb-account-delete" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919376 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="acf4a111-174e-42e8-8e71-d5bd053d5de2" containerName="mariadb-account-delete" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919385 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ab7b7c1-e89f-4562-882b-4f517f90f8c8" containerName="placement-api" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919395 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ab7b7c1-e89f-4562-882b-4f517f90f8c8" containerName="placement-api" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919405 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dc78f94-acb0-4411-b1a2-14dd6500674b" containerName="probe" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919413 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dc78f94-acb0-4411-b1a2-14dd6500674b" containerName="probe" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919427 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e424a8e6-64c8-4572-8706-33026a2cc44d" containerName="glance-log" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919434 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="e424a8e6-64c8-4572-8706-33026a2cc44d" containerName="glance-log" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919443 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1bd0e459-efb9-463c-a8fc-d08a3194f3d9" containerName="barbican-keystone-listener-log" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919450 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="1bd0e459-efb9-463c-a8fc-d08a3194f3d9" containerName="barbican-keystone-listener-log" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919461 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7858ee88-c7b9-4fb7-b825-569154134201" containerName="setup-container" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919469 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7858ee88-c7b9-4fb7-b825-569154134201" containerName="setup-container" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919478 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a28d2a6-5dfc-414b-9eed-2f412cfc7063" containerName="galera" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919485 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a28d2a6-5dfc-414b-9eed-2f412cfc7063" containerName="galera" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919496 4799 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6" containerName="ovsdb-server-init" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919506 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6" containerName="ovsdb-server-init" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919519 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2db42625-4f7b-479c-a580-c94d6cafb3fe" containerName="nova-api-log" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919527 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="2db42625-4f7b-479c-a580-c94d6cafb3fe" containerName="nova-api-log" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919535 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1bd0e459-efb9-463c-a8fc-d08a3194f3d9" containerName="barbican-keystone-listener" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919542 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="1bd0e459-efb9-463c-a8fc-d08a3194f3d9" containerName="barbican-keystone-listener" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919552 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4cc746af-c99b-4cb9-acde-dc3b97e424a3" containerName="mariadb-account-delete" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919561 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="4cc746af-c99b-4cb9-acde-dc3b97e424a3" containerName="mariadb-account-delete" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919575 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="785f8ce9-5280-44fe-891c-8162f2fdcd7a" containerName="nova-cell1-novncproxy-novncproxy" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919583 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="785f8ce9-5280-44fe-891c-8162f2fdcd7a" containerName="nova-cell1-novncproxy-novncproxy" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919599 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6" containerName="ovs-vswitchd" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919606 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6" containerName="ovs-vswitchd" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919617 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="account-auditor" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919624 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="account-auditor" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919639 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="baff4453-a6a2-4264-82b7-3ce7c22734f6" containerName="ceilometer-notification-agent" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919646 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="baff4453-a6a2-4264-82b7-3ce7c22734f6" containerName="ceilometer-notification-agent" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919660 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ae9763d-31dd-44c7-bf35-11a896a4f785" containerName="nova-scheduler-scheduler" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919668 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ae9763d-31dd-44c7-bf35-11a896a4f785" 
containerName="nova-scheduler-scheduler" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919678 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdfeebc0-d50f-42f8-a461-b0aea7ba6a11" containerName="openstack-network-exporter" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919686 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdfeebc0-d50f-42f8-a461-b0aea7ba6a11" containerName="openstack-network-exporter" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919700 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="baff4453-a6a2-4264-82b7-3ce7c22734f6" containerName="sg-core" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919707 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="baff4453-a6a2-4264-82b7-3ce7c22734f6" containerName="sg-core" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919718 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31fc68f8-af18-42b7-a94c-90a22afea5f1" containerName="glance-log" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919725 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="31fc68f8-af18-42b7-a94c-90a22afea5f1" containerName="glance-log" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919738 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="account-replicator" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919746 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="account-replicator" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919822 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="object-auditor" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919830 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="object-auditor" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919843 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9c4cd8a-6aed-4826-b23b-328645f5801f" containerName="cinder-api" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919851 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9c4cd8a-6aed-4826-b23b-328645f5801f" containerName="cinder-api" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919859 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="971dd170-cc55-481f-b76d-820102f811cd" containerName="nova-cell1-conductor-conductor" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919866 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="971dd170-cc55-481f-b76d-820102f811cd" containerName="nova-cell1-conductor-conductor" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919877 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="baff4453-a6a2-4264-82b7-3ce7c22734f6" containerName="ceilometer-central-agent" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919885 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="baff4453-a6a2-4264-82b7-3ce7c22734f6" containerName="ceilometer-central-agent" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919901 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e424a8e6-64c8-4572-8706-33026a2cc44d" containerName="glance-httpd" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919908 4799 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="e424a8e6-64c8-4572-8706-33026a2cc44d" containerName="glance-httpd" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919920 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="container-server" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919929 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="container-server" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919939 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="container-updater" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919946 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="container-updater" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919957 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6" containerName="ovsdb-server" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919965 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6" containerName="ovsdb-server" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919974 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe3f0980-0eb7-4267-953a-3fcfa08a22b3" containerName="mysql-bootstrap" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.919982 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe3f0980-0eb7-4267-953a-3fcfa08a22b3" containerName="mysql-bootstrap" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.919993 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="container-replicator" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920000 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="container-replicator" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.920012 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="account-server" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920019 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="account-server" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.920032 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31fc68f8-af18-42b7-a94c-90a22afea5f1" containerName="glance-httpd" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920040 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="31fc68f8-af18-42b7-a94c-90a22afea5f1" containerName="glance-httpd" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.920049 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b59d111-5df2-4b9f-9d02-7a3f9e19d02c" containerName="mariadb-account-delete" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920057 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b59d111-5df2-4b9f-9d02-7a3f9e19d02c" containerName="mariadb-account-delete" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.920070 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9c4cd8a-6aed-4826-b23b-328645f5801f" containerName="cinder-api-log" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920077 
4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9c4cd8a-6aed-4826-b23b-328645f5801f" containerName="cinder-api-log" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.920087 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="rsync" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920095 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="rsync" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.920106 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fd6f03f-abea-4c29-8060-0705bb0af2c7" containerName="setup-container" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920114 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fd6f03f-abea-4c29-8060-0705bb0af2c7" containerName="setup-container" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.920127 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ab7b7c1-e89f-4562-882b-4f517f90f8c8" containerName="placement-log" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920135 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ab7b7c1-e89f-4562-882b-4f517f90f8c8" containerName="placement-log" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.920156 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="container-auditor" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920165 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="container-auditor" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.920175 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dc78f94-acb0-4411-b1a2-14dd6500674b" containerName="cinder-scheduler" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920183 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dc78f94-acb0-4411-b1a2-14dd6500674b" containerName="cinder-scheduler" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.920192 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="445b5551-e072-43ca-a6e2-8f7fe726bb42" containerName="mariadb-account-delete" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920201 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="445b5551-e072-43ca-a6e2-8f7fe726bb42" containerName="mariadb-account-delete" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.920214 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78820835-eb2d-40d8-a497-e9a351a9cef9" containerName="neutron-api" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920222 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="78820835-eb2d-40d8-a497-e9a351a9cef9" containerName="neutron-api" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.920230 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e6870d5-faea-46d9-bebb-4d237b802910" containerName="nova-metadata-metadata" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920238 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e6870d5-faea-46d9-bebb-4d237b802910" containerName="nova-metadata-metadata" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.920250 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="swift-recon-cron" Oct 10 16:55:10 crc kubenswrapper[4799]: 
I1010 16:55:10.920257 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="swift-recon-cron" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.920270 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdfeebc0-d50f-42f8-a461-b0aea7ba6a11" containerName="ovn-northd" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920277 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdfeebc0-d50f-42f8-a461-b0aea7ba6a11" containerName="ovn-northd" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.920289 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71374742-0685-4486-bb2d-97116af40765" containerName="mariadb-account-delete" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920297 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="71374742-0685-4486-bb2d-97116af40765" containerName="mariadb-account-delete" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.920309 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="985d1485-7054-475b-8e60-85db5dc5afa3" containerName="memcached" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920316 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="985d1485-7054-475b-8e60-85db5dc5afa3" containerName="memcached" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.920330 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="object-expirer" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920338 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="object-expirer" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.920349 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac766919-d788-40da-879a-627919926594" containerName="barbican-api" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920357 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac766919-d788-40da-879a-627919926594" containerName="barbican-api" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.920365 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e6870d5-faea-46d9-bebb-4d237b802910" containerName="nova-metadata-log" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920373 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e6870d5-faea-46d9-bebb-4d237b802910" containerName="nova-metadata-log" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.920387 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="account-reaper" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920396 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="account-reaper" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.920407 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f2d77fd-b861-4589-bdb5-ad606deb3360" containerName="mariadb-account-delete" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920415 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f2d77fd-b861-4589-bdb5-ad606deb3360" containerName="mariadb-account-delete" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.920429 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2db42625-4f7b-479c-a580-c94d6cafb3fe" containerName="nova-api-api" Oct 10 16:55:10 crc 
kubenswrapper[4799]: I1010 16:55:10.920437 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="2db42625-4f7b-479c-a580-c94d6cafb3fe" containerName="nova-api-api" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.920451 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe3f0980-0eb7-4267-953a-3fcfa08a22b3" containerName="galera" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920459 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe3f0980-0eb7-4267-953a-3fcfa08a22b3" containerName="galera" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.920469 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="object-server" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920477 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="object-server" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.920485 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1" containerName="keystone-api" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920493 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1" containerName="keystone-api" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.920506 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78820835-eb2d-40d8-a497-e9a351a9cef9" containerName="neutron-httpd" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920515 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="78820835-eb2d-40d8-a497-e9a351a9cef9" containerName="neutron-httpd" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.920530 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a05167f-cd58-4f9f-806b-8d71271320d2" containerName="barbican-worker-log" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920539 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a05167f-cd58-4f9f-806b-8d71271320d2" containerName="barbican-worker-log" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.920552 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="object-replicator" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920560 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="object-replicator" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.920572 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac766919-d788-40da-879a-627919926594" containerName="barbican-api-log" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920579 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac766919-d788-40da-879a-627919926594" containerName="barbican-api-log" Oct 10 16:55:10 crc kubenswrapper[4799]: E1010 16:55:10.920587 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69aa641a-13ff-4f65-b2ea-7fee3ad42134" containerName="kube-state-metrics" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920594 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="69aa641a-13ff-4f65-b2ea-7fee3ad42134" containerName="kube-state-metrics" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920796 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9c4cd8a-6aed-4826-b23b-328645f5801f" containerName="cinder-api" Oct 10 16:55:10 crc 
kubenswrapper[4799]: I1010 16:55:10.920809 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ab7b7c1-e89f-4562-882b-4f517f90f8c8" containerName="placement-api" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920831 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe3f0980-0eb7-4267-953a-3fcfa08a22b3" containerName="galera" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920850 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="object-auditor" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920860 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6" containerName="ovs-vswitchd" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920873 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="account-server" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920885 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="account-reaper" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920897 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="e424a8e6-64c8-4572-8706-33026a2cc44d" containerName="glance-log" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920905 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="baff4453-a6a2-4264-82b7-3ce7c22734f6" containerName="proxy-httpd" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920916 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="60be0e86-f2dd-4575-b3c8-0131575b1cd8" containerName="nova-cell0-conductor-conductor" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920926 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="69aa641a-13ff-4f65-b2ea-7fee3ad42134" containerName="kube-state-metrics" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920940 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="31fc68f8-af18-42b7-a94c-90a22afea5f1" containerName="glance-httpd" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920955 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="container-auditor" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920963 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9c4cd8a-6aed-4826-b23b-328645f5801f" containerName="cinder-api-log" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920971 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="4cc746af-c99b-4cb9-acde-dc3b97e424a3" containerName="mariadb-account-delete" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920982 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="object-server" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.920993 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="1bd0e459-efb9-463c-a8fc-d08a3194f3d9" containerName="barbican-keystone-listener-log" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921001 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="container-replicator" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921015 4799 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="acf4a111-174e-42e8-8e71-d5bd053d5de2" containerName="mariadb-account-delete" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921029 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="object-updater" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921040 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dc78f94-acb0-4411-b1a2-14dd6500674b" containerName="probe" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921049 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="78820835-eb2d-40d8-a497-e9a351a9cef9" containerName="neutron-api" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921060 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="account-auditor" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921074 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="785f8ce9-5280-44fe-891c-8162f2fdcd7a" containerName="nova-cell1-novncproxy-novncproxy" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921085 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a28d2a6-5dfc-414b-9eed-2f412cfc7063" containerName="galera" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921095 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="7858ee88-c7b9-4fb7-b825-569154134201" containerName="rabbitmq" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921108 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="baff4453-a6a2-4264-82b7-3ce7c22734f6" containerName="ceilometer-notification-agent" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921117 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a05167f-cd58-4f9f-806b-8d71271320d2" containerName="barbican-worker" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921126 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="baff4453-a6a2-4264-82b7-3ce7c22734f6" containerName="ceilometer-central-agent" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921139 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dc78f94-acb0-4411-b1a2-14dd6500674b" containerName="cinder-scheduler" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921150 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ab7b7c1-e89f-4562-882b-4f517f90f8c8" containerName="placement-log" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921162 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac766919-d788-40da-879a-627919926594" containerName="barbican-api" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921171 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="2db42625-4f7b-479c-a580-c94d6cafb3fe" containerName="nova-api-api" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921183 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb41e34f-dc1c-4b2e-9437-44a0e84e2cd1" containerName="keystone-api" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921194 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="9fd6f03f-abea-4c29-8060-0705bb0af2c7" containerName="rabbitmq" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921203 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="78820835-eb2d-40d8-a497-e9a351a9cef9" containerName="neutron-httpd" Oct 10 
16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921213 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="container-server" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921226 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="container-updater" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921234 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="1bd0e459-efb9-463c-a8fc-d08a3194f3d9" containerName="barbican-keystone-listener" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921247 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="985d1485-7054-475b-8e60-85db5dc5afa3" containerName="memcached" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921258 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="31fc68f8-af18-42b7-a94c-90a22afea5f1" containerName="glance-log" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921271 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="object-replicator" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921282 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="445b5551-e072-43ca-a6e2-8f7fe726bb42" containerName="mariadb-account-delete" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921291 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="e424a8e6-64c8-4572-8706-33026a2cc44d" containerName="glance-httpd" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921301 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f2d77fd-b861-4589-bdb5-ad606deb3360" containerName="mariadb-account-delete" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921312 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="account-replicator" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921322 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="fdfeebc0-d50f-42f8-a461-b0aea7ba6a11" containerName="openstack-network-exporter" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921331 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e6870d5-faea-46d9-bebb-4d237b802910" containerName="nova-metadata-metadata" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921341 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0ab4194-18b4-4c6d-96b2-d7a4a85e17d6" containerName="ovsdb-server" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921352 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="fdfeebc0-d50f-42f8-a461-b0aea7ba6a11" containerName="ovn-northd" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921363 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="2db42625-4f7b-479c-a580-c94d6cafb3fe" containerName="nova-api-log" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921374 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="swift-recon-cron" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921383 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="object-expirer" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921392 4799 
memory_manager.go:354] "RemoveStaleState removing state" podUID="ac766919-d788-40da-879a-627919926594" containerName="barbican-api-log" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921402 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b59d111-5df2-4b9f-9d02-7a3f9e19d02c" containerName="mariadb-account-delete" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921413 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="971dd170-cc55-481f-b76d-820102f811cd" containerName="nova-cell1-conductor-conductor" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921421 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="baff4453-a6a2-4264-82b7-3ce7c22734f6" containerName="sg-core" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921432 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ae9763d-31dd-44c7-bf35-11a896a4f785" containerName="nova-scheduler-scheduler" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921442 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e6870d5-faea-46d9-bebb-4d237b802910" containerName="nova-metadata-log" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921453 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1d90c74-271d-45af-9c91-87250b178ca6" containerName="mariadb-account-delete" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921464 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="68ea0968-070a-41d4-b023-31557446c4dc" containerName="rsync" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921475 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="71374742-0685-4486-bb2d-97116af40765" containerName="mariadb-account-delete" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.921488 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a05167f-cd58-4f9f-806b-8d71271320d2" containerName="barbican-worker-log" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.922961 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5xwlk" Oct 10 16:55:10 crc kubenswrapper[4799]: I1010 16:55:10.937919 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5xwlk"] Oct 10 16:55:11 crc kubenswrapper[4799]: I1010 16:55:11.006556 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7e4d244c-c12c-4a02-a2d6-1fe805485a85-catalog-content\") pod \"redhat-operators-5xwlk\" (UID: \"7e4d244c-c12c-4a02-a2d6-1fe805485a85\") " pod="openshift-marketplace/redhat-operators-5xwlk" Oct 10 16:55:11 crc kubenswrapper[4799]: I1010 16:55:11.006798 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27f59\" (UniqueName: \"kubernetes.io/projected/7e4d244c-c12c-4a02-a2d6-1fe805485a85-kube-api-access-27f59\") pod \"redhat-operators-5xwlk\" (UID: \"7e4d244c-c12c-4a02-a2d6-1fe805485a85\") " pod="openshift-marketplace/redhat-operators-5xwlk" Oct 10 16:55:11 crc kubenswrapper[4799]: I1010 16:55:11.006916 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7e4d244c-c12c-4a02-a2d6-1fe805485a85-utilities\") pod \"redhat-operators-5xwlk\" (UID: \"7e4d244c-c12c-4a02-a2d6-1fe805485a85\") " pod="openshift-marketplace/redhat-operators-5xwlk" Oct 10 16:55:11 crc kubenswrapper[4799]: I1010 16:55:11.107360 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7e4d244c-c12c-4a02-a2d6-1fe805485a85-catalog-content\") pod \"redhat-operators-5xwlk\" (UID: \"7e4d244c-c12c-4a02-a2d6-1fe805485a85\") " pod="openshift-marketplace/redhat-operators-5xwlk" Oct 10 16:55:11 crc kubenswrapper[4799]: I1010 16:55:11.107439 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27f59\" (UniqueName: \"kubernetes.io/projected/7e4d244c-c12c-4a02-a2d6-1fe805485a85-kube-api-access-27f59\") pod \"redhat-operators-5xwlk\" (UID: \"7e4d244c-c12c-4a02-a2d6-1fe805485a85\") " pod="openshift-marketplace/redhat-operators-5xwlk" Oct 10 16:55:11 crc kubenswrapper[4799]: I1010 16:55:11.107494 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7e4d244c-c12c-4a02-a2d6-1fe805485a85-utilities\") pod \"redhat-operators-5xwlk\" (UID: \"7e4d244c-c12c-4a02-a2d6-1fe805485a85\") " pod="openshift-marketplace/redhat-operators-5xwlk" Oct 10 16:55:11 crc kubenswrapper[4799]: I1010 16:55:11.108021 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7e4d244c-c12c-4a02-a2d6-1fe805485a85-utilities\") pod \"redhat-operators-5xwlk\" (UID: \"7e4d244c-c12c-4a02-a2d6-1fe805485a85\") " pod="openshift-marketplace/redhat-operators-5xwlk" Oct 10 16:55:11 crc kubenswrapper[4799]: I1010 16:55:11.108177 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7e4d244c-c12c-4a02-a2d6-1fe805485a85-catalog-content\") pod \"redhat-operators-5xwlk\" (UID: \"7e4d244c-c12c-4a02-a2d6-1fe805485a85\") " pod="openshift-marketplace/redhat-operators-5xwlk" Oct 10 16:55:11 crc kubenswrapper[4799]: I1010 16:55:11.134499 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-27f59\" (UniqueName: \"kubernetes.io/projected/7e4d244c-c12c-4a02-a2d6-1fe805485a85-kube-api-access-27f59\") pod \"redhat-operators-5xwlk\" (UID: \"7e4d244c-c12c-4a02-a2d6-1fe805485a85\") " pod="openshift-marketplace/redhat-operators-5xwlk" Oct 10 16:55:11 crc kubenswrapper[4799]: I1010 16:55:11.244674 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5xwlk" Oct 10 16:55:11 crc kubenswrapper[4799]: I1010 16:55:11.516471 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5xwlk"] Oct 10 16:55:11 crc kubenswrapper[4799]: W1010 16:55:11.516923 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7e4d244c_c12c_4a02_a2d6_1fe805485a85.slice/crio-6b26cef733c7e66616540666418d870f7d8af663502f55c4b7a2037ef57177b7 WatchSource:0}: Error finding container 6b26cef733c7e66616540666418d870f7d8af663502f55c4b7a2037ef57177b7: Status 404 returned error can't find the container with id 6b26cef733c7e66616540666418d870f7d8af663502f55c4b7a2037ef57177b7 Oct 10 16:55:12 crc kubenswrapper[4799]: I1010 16:55:12.206852 4799 generic.go:334] "Generic (PLEG): container finished" podID="7e4d244c-c12c-4a02-a2d6-1fe805485a85" containerID="e1d6adcac57534d4b58a5a436217683417ad4591b6bcf3a1629a404dbe1991ca" exitCode=0 Oct 10 16:55:12 crc kubenswrapper[4799]: I1010 16:55:12.206916 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5xwlk" event={"ID":"7e4d244c-c12c-4a02-a2d6-1fe805485a85","Type":"ContainerDied","Data":"e1d6adcac57534d4b58a5a436217683417ad4591b6bcf3a1629a404dbe1991ca"} Oct 10 16:55:12 crc kubenswrapper[4799]: I1010 16:55:12.207133 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5xwlk" event={"ID":"7e4d244c-c12c-4a02-a2d6-1fe805485a85","Type":"ContainerStarted","Data":"6b26cef733c7e66616540666418d870f7d8af663502f55c4b7a2037ef57177b7"} Oct 10 16:55:12 crc kubenswrapper[4799]: I1010 16:55:12.209849 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 10 16:55:13 crc kubenswrapper[4799]: I1010 16:55:13.218917 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5xwlk" event={"ID":"7e4d244c-c12c-4a02-a2d6-1fe805485a85","Type":"ContainerStarted","Data":"9948b5860b59c5200cbae6f1ddad4ea8e981a79770fd15adbcecd83856db00f9"} Oct 10 16:55:14 crc kubenswrapper[4799]: I1010 16:55:14.229892 4799 generic.go:334] "Generic (PLEG): container finished" podID="7e4d244c-c12c-4a02-a2d6-1fe805485a85" containerID="9948b5860b59c5200cbae6f1ddad4ea8e981a79770fd15adbcecd83856db00f9" exitCode=0 Oct 10 16:55:14 crc kubenswrapper[4799]: I1010 16:55:14.229939 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5xwlk" event={"ID":"7e4d244c-c12c-4a02-a2d6-1fe805485a85","Type":"ContainerDied","Data":"9948b5860b59c5200cbae6f1ddad4ea8e981a79770fd15adbcecd83856db00f9"} Oct 10 16:55:15 crc kubenswrapper[4799]: I1010 16:55:15.245028 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5xwlk" event={"ID":"7e4d244c-c12c-4a02-a2d6-1fe805485a85","Type":"ContainerStarted","Data":"2c6e4ce6acf312be71891f90940a34c39b48ca2ce76dd93889b0b3295461d7c1"} Oct 10 16:55:15 crc kubenswrapper[4799]: I1010 16:55:15.271374 4799 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openshift-marketplace/redhat-operators-5xwlk" podStartSLOduration=2.627632742 podStartE2EDuration="5.271350892s" podCreationTimestamp="2025-10-10 16:55:10 +0000 UTC" firstStartedPulling="2025-10-10 16:55:12.209311627 +0000 UTC m=+1405.717635782" lastFinishedPulling="2025-10-10 16:55:14.853029777 +0000 UTC m=+1408.361353932" observedRunningTime="2025-10-10 16:55:15.267907557 +0000 UTC m=+1408.776231692" watchObservedRunningTime="2025-10-10 16:55:15.271350892 +0000 UTC m=+1408.779675017" Oct 10 16:55:21 crc kubenswrapper[4799]: I1010 16:55:21.245184 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5xwlk" Oct 10 16:55:21 crc kubenswrapper[4799]: I1010 16:55:21.246013 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5xwlk" Oct 10 16:55:21 crc kubenswrapper[4799]: I1010 16:55:21.325489 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5xwlk" Oct 10 16:55:21 crc kubenswrapper[4799]: I1010 16:55:21.415604 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5xwlk" Oct 10 16:55:21 crc kubenswrapper[4799]: I1010 16:55:21.597561 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5xwlk"] Oct 10 16:55:23 crc kubenswrapper[4799]: I1010 16:55:23.328603 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5xwlk" podUID="7e4d244c-c12c-4a02-a2d6-1fe805485a85" containerName="registry-server" containerID="cri-o://2c6e4ce6acf312be71891f90940a34c39b48ca2ce76dd93889b0b3295461d7c1" gracePeriod=2 Oct 10 16:55:23 crc kubenswrapper[4799]: I1010 16:55:23.808710 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5xwlk" Oct 10 16:55:23 crc kubenswrapper[4799]: I1010 16:55:23.917379 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7e4d244c-c12c-4a02-a2d6-1fe805485a85-catalog-content\") pod \"7e4d244c-c12c-4a02-a2d6-1fe805485a85\" (UID: \"7e4d244c-c12c-4a02-a2d6-1fe805485a85\") " Oct 10 16:55:23 crc kubenswrapper[4799]: I1010 16:55:23.917446 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7e4d244c-c12c-4a02-a2d6-1fe805485a85-utilities\") pod \"7e4d244c-c12c-4a02-a2d6-1fe805485a85\" (UID: \"7e4d244c-c12c-4a02-a2d6-1fe805485a85\") " Oct 10 16:55:23 crc kubenswrapper[4799]: I1010 16:55:23.917546 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-27f59\" (UniqueName: \"kubernetes.io/projected/7e4d244c-c12c-4a02-a2d6-1fe805485a85-kube-api-access-27f59\") pod \"7e4d244c-c12c-4a02-a2d6-1fe805485a85\" (UID: \"7e4d244c-c12c-4a02-a2d6-1fe805485a85\") " Oct 10 16:55:23 crc kubenswrapper[4799]: I1010 16:55:23.920824 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7e4d244c-c12c-4a02-a2d6-1fe805485a85-utilities" (OuterVolumeSpecName: "utilities") pod "7e4d244c-c12c-4a02-a2d6-1fe805485a85" (UID: "7e4d244c-c12c-4a02-a2d6-1fe805485a85"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:55:23 crc kubenswrapper[4799]: I1010 16:55:23.925512 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e4d244c-c12c-4a02-a2d6-1fe805485a85-kube-api-access-27f59" (OuterVolumeSpecName: "kube-api-access-27f59") pod "7e4d244c-c12c-4a02-a2d6-1fe805485a85" (UID: "7e4d244c-c12c-4a02-a2d6-1fe805485a85"). InnerVolumeSpecName "kube-api-access-27f59". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:55:24 crc kubenswrapper[4799]: I1010 16:55:24.020009 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-27f59\" (UniqueName: \"kubernetes.io/projected/7e4d244c-c12c-4a02-a2d6-1fe805485a85-kube-api-access-27f59\") on node \"crc\" DevicePath \"\"" Oct 10 16:55:24 crc kubenswrapper[4799]: I1010 16:55:24.020067 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7e4d244c-c12c-4a02-a2d6-1fe805485a85-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 16:55:24 crc kubenswrapper[4799]: I1010 16:55:24.027506 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7e4d244c-c12c-4a02-a2d6-1fe805485a85-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7e4d244c-c12c-4a02-a2d6-1fe805485a85" (UID: "7e4d244c-c12c-4a02-a2d6-1fe805485a85"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:55:24 crc kubenswrapper[4799]: I1010 16:55:24.120904 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7e4d244c-c12c-4a02-a2d6-1fe805485a85-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 16:55:24 crc kubenswrapper[4799]: I1010 16:55:24.344382 4799 generic.go:334] "Generic (PLEG): container finished" podID="7e4d244c-c12c-4a02-a2d6-1fe805485a85" containerID="2c6e4ce6acf312be71891f90940a34c39b48ca2ce76dd93889b0b3295461d7c1" exitCode=0 Oct 10 16:55:24 crc kubenswrapper[4799]: I1010 16:55:24.344447 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5xwlk" Oct 10 16:55:24 crc kubenswrapper[4799]: I1010 16:55:24.344492 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5xwlk" event={"ID":"7e4d244c-c12c-4a02-a2d6-1fe805485a85","Type":"ContainerDied","Data":"2c6e4ce6acf312be71891f90940a34c39b48ca2ce76dd93889b0b3295461d7c1"} Oct 10 16:55:24 crc kubenswrapper[4799]: I1010 16:55:24.344812 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5xwlk" event={"ID":"7e4d244c-c12c-4a02-a2d6-1fe805485a85","Type":"ContainerDied","Data":"6b26cef733c7e66616540666418d870f7d8af663502f55c4b7a2037ef57177b7"} Oct 10 16:55:24 crc kubenswrapper[4799]: I1010 16:55:24.344850 4799 scope.go:117] "RemoveContainer" containerID="2c6e4ce6acf312be71891f90940a34c39b48ca2ce76dd93889b0b3295461d7c1" Oct 10 16:55:24 crc kubenswrapper[4799]: I1010 16:55:24.385273 4799 scope.go:117] "RemoveContainer" containerID="9948b5860b59c5200cbae6f1ddad4ea8e981a79770fd15adbcecd83856db00f9" Oct 10 16:55:24 crc kubenswrapper[4799]: I1010 16:55:24.404252 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5xwlk"] Oct 10 16:55:24 crc kubenswrapper[4799]: I1010 16:55:24.412155 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-5xwlk"] Oct 10 16:55:24 crc kubenswrapper[4799]: I1010 16:55:24.440736 4799 scope.go:117] "RemoveContainer" containerID="e1d6adcac57534d4b58a5a436217683417ad4591b6bcf3a1629a404dbe1991ca" Oct 10 16:55:24 crc kubenswrapper[4799]: I1010 16:55:24.479441 4799 scope.go:117] "RemoveContainer" containerID="2c6e4ce6acf312be71891f90940a34c39b48ca2ce76dd93889b0b3295461d7c1" Oct 10 16:55:24 crc kubenswrapper[4799]: E1010 16:55:24.480031 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c6e4ce6acf312be71891f90940a34c39b48ca2ce76dd93889b0b3295461d7c1\": container with ID starting with 2c6e4ce6acf312be71891f90940a34c39b48ca2ce76dd93889b0b3295461d7c1 not found: ID does not exist" containerID="2c6e4ce6acf312be71891f90940a34c39b48ca2ce76dd93889b0b3295461d7c1" Oct 10 16:55:24 crc kubenswrapper[4799]: I1010 16:55:24.480087 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c6e4ce6acf312be71891f90940a34c39b48ca2ce76dd93889b0b3295461d7c1"} err="failed to get container status \"2c6e4ce6acf312be71891f90940a34c39b48ca2ce76dd93889b0b3295461d7c1\": rpc error: code = NotFound desc = could not find container \"2c6e4ce6acf312be71891f90940a34c39b48ca2ce76dd93889b0b3295461d7c1\": container with ID starting with 2c6e4ce6acf312be71891f90940a34c39b48ca2ce76dd93889b0b3295461d7c1 not found: ID does not exist" Oct 10 16:55:24 crc kubenswrapper[4799]: I1010 16:55:24.480112 4799 scope.go:117] "RemoveContainer" containerID="9948b5860b59c5200cbae6f1ddad4ea8e981a79770fd15adbcecd83856db00f9" Oct 10 16:55:24 crc kubenswrapper[4799]: E1010 16:55:24.480638 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9948b5860b59c5200cbae6f1ddad4ea8e981a79770fd15adbcecd83856db00f9\": container with ID starting with 9948b5860b59c5200cbae6f1ddad4ea8e981a79770fd15adbcecd83856db00f9 not found: ID does not exist" containerID="9948b5860b59c5200cbae6f1ddad4ea8e981a79770fd15adbcecd83856db00f9" Oct 10 16:55:24 crc kubenswrapper[4799]: I1010 16:55:24.480675 4799 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9948b5860b59c5200cbae6f1ddad4ea8e981a79770fd15adbcecd83856db00f9"} err="failed to get container status \"9948b5860b59c5200cbae6f1ddad4ea8e981a79770fd15adbcecd83856db00f9\": rpc error: code = NotFound desc = could not find container \"9948b5860b59c5200cbae6f1ddad4ea8e981a79770fd15adbcecd83856db00f9\": container with ID starting with 9948b5860b59c5200cbae6f1ddad4ea8e981a79770fd15adbcecd83856db00f9 not found: ID does not exist" Oct 10 16:55:24 crc kubenswrapper[4799]: I1010 16:55:24.480693 4799 scope.go:117] "RemoveContainer" containerID="e1d6adcac57534d4b58a5a436217683417ad4591b6bcf3a1629a404dbe1991ca" Oct 10 16:55:24 crc kubenswrapper[4799]: E1010 16:55:24.481195 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e1d6adcac57534d4b58a5a436217683417ad4591b6bcf3a1629a404dbe1991ca\": container with ID starting with e1d6adcac57534d4b58a5a436217683417ad4591b6bcf3a1629a404dbe1991ca not found: ID does not exist" containerID="e1d6adcac57534d4b58a5a436217683417ad4591b6bcf3a1629a404dbe1991ca" Oct 10 16:55:24 crc kubenswrapper[4799]: I1010 16:55:24.481239 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1d6adcac57534d4b58a5a436217683417ad4591b6bcf3a1629a404dbe1991ca"} err="failed to get container status \"e1d6adcac57534d4b58a5a436217683417ad4591b6bcf3a1629a404dbe1991ca\": rpc error: code = NotFound desc = could not find container \"e1d6adcac57534d4b58a5a436217683417ad4591b6bcf3a1629a404dbe1991ca\": container with ID starting with e1d6adcac57534d4b58a5a436217683417ad4591b6bcf3a1629a404dbe1991ca not found: ID does not exist" Oct 10 16:55:25 crc kubenswrapper[4799]: I1010 16:55:25.415978 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e4d244c-c12c-4a02-a2d6-1fe805485a85" path="/var/lib/kubelet/pods/7e4d244c-c12c-4a02-a2d6-1fe805485a85/volumes" Oct 10 16:55:37 crc kubenswrapper[4799]: I1010 16:55:37.578974 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rqgcb"] Oct 10 16:55:37 crc kubenswrapper[4799]: E1010 16:55:37.579898 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e4d244c-c12c-4a02-a2d6-1fe805485a85" containerName="registry-server" Oct 10 16:55:37 crc kubenswrapper[4799]: I1010 16:55:37.579917 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e4d244c-c12c-4a02-a2d6-1fe805485a85" containerName="registry-server" Oct 10 16:55:37 crc kubenswrapper[4799]: E1010 16:55:37.579939 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e4d244c-c12c-4a02-a2d6-1fe805485a85" containerName="extract-utilities" Oct 10 16:55:37 crc kubenswrapper[4799]: I1010 16:55:37.579948 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e4d244c-c12c-4a02-a2d6-1fe805485a85" containerName="extract-utilities" Oct 10 16:55:37 crc kubenswrapper[4799]: E1010 16:55:37.579964 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e4d244c-c12c-4a02-a2d6-1fe805485a85" containerName="extract-content" Oct 10 16:55:37 crc kubenswrapper[4799]: I1010 16:55:37.579973 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e4d244c-c12c-4a02-a2d6-1fe805485a85" containerName="extract-content" Oct 10 16:55:37 crc kubenswrapper[4799]: I1010 16:55:37.580181 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e4d244c-c12c-4a02-a2d6-1fe805485a85" 
containerName="registry-server" Oct 10 16:55:37 crc kubenswrapper[4799]: I1010 16:55:37.581375 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rqgcb" Oct 10 16:55:37 crc kubenswrapper[4799]: I1010 16:55:37.605180 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rqgcb"] Oct 10 16:55:37 crc kubenswrapper[4799]: I1010 16:55:37.639949 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20e74b81-9c23-4e57-85e2-92d4979c4da6-utilities\") pod \"certified-operators-rqgcb\" (UID: \"20e74b81-9c23-4e57-85e2-92d4979c4da6\") " pod="openshift-marketplace/certified-operators-rqgcb" Oct 10 16:55:37 crc kubenswrapper[4799]: I1010 16:55:37.640145 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vs9pk\" (UniqueName: \"kubernetes.io/projected/20e74b81-9c23-4e57-85e2-92d4979c4da6-kube-api-access-vs9pk\") pod \"certified-operators-rqgcb\" (UID: \"20e74b81-9c23-4e57-85e2-92d4979c4da6\") " pod="openshift-marketplace/certified-operators-rqgcb" Oct 10 16:55:37 crc kubenswrapper[4799]: I1010 16:55:37.640236 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20e74b81-9c23-4e57-85e2-92d4979c4da6-catalog-content\") pod \"certified-operators-rqgcb\" (UID: \"20e74b81-9c23-4e57-85e2-92d4979c4da6\") " pod="openshift-marketplace/certified-operators-rqgcb" Oct 10 16:55:37 crc kubenswrapper[4799]: I1010 16:55:37.741650 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vs9pk\" (UniqueName: \"kubernetes.io/projected/20e74b81-9c23-4e57-85e2-92d4979c4da6-kube-api-access-vs9pk\") pod \"certified-operators-rqgcb\" (UID: \"20e74b81-9c23-4e57-85e2-92d4979c4da6\") " pod="openshift-marketplace/certified-operators-rqgcb" Oct 10 16:55:37 crc kubenswrapper[4799]: I1010 16:55:37.742268 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20e74b81-9c23-4e57-85e2-92d4979c4da6-catalog-content\") pod \"certified-operators-rqgcb\" (UID: \"20e74b81-9c23-4e57-85e2-92d4979c4da6\") " pod="openshift-marketplace/certified-operators-rqgcb" Oct 10 16:55:37 crc kubenswrapper[4799]: I1010 16:55:37.742479 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20e74b81-9c23-4e57-85e2-92d4979c4da6-utilities\") pod \"certified-operators-rqgcb\" (UID: \"20e74b81-9c23-4e57-85e2-92d4979c4da6\") " pod="openshift-marketplace/certified-operators-rqgcb" Oct 10 16:55:37 crc kubenswrapper[4799]: I1010 16:55:37.742750 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20e74b81-9c23-4e57-85e2-92d4979c4da6-catalog-content\") pod \"certified-operators-rqgcb\" (UID: \"20e74b81-9c23-4e57-85e2-92d4979c4da6\") " pod="openshift-marketplace/certified-operators-rqgcb" Oct 10 16:55:37 crc kubenswrapper[4799]: I1010 16:55:37.742933 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20e74b81-9c23-4e57-85e2-92d4979c4da6-utilities\") pod \"certified-operators-rqgcb\" (UID: \"20e74b81-9c23-4e57-85e2-92d4979c4da6\") " 
pod="openshift-marketplace/certified-operators-rqgcb" Oct 10 16:55:37 crc kubenswrapper[4799]: I1010 16:55:37.771469 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vs9pk\" (UniqueName: \"kubernetes.io/projected/20e74b81-9c23-4e57-85e2-92d4979c4da6-kube-api-access-vs9pk\") pod \"certified-operators-rqgcb\" (UID: \"20e74b81-9c23-4e57-85e2-92d4979c4da6\") " pod="openshift-marketplace/certified-operators-rqgcb" Oct 10 16:55:37 crc kubenswrapper[4799]: I1010 16:55:37.918174 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rqgcb" Oct 10 16:55:38 crc kubenswrapper[4799]: I1010 16:55:38.424878 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rqgcb"] Oct 10 16:55:38 crc kubenswrapper[4799]: I1010 16:55:38.499795 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rqgcb" event={"ID":"20e74b81-9c23-4e57-85e2-92d4979c4da6","Type":"ContainerStarted","Data":"ffaba672fedb9d56483b6f3bd69f46271e204dede73f9f7e943cbb477947165a"} Oct 10 16:55:39 crc kubenswrapper[4799]: I1010 16:55:39.512548 4799 generic.go:334] "Generic (PLEG): container finished" podID="20e74b81-9c23-4e57-85e2-92d4979c4da6" containerID="4724151a2547fd7447db66758558f9103432958cda02728e30b043cc402aa356" exitCode=0 Oct 10 16:55:39 crc kubenswrapper[4799]: I1010 16:55:39.512591 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rqgcb" event={"ID":"20e74b81-9c23-4e57-85e2-92d4979c4da6","Type":"ContainerDied","Data":"4724151a2547fd7447db66758558f9103432958cda02728e30b043cc402aa356"} Oct 10 16:55:40 crc kubenswrapper[4799]: I1010 16:55:40.522898 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rqgcb" event={"ID":"20e74b81-9c23-4e57-85e2-92d4979c4da6","Type":"ContainerStarted","Data":"0481098e3faa249aa62bf8fba26ada43216c6290ccb4d2b783af5ca73bdf6c8c"} Oct 10 16:55:41 crc kubenswrapper[4799]: I1010 16:55:41.537076 4799 generic.go:334] "Generic (PLEG): container finished" podID="20e74b81-9c23-4e57-85e2-92d4979c4da6" containerID="0481098e3faa249aa62bf8fba26ada43216c6290ccb4d2b783af5ca73bdf6c8c" exitCode=0 Oct 10 16:55:41 crc kubenswrapper[4799]: I1010 16:55:41.537172 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rqgcb" event={"ID":"20e74b81-9c23-4e57-85e2-92d4979c4da6","Type":"ContainerDied","Data":"0481098e3faa249aa62bf8fba26ada43216c6290ccb4d2b783af5ca73bdf6c8c"} Oct 10 16:55:42 crc kubenswrapper[4799]: I1010 16:55:42.602409 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rqgcb" event={"ID":"20e74b81-9c23-4e57-85e2-92d4979c4da6","Type":"ContainerStarted","Data":"ff238c5740713cc7c5e28589c9d57b3ee8b20130c909dab34574fb0c616ba4bc"} Oct 10 16:55:42 crc kubenswrapper[4799]: I1010 16:55:42.643187 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rqgcb" podStartSLOduration=3.188767366 podStartE2EDuration="5.643161878s" podCreationTimestamp="2025-10-10 16:55:37 +0000 UTC" firstStartedPulling="2025-10-10 16:55:39.51431256 +0000 UTC m=+1433.022636715" lastFinishedPulling="2025-10-10 16:55:41.968707102 +0000 UTC m=+1435.477031227" observedRunningTime="2025-10-10 16:55:42.638291279 +0000 UTC m=+1436.146615394" watchObservedRunningTime="2025-10-10 16:55:42.643161878 
+0000 UTC m=+1436.151485993" Oct 10 16:55:47 crc kubenswrapper[4799]: I1010 16:55:47.918925 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-rqgcb" Oct 10 16:55:47 crc kubenswrapper[4799]: I1010 16:55:47.919253 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rqgcb" Oct 10 16:55:48 crc kubenswrapper[4799]: I1010 16:55:48.000085 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rqgcb" Oct 10 16:55:48 crc kubenswrapper[4799]: I1010 16:55:48.728747 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rqgcb" Oct 10 16:55:48 crc kubenswrapper[4799]: I1010 16:55:48.793248 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rqgcb"] Oct 10 16:55:50 crc kubenswrapper[4799]: I1010 16:55:50.694983 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-rqgcb" podUID="20e74b81-9c23-4e57-85e2-92d4979c4da6" containerName="registry-server" containerID="cri-o://ff238c5740713cc7c5e28589c9d57b3ee8b20130c909dab34574fb0c616ba4bc" gracePeriod=2 Oct 10 16:55:51 crc kubenswrapper[4799]: I1010 16:55:51.518981 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rqgcb" Oct 10 16:55:51 crc kubenswrapper[4799]: I1010 16:55:51.651546 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20e74b81-9c23-4e57-85e2-92d4979c4da6-catalog-content\") pod \"20e74b81-9c23-4e57-85e2-92d4979c4da6\" (UID: \"20e74b81-9c23-4e57-85e2-92d4979c4da6\") " Oct 10 16:55:51 crc kubenswrapper[4799]: I1010 16:55:51.651673 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20e74b81-9c23-4e57-85e2-92d4979c4da6-utilities\") pod \"20e74b81-9c23-4e57-85e2-92d4979c4da6\" (UID: \"20e74b81-9c23-4e57-85e2-92d4979c4da6\") " Oct 10 16:55:51 crc kubenswrapper[4799]: I1010 16:55:51.651746 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vs9pk\" (UniqueName: \"kubernetes.io/projected/20e74b81-9c23-4e57-85e2-92d4979c4da6-kube-api-access-vs9pk\") pod \"20e74b81-9c23-4e57-85e2-92d4979c4da6\" (UID: \"20e74b81-9c23-4e57-85e2-92d4979c4da6\") " Oct 10 16:55:51 crc kubenswrapper[4799]: I1010 16:55:51.654069 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20e74b81-9c23-4e57-85e2-92d4979c4da6-utilities" (OuterVolumeSpecName: "utilities") pod "20e74b81-9c23-4e57-85e2-92d4979c4da6" (UID: "20e74b81-9c23-4e57-85e2-92d4979c4da6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:55:51 crc kubenswrapper[4799]: I1010 16:55:51.661508 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20e74b81-9c23-4e57-85e2-92d4979c4da6-kube-api-access-vs9pk" (OuterVolumeSpecName: "kube-api-access-vs9pk") pod "20e74b81-9c23-4e57-85e2-92d4979c4da6" (UID: "20e74b81-9c23-4e57-85e2-92d4979c4da6"). InnerVolumeSpecName "kube-api-access-vs9pk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:55:51 crc kubenswrapper[4799]: I1010 16:55:51.710294 4799 generic.go:334] "Generic (PLEG): container finished" podID="20e74b81-9c23-4e57-85e2-92d4979c4da6" containerID="ff238c5740713cc7c5e28589c9d57b3ee8b20130c909dab34574fb0c616ba4bc" exitCode=0 Oct 10 16:55:51 crc kubenswrapper[4799]: I1010 16:55:51.710351 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rqgcb" event={"ID":"20e74b81-9c23-4e57-85e2-92d4979c4da6","Type":"ContainerDied","Data":"ff238c5740713cc7c5e28589c9d57b3ee8b20130c909dab34574fb0c616ba4bc"} Oct 10 16:55:51 crc kubenswrapper[4799]: I1010 16:55:51.710385 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rqgcb" event={"ID":"20e74b81-9c23-4e57-85e2-92d4979c4da6","Type":"ContainerDied","Data":"ffaba672fedb9d56483b6f3bd69f46271e204dede73f9f7e943cbb477947165a"} Oct 10 16:55:51 crc kubenswrapper[4799]: I1010 16:55:51.710411 4799 scope.go:117] "RemoveContainer" containerID="ff238c5740713cc7c5e28589c9d57b3ee8b20130c909dab34574fb0c616ba4bc" Oct 10 16:55:51 crc kubenswrapper[4799]: I1010 16:55:51.710587 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rqgcb" Oct 10 16:55:51 crc kubenswrapper[4799]: I1010 16:55:51.737146 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20e74b81-9c23-4e57-85e2-92d4979c4da6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "20e74b81-9c23-4e57-85e2-92d4979c4da6" (UID: "20e74b81-9c23-4e57-85e2-92d4979c4da6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:55:51 crc kubenswrapper[4799]: I1010 16:55:51.742938 4799 scope.go:117] "RemoveContainer" containerID="0481098e3faa249aa62bf8fba26ada43216c6290ccb4d2b783af5ca73bdf6c8c" Oct 10 16:55:51 crc kubenswrapper[4799]: I1010 16:55:51.757911 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20e74b81-9c23-4e57-85e2-92d4979c4da6-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 16:55:51 crc kubenswrapper[4799]: I1010 16:55:51.758113 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vs9pk\" (UniqueName: \"kubernetes.io/projected/20e74b81-9c23-4e57-85e2-92d4979c4da6-kube-api-access-vs9pk\") on node \"crc\" DevicePath \"\"" Oct 10 16:55:51 crc kubenswrapper[4799]: I1010 16:55:51.758225 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20e74b81-9c23-4e57-85e2-92d4979c4da6-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 16:55:51 crc kubenswrapper[4799]: I1010 16:55:51.780467 4799 scope.go:117] "RemoveContainer" containerID="4724151a2547fd7447db66758558f9103432958cda02728e30b043cc402aa356" Oct 10 16:55:51 crc kubenswrapper[4799]: I1010 16:55:51.821661 4799 scope.go:117] "RemoveContainer" containerID="ff238c5740713cc7c5e28589c9d57b3ee8b20130c909dab34574fb0c616ba4bc" Oct 10 16:55:51 crc kubenswrapper[4799]: E1010 16:55:51.822115 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff238c5740713cc7c5e28589c9d57b3ee8b20130c909dab34574fb0c616ba4bc\": container with ID starting with ff238c5740713cc7c5e28589c9d57b3ee8b20130c909dab34574fb0c616ba4bc not found: ID does not exist" 
containerID="ff238c5740713cc7c5e28589c9d57b3ee8b20130c909dab34574fb0c616ba4bc" Oct 10 16:55:51 crc kubenswrapper[4799]: I1010 16:55:51.822149 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff238c5740713cc7c5e28589c9d57b3ee8b20130c909dab34574fb0c616ba4bc"} err="failed to get container status \"ff238c5740713cc7c5e28589c9d57b3ee8b20130c909dab34574fb0c616ba4bc\": rpc error: code = NotFound desc = could not find container \"ff238c5740713cc7c5e28589c9d57b3ee8b20130c909dab34574fb0c616ba4bc\": container with ID starting with ff238c5740713cc7c5e28589c9d57b3ee8b20130c909dab34574fb0c616ba4bc not found: ID does not exist" Oct 10 16:55:51 crc kubenswrapper[4799]: I1010 16:55:51.822173 4799 scope.go:117] "RemoveContainer" containerID="0481098e3faa249aa62bf8fba26ada43216c6290ccb4d2b783af5ca73bdf6c8c" Oct 10 16:55:51 crc kubenswrapper[4799]: E1010 16:55:51.822388 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0481098e3faa249aa62bf8fba26ada43216c6290ccb4d2b783af5ca73bdf6c8c\": container with ID starting with 0481098e3faa249aa62bf8fba26ada43216c6290ccb4d2b783af5ca73bdf6c8c not found: ID does not exist" containerID="0481098e3faa249aa62bf8fba26ada43216c6290ccb4d2b783af5ca73bdf6c8c" Oct 10 16:55:51 crc kubenswrapper[4799]: I1010 16:55:51.822418 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0481098e3faa249aa62bf8fba26ada43216c6290ccb4d2b783af5ca73bdf6c8c"} err="failed to get container status \"0481098e3faa249aa62bf8fba26ada43216c6290ccb4d2b783af5ca73bdf6c8c\": rpc error: code = NotFound desc = could not find container \"0481098e3faa249aa62bf8fba26ada43216c6290ccb4d2b783af5ca73bdf6c8c\": container with ID starting with 0481098e3faa249aa62bf8fba26ada43216c6290ccb4d2b783af5ca73bdf6c8c not found: ID does not exist" Oct 10 16:55:51 crc kubenswrapper[4799]: I1010 16:55:51.822439 4799 scope.go:117] "RemoveContainer" containerID="4724151a2547fd7447db66758558f9103432958cda02728e30b043cc402aa356" Oct 10 16:55:51 crc kubenswrapper[4799]: E1010 16:55:51.822646 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4724151a2547fd7447db66758558f9103432958cda02728e30b043cc402aa356\": container with ID starting with 4724151a2547fd7447db66758558f9103432958cda02728e30b043cc402aa356 not found: ID does not exist" containerID="4724151a2547fd7447db66758558f9103432958cda02728e30b043cc402aa356" Oct 10 16:55:51 crc kubenswrapper[4799]: I1010 16:55:51.822674 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4724151a2547fd7447db66758558f9103432958cda02728e30b043cc402aa356"} err="failed to get container status \"4724151a2547fd7447db66758558f9103432958cda02728e30b043cc402aa356\": rpc error: code = NotFound desc = could not find container \"4724151a2547fd7447db66758558f9103432958cda02728e30b043cc402aa356\": container with ID starting with 4724151a2547fd7447db66758558f9103432958cda02728e30b043cc402aa356 not found: ID does not exist" Oct 10 16:55:52 crc kubenswrapper[4799]: I1010 16:55:52.036782 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rqgcb"] Oct 10 16:55:52 crc kubenswrapper[4799]: I1010 16:55:52.044793 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-rqgcb"] Oct 10 16:55:53 crc kubenswrapper[4799]: I1010 16:55:53.418408 
4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20e74b81-9c23-4e57-85e2-92d4979c4da6" path="/var/lib/kubelet/pods/20e74b81-9c23-4e57-85e2-92d4979c4da6/volumes" Oct 10 16:56:02 crc kubenswrapper[4799]: I1010 16:56:02.351931 4799 scope.go:117] "RemoveContainer" containerID="20ca13845dacc22922c1aceaf375f23c384acc1113f31fe2986a13feeabab05e" Oct 10 16:56:02 crc kubenswrapper[4799]: I1010 16:56:02.384921 4799 scope.go:117] "RemoveContainer" containerID="46d306b5bea3bc195ac0b91193e1f5645352cac207d4c286eb08fc864f88ab77" Oct 10 16:56:02 crc kubenswrapper[4799]: I1010 16:56:02.435567 4799 scope.go:117] "RemoveContainer" containerID="2252cc52e03872ad264363f4a4f2c8970a681e759e68c07793c60a2df2a41d55" Oct 10 16:56:02 crc kubenswrapper[4799]: I1010 16:56:02.461482 4799 scope.go:117] "RemoveContainer" containerID="31d6aa31490ff7aaded0daf0398838f4567228d35b367d267909a86536b475a5" Oct 10 16:56:02 crc kubenswrapper[4799]: I1010 16:56:02.488823 4799 scope.go:117] "RemoveContainer" containerID="95dc04655e0b7d260ecc540674c64342a8f66e0ec033ce83971f3cfb94ab556c" Oct 10 16:56:02 crc kubenswrapper[4799]: I1010 16:56:02.520676 4799 scope.go:117] "RemoveContainer" containerID="0c1e78ba8474ec6005a2d9c9475e87357ba32aba07afc86bc8dd92174c72576e" Oct 10 16:56:02 crc kubenswrapper[4799]: I1010 16:56:02.542420 4799 scope.go:117] "RemoveContainer" containerID="a771d17cd9b8383520937bd5d1de25a135fd8d690577e07820b96a54f49d1faa" Oct 10 16:56:02 crc kubenswrapper[4799]: I1010 16:56:02.565918 4799 scope.go:117] "RemoveContainer" containerID="8ed2fd28620331dafe1aae7fcdf98ed40484c59fd04f142204fdfc59012c7cc2" Oct 10 16:56:02 crc kubenswrapper[4799]: I1010 16:56:02.592011 4799 scope.go:117] "RemoveContainer" containerID="0468ca6c6abf7cc599e692ce9780b9732d1baafe60c9e8e67b98e76b296d2b35" Oct 10 16:56:02 crc kubenswrapper[4799]: I1010 16:56:02.613454 4799 scope.go:117] "RemoveContainer" containerID="ff8624f34fcb1a15fffee56784a5608f01adefaa3172b0477e52de09e0786400" Oct 10 16:56:02 crc kubenswrapper[4799]: I1010 16:56:02.638124 4799 scope.go:117] "RemoveContainer" containerID="ccf69d61dbdbccfd51b38731cf2fa1a5da19e5eec374fe7dcb94d8b10259b899" Oct 10 16:56:02 crc kubenswrapper[4799]: I1010 16:56:02.658372 4799 scope.go:117] "RemoveContainer" containerID="6a79ce82fdaad196267612d6635747e431f63cb7b8ed234aa89621488cd6187d" Oct 10 16:56:02 crc kubenswrapper[4799]: I1010 16:56:02.677236 4799 scope.go:117] "RemoveContainer" containerID="714553f7ebe432f9fd4eeeae273403d9c3ff673b9a55481665624e3067441b8c" Oct 10 16:56:15 crc kubenswrapper[4799]: I1010 16:56:15.249289 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 16:56:15 crc kubenswrapper[4799]: I1010 16:56:15.249968 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 16:56:19 crc kubenswrapper[4799]: I1010 16:56:19.511350 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-szrvb"] Oct 10 16:56:19 crc kubenswrapper[4799]: E1010 16:56:19.512143 4799 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="20e74b81-9c23-4e57-85e2-92d4979c4da6" containerName="registry-server" Oct 10 16:56:19 crc kubenswrapper[4799]: I1010 16:56:19.512169 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="20e74b81-9c23-4e57-85e2-92d4979c4da6" containerName="registry-server" Oct 10 16:56:19 crc kubenswrapper[4799]: E1010 16:56:19.512205 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20e74b81-9c23-4e57-85e2-92d4979c4da6" containerName="extract-content" Oct 10 16:56:19 crc kubenswrapper[4799]: I1010 16:56:19.512218 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="20e74b81-9c23-4e57-85e2-92d4979c4da6" containerName="extract-content" Oct 10 16:56:19 crc kubenswrapper[4799]: E1010 16:56:19.512247 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20e74b81-9c23-4e57-85e2-92d4979c4da6" containerName="extract-utilities" Oct 10 16:56:19 crc kubenswrapper[4799]: I1010 16:56:19.512260 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="20e74b81-9c23-4e57-85e2-92d4979c4da6" containerName="extract-utilities" Oct 10 16:56:19 crc kubenswrapper[4799]: I1010 16:56:19.512532 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="20e74b81-9c23-4e57-85e2-92d4979c4da6" containerName="registry-server" Oct 10 16:56:19 crc kubenswrapper[4799]: I1010 16:56:19.514256 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-szrvb" Oct 10 16:56:19 crc kubenswrapper[4799]: I1010 16:56:19.527158 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-szrvb"] Oct 10 16:56:19 crc kubenswrapper[4799]: I1010 16:56:19.707584 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7vrb\" (UniqueName: \"kubernetes.io/projected/da30d2d7-38bc-474b-84f0-64e4221ef0fb-kube-api-access-j7vrb\") pod \"community-operators-szrvb\" (UID: \"da30d2d7-38bc-474b-84f0-64e4221ef0fb\") " pod="openshift-marketplace/community-operators-szrvb" Oct 10 16:56:19 crc kubenswrapper[4799]: I1010 16:56:19.707663 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da30d2d7-38bc-474b-84f0-64e4221ef0fb-utilities\") pod \"community-operators-szrvb\" (UID: \"da30d2d7-38bc-474b-84f0-64e4221ef0fb\") " pod="openshift-marketplace/community-operators-szrvb" Oct 10 16:56:19 crc kubenswrapper[4799]: I1010 16:56:19.707697 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da30d2d7-38bc-474b-84f0-64e4221ef0fb-catalog-content\") pod \"community-operators-szrvb\" (UID: \"da30d2d7-38bc-474b-84f0-64e4221ef0fb\") " pod="openshift-marketplace/community-operators-szrvb" Oct 10 16:56:19 crc kubenswrapper[4799]: I1010 16:56:19.808719 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j7vrb\" (UniqueName: \"kubernetes.io/projected/da30d2d7-38bc-474b-84f0-64e4221ef0fb-kube-api-access-j7vrb\") pod \"community-operators-szrvb\" (UID: \"da30d2d7-38bc-474b-84f0-64e4221ef0fb\") " pod="openshift-marketplace/community-operators-szrvb" Oct 10 16:56:19 crc kubenswrapper[4799]: I1010 16:56:19.808825 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da30d2d7-38bc-474b-84f0-64e4221ef0fb-utilities\") 
pod \"community-operators-szrvb\" (UID: \"da30d2d7-38bc-474b-84f0-64e4221ef0fb\") " pod="openshift-marketplace/community-operators-szrvb" Oct 10 16:56:19 crc kubenswrapper[4799]: I1010 16:56:19.808857 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da30d2d7-38bc-474b-84f0-64e4221ef0fb-catalog-content\") pod \"community-operators-szrvb\" (UID: \"da30d2d7-38bc-474b-84f0-64e4221ef0fb\") " pod="openshift-marketplace/community-operators-szrvb" Oct 10 16:56:19 crc kubenswrapper[4799]: I1010 16:56:19.809417 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da30d2d7-38bc-474b-84f0-64e4221ef0fb-utilities\") pod \"community-operators-szrvb\" (UID: \"da30d2d7-38bc-474b-84f0-64e4221ef0fb\") " pod="openshift-marketplace/community-operators-szrvb" Oct 10 16:56:19 crc kubenswrapper[4799]: I1010 16:56:19.809436 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da30d2d7-38bc-474b-84f0-64e4221ef0fb-catalog-content\") pod \"community-operators-szrvb\" (UID: \"da30d2d7-38bc-474b-84f0-64e4221ef0fb\") " pod="openshift-marketplace/community-operators-szrvb" Oct 10 16:56:19 crc kubenswrapper[4799]: I1010 16:56:19.836682 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j7vrb\" (UniqueName: \"kubernetes.io/projected/da30d2d7-38bc-474b-84f0-64e4221ef0fb-kube-api-access-j7vrb\") pod \"community-operators-szrvb\" (UID: \"da30d2d7-38bc-474b-84f0-64e4221ef0fb\") " pod="openshift-marketplace/community-operators-szrvb" Oct 10 16:56:19 crc kubenswrapper[4799]: I1010 16:56:19.851208 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-szrvb" Oct 10 16:56:20 crc kubenswrapper[4799]: I1010 16:56:20.323177 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-szrvb"] Oct 10 16:56:21 crc kubenswrapper[4799]: I1010 16:56:21.052038 4799 generic.go:334] "Generic (PLEG): container finished" podID="da30d2d7-38bc-474b-84f0-64e4221ef0fb" containerID="3932a4d0abca7a3101ebadb9ad8390c5bacf8421ff1ca78e202750d4da805650" exitCode=0 Oct 10 16:56:21 crc kubenswrapper[4799]: I1010 16:56:21.052135 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-szrvb" event={"ID":"da30d2d7-38bc-474b-84f0-64e4221ef0fb","Type":"ContainerDied","Data":"3932a4d0abca7a3101ebadb9ad8390c5bacf8421ff1ca78e202750d4da805650"} Oct 10 16:56:21 crc kubenswrapper[4799]: I1010 16:56:21.052415 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-szrvb" event={"ID":"da30d2d7-38bc-474b-84f0-64e4221ef0fb","Type":"ContainerStarted","Data":"3113cd1c2fc1bab2d679678f300fab5ec1728808182718928a3144cf7ccc776a"} Oct 10 16:56:22 crc kubenswrapper[4799]: I1010 16:56:22.065316 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-szrvb" event={"ID":"da30d2d7-38bc-474b-84f0-64e4221ef0fb","Type":"ContainerStarted","Data":"d04ba219e9ee7df67bb8284325efffa00fa7b68804c0d1a4bbdd08fb22275643"} Oct 10 16:56:23 crc kubenswrapper[4799]: I1010 16:56:23.079596 4799 generic.go:334] "Generic (PLEG): container finished" podID="da30d2d7-38bc-474b-84f0-64e4221ef0fb" containerID="d04ba219e9ee7df67bb8284325efffa00fa7b68804c0d1a4bbdd08fb22275643" exitCode=0 Oct 10 16:56:23 crc kubenswrapper[4799]: I1010 16:56:23.079665 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-szrvb" event={"ID":"da30d2d7-38bc-474b-84f0-64e4221ef0fb","Type":"ContainerDied","Data":"d04ba219e9ee7df67bb8284325efffa00fa7b68804c0d1a4bbdd08fb22275643"} Oct 10 16:56:24 crc kubenswrapper[4799]: I1010 16:56:24.090488 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-szrvb" event={"ID":"da30d2d7-38bc-474b-84f0-64e4221ef0fb","Type":"ContainerStarted","Data":"febeb45f2bcf81fdd39735aa9db85c98a683ed3499efc3a41b5d45e2f10a5153"} Oct 10 16:56:29 crc kubenswrapper[4799]: I1010 16:56:29.851878 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-szrvb" Oct 10 16:56:29 crc kubenswrapper[4799]: I1010 16:56:29.852802 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-szrvb" Oct 10 16:56:29 crc kubenswrapper[4799]: I1010 16:56:29.914936 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-szrvb" Oct 10 16:56:29 crc kubenswrapper[4799]: I1010 16:56:29.939970 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-szrvb" podStartSLOduration=8.403145719 podStartE2EDuration="10.939947816s" podCreationTimestamp="2025-10-10 16:56:19 +0000 UTC" firstStartedPulling="2025-10-10 16:56:21.054761767 +0000 UTC m=+1474.563085912" lastFinishedPulling="2025-10-10 16:56:23.591563884 +0000 UTC m=+1477.099888009" observedRunningTime="2025-10-10 16:56:24.120780939 +0000 UTC m=+1477.629105054" watchObservedRunningTime="2025-10-10 16:56:29.939947816 
+0000 UTC m=+1483.448271951" Oct 10 16:56:30 crc kubenswrapper[4799]: I1010 16:56:30.192068 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-szrvb" Oct 10 16:56:30 crc kubenswrapper[4799]: I1010 16:56:30.248357 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-szrvb"] Oct 10 16:56:32 crc kubenswrapper[4799]: I1010 16:56:32.159169 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-szrvb" podUID="da30d2d7-38bc-474b-84f0-64e4221ef0fb" containerName="registry-server" containerID="cri-o://febeb45f2bcf81fdd39735aa9db85c98a683ed3499efc3a41b5d45e2f10a5153" gracePeriod=2 Oct 10 16:56:32 crc kubenswrapper[4799]: I1010 16:56:32.610950 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-szrvb" Oct 10 16:56:32 crc kubenswrapper[4799]: I1010 16:56:32.799517 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da30d2d7-38bc-474b-84f0-64e4221ef0fb-utilities\") pod \"da30d2d7-38bc-474b-84f0-64e4221ef0fb\" (UID: \"da30d2d7-38bc-474b-84f0-64e4221ef0fb\") " Oct 10 16:56:32 crc kubenswrapper[4799]: I1010 16:56:32.800018 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da30d2d7-38bc-474b-84f0-64e4221ef0fb-catalog-content\") pod \"da30d2d7-38bc-474b-84f0-64e4221ef0fb\" (UID: \"da30d2d7-38bc-474b-84f0-64e4221ef0fb\") " Oct 10 16:56:32 crc kubenswrapper[4799]: I1010 16:56:32.800224 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j7vrb\" (UniqueName: \"kubernetes.io/projected/da30d2d7-38bc-474b-84f0-64e4221ef0fb-kube-api-access-j7vrb\") pod \"da30d2d7-38bc-474b-84f0-64e4221ef0fb\" (UID: \"da30d2d7-38bc-474b-84f0-64e4221ef0fb\") " Oct 10 16:56:32 crc kubenswrapper[4799]: I1010 16:56:32.801274 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/da30d2d7-38bc-474b-84f0-64e4221ef0fb-utilities" (OuterVolumeSpecName: "utilities") pod "da30d2d7-38bc-474b-84f0-64e4221ef0fb" (UID: "da30d2d7-38bc-474b-84f0-64e4221ef0fb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:56:32 crc kubenswrapper[4799]: I1010 16:56:32.813102 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da30d2d7-38bc-474b-84f0-64e4221ef0fb-kube-api-access-j7vrb" (OuterVolumeSpecName: "kube-api-access-j7vrb") pod "da30d2d7-38bc-474b-84f0-64e4221ef0fb" (UID: "da30d2d7-38bc-474b-84f0-64e4221ef0fb"). InnerVolumeSpecName "kube-api-access-j7vrb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 16:56:32 crc kubenswrapper[4799]: I1010 16:56:32.902305 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j7vrb\" (UniqueName: \"kubernetes.io/projected/da30d2d7-38bc-474b-84f0-64e4221ef0fb-kube-api-access-j7vrb\") on node \"crc\" DevicePath \"\"" Oct 10 16:56:32 crc kubenswrapper[4799]: I1010 16:56:32.902353 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da30d2d7-38bc-474b-84f0-64e4221ef0fb-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 16:56:32 crc kubenswrapper[4799]: I1010 16:56:32.956347 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/da30d2d7-38bc-474b-84f0-64e4221ef0fb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "da30d2d7-38bc-474b-84f0-64e4221ef0fb" (UID: "da30d2d7-38bc-474b-84f0-64e4221ef0fb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 16:56:33 crc kubenswrapper[4799]: I1010 16:56:33.003686 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da30d2d7-38bc-474b-84f0-64e4221ef0fb-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 16:56:33 crc kubenswrapper[4799]: I1010 16:56:33.173459 4799 generic.go:334] "Generic (PLEG): container finished" podID="da30d2d7-38bc-474b-84f0-64e4221ef0fb" containerID="febeb45f2bcf81fdd39735aa9db85c98a683ed3499efc3a41b5d45e2f10a5153" exitCode=0 Oct 10 16:56:33 crc kubenswrapper[4799]: I1010 16:56:33.173520 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-szrvb" event={"ID":"da30d2d7-38bc-474b-84f0-64e4221ef0fb","Type":"ContainerDied","Data":"febeb45f2bcf81fdd39735aa9db85c98a683ed3499efc3a41b5d45e2f10a5153"} Oct 10 16:56:33 crc kubenswrapper[4799]: I1010 16:56:33.173566 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-szrvb" Oct 10 16:56:33 crc kubenswrapper[4799]: I1010 16:56:33.173587 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-szrvb" event={"ID":"da30d2d7-38bc-474b-84f0-64e4221ef0fb","Type":"ContainerDied","Data":"3113cd1c2fc1bab2d679678f300fab5ec1728808182718928a3144cf7ccc776a"} Oct 10 16:56:33 crc kubenswrapper[4799]: I1010 16:56:33.173623 4799 scope.go:117] "RemoveContainer" containerID="febeb45f2bcf81fdd39735aa9db85c98a683ed3499efc3a41b5d45e2f10a5153" Oct 10 16:56:33 crc kubenswrapper[4799]: I1010 16:56:33.230948 4799 scope.go:117] "RemoveContainer" containerID="d04ba219e9ee7df67bb8284325efffa00fa7b68804c0d1a4bbdd08fb22275643" Oct 10 16:56:33 crc kubenswrapper[4799]: I1010 16:56:33.232714 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-szrvb"] Oct 10 16:56:33 crc kubenswrapper[4799]: I1010 16:56:33.242864 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-szrvb"] Oct 10 16:56:33 crc kubenswrapper[4799]: I1010 16:56:33.286955 4799 scope.go:117] "RemoveContainer" containerID="3932a4d0abca7a3101ebadb9ad8390c5bacf8421ff1ca78e202750d4da805650" Oct 10 16:56:33 crc kubenswrapper[4799]: I1010 16:56:33.323277 4799 scope.go:117] "RemoveContainer" containerID="febeb45f2bcf81fdd39735aa9db85c98a683ed3499efc3a41b5d45e2f10a5153" Oct 10 16:56:33 crc kubenswrapper[4799]: E1010 16:56:33.323953 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"febeb45f2bcf81fdd39735aa9db85c98a683ed3499efc3a41b5d45e2f10a5153\": container with ID starting with febeb45f2bcf81fdd39735aa9db85c98a683ed3499efc3a41b5d45e2f10a5153 not found: ID does not exist" containerID="febeb45f2bcf81fdd39735aa9db85c98a683ed3499efc3a41b5d45e2f10a5153" Oct 10 16:56:33 crc kubenswrapper[4799]: I1010 16:56:33.323991 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"febeb45f2bcf81fdd39735aa9db85c98a683ed3499efc3a41b5d45e2f10a5153"} err="failed to get container status \"febeb45f2bcf81fdd39735aa9db85c98a683ed3499efc3a41b5d45e2f10a5153\": rpc error: code = NotFound desc = could not find container \"febeb45f2bcf81fdd39735aa9db85c98a683ed3499efc3a41b5d45e2f10a5153\": container with ID starting with febeb45f2bcf81fdd39735aa9db85c98a683ed3499efc3a41b5d45e2f10a5153 not found: ID does not exist" Oct 10 16:56:33 crc kubenswrapper[4799]: I1010 16:56:33.324023 4799 scope.go:117] "RemoveContainer" containerID="d04ba219e9ee7df67bb8284325efffa00fa7b68804c0d1a4bbdd08fb22275643" Oct 10 16:56:33 crc kubenswrapper[4799]: E1010 16:56:33.324700 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d04ba219e9ee7df67bb8284325efffa00fa7b68804c0d1a4bbdd08fb22275643\": container with ID starting with d04ba219e9ee7df67bb8284325efffa00fa7b68804c0d1a4bbdd08fb22275643 not found: ID does not exist" containerID="d04ba219e9ee7df67bb8284325efffa00fa7b68804c0d1a4bbdd08fb22275643" Oct 10 16:56:33 crc kubenswrapper[4799]: I1010 16:56:33.324802 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d04ba219e9ee7df67bb8284325efffa00fa7b68804c0d1a4bbdd08fb22275643"} err="failed to get container status \"d04ba219e9ee7df67bb8284325efffa00fa7b68804c0d1a4bbdd08fb22275643\": rpc error: code = NotFound desc = could not find 
container \"d04ba219e9ee7df67bb8284325efffa00fa7b68804c0d1a4bbdd08fb22275643\": container with ID starting with d04ba219e9ee7df67bb8284325efffa00fa7b68804c0d1a4bbdd08fb22275643 not found: ID does not exist" Oct 10 16:56:33 crc kubenswrapper[4799]: I1010 16:56:33.324850 4799 scope.go:117] "RemoveContainer" containerID="3932a4d0abca7a3101ebadb9ad8390c5bacf8421ff1ca78e202750d4da805650" Oct 10 16:56:33 crc kubenswrapper[4799]: E1010 16:56:33.325347 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3932a4d0abca7a3101ebadb9ad8390c5bacf8421ff1ca78e202750d4da805650\": container with ID starting with 3932a4d0abca7a3101ebadb9ad8390c5bacf8421ff1ca78e202750d4da805650 not found: ID does not exist" containerID="3932a4d0abca7a3101ebadb9ad8390c5bacf8421ff1ca78e202750d4da805650" Oct 10 16:56:33 crc kubenswrapper[4799]: I1010 16:56:33.325388 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3932a4d0abca7a3101ebadb9ad8390c5bacf8421ff1ca78e202750d4da805650"} err="failed to get container status \"3932a4d0abca7a3101ebadb9ad8390c5bacf8421ff1ca78e202750d4da805650\": rpc error: code = NotFound desc = could not find container \"3932a4d0abca7a3101ebadb9ad8390c5bacf8421ff1ca78e202750d4da805650\": container with ID starting with 3932a4d0abca7a3101ebadb9ad8390c5bacf8421ff1ca78e202750d4da805650 not found: ID does not exist" Oct 10 16:56:33 crc kubenswrapper[4799]: I1010 16:56:33.425914 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da30d2d7-38bc-474b-84f0-64e4221ef0fb" path="/var/lib/kubelet/pods/da30d2d7-38bc-474b-84f0-64e4221ef0fb/volumes" Oct 10 16:56:45 crc kubenswrapper[4799]: I1010 16:56:45.248472 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 16:56:45 crc kubenswrapper[4799]: I1010 16:56:45.249006 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 16:57:02 crc kubenswrapper[4799]: I1010 16:57:02.936473 4799 scope.go:117] "RemoveContainer" containerID="7c99ddca75e3e8ee09f4a867e51dabb2d051c4f46efe616d1137e55c2a018e7e" Oct 10 16:57:02 crc kubenswrapper[4799]: I1010 16:57:02.986849 4799 scope.go:117] "RemoveContainer" containerID="30ebb4c5a6f4b490de87b23dfbca83edb669e1b06479d40a92b5b82a0cd80d33" Oct 10 16:57:03 crc kubenswrapper[4799]: I1010 16:57:03.046467 4799 scope.go:117] "RemoveContainer" containerID="98ec02c95b376af3e09c59e35caeec0f9973d9de6538cd288bc37643ef1c1cff" Oct 10 16:57:03 crc kubenswrapper[4799]: I1010 16:57:03.067856 4799 scope.go:117] "RemoveContainer" containerID="1ba620724d6ec0019ebffc23f81702b7d8bb95b7aecda8d154251b288a4a2c53" Oct 10 16:57:03 crc kubenswrapper[4799]: I1010 16:57:03.104927 4799 scope.go:117] "RemoveContainer" containerID="a72c77a71b8c17e28293a02f3a365989a67c30c588ba4cdf5ef928ea4ed49719" Oct 10 16:57:03 crc kubenswrapper[4799]: I1010 16:57:03.131749 4799 scope.go:117] "RemoveContainer" containerID="9130b5b2922996b3ae5e11a6833e22b47fbd0cb063fb0cb83a0b3202990aa273" Oct 10 16:57:03 crc 
kubenswrapper[4799]: I1010 16:57:03.184870 4799 scope.go:117] "RemoveContainer" containerID="721a6c184700479f8f430832a3bec901fd25498e982805e28af8c51139ef2304" Oct 10 16:57:03 crc kubenswrapper[4799]: I1010 16:57:03.211451 4799 scope.go:117] "RemoveContainer" containerID="125ba823d513b27ef11a444f22553a11ba55c0f70d03679b66ec233ce953c8e0" Oct 10 16:57:03 crc kubenswrapper[4799]: I1010 16:57:03.232437 4799 scope.go:117] "RemoveContainer" containerID="4a7f26c0a29102620ae44684ad6b914b3b0a0afa490ea3c581dc7f606034046b" Oct 10 16:57:03 crc kubenswrapper[4799]: I1010 16:57:03.261876 4799 scope.go:117] "RemoveContainer" containerID="194dec3f34924527e2cec1db990873105b661b96fad790933950a8b6a9a87518" Oct 10 16:57:03 crc kubenswrapper[4799]: I1010 16:57:03.320986 4799 scope.go:117] "RemoveContainer" containerID="7521cb801e0a8accc208d2be60734cccb2b3fe5514d504e26c34084683264aee" Oct 10 16:57:03 crc kubenswrapper[4799]: I1010 16:57:03.368408 4799 scope.go:117] "RemoveContainer" containerID="e0f865b58ad5a365298ef3d503d2862f51abaae4b78ba76578a371ef732177bd" Oct 10 16:57:03 crc kubenswrapper[4799]: I1010 16:57:03.391865 4799 scope.go:117] "RemoveContainer" containerID="7ac4115cc5be558a70d0208dfd901d2138a9ae99495f1c58e108568e2e2fac0e" Oct 10 16:57:15 crc kubenswrapper[4799]: I1010 16:57:15.248790 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 16:57:15 crc kubenswrapper[4799]: I1010 16:57:15.249467 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 16:57:15 crc kubenswrapper[4799]: I1010 16:57:15.249525 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 16:57:15 crc kubenswrapper[4799]: I1010 16:57:15.250488 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"eb9efabd31e0bc119cd431c6228f2fbf6db48806c09df2881081667d9ddd75e0"} pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 10 16:57:15 crc kubenswrapper[4799]: I1010 16:57:15.250584 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" containerID="cri-o://eb9efabd31e0bc119cd431c6228f2fbf6db48806c09df2881081667d9ddd75e0" gracePeriod=600 Oct 10 16:57:15 crc kubenswrapper[4799]: E1010 16:57:15.397680 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 16:57:15 crc kubenswrapper[4799]: I1010 
16:57:15.628604 4799 generic.go:334] "Generic (PLEG): container finished" podID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerID="eb9efabd31e0bc119cd431c6228f2fbf6db48806c09df2881081667d9ddd75e0" exitCode=0 Oct 10 16:57:15 crc kubenswrapper[4799]: I1010 16:57:15.628714 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerDied","Data":"eb9efabd31e0bc119cd431c6228f2fbf6db48806c09df2881081667d9ddd75e0"} Oct 10 16:57:15 crc kubenswrapper[4799]: I1010 16:57:15.628850 4799 scope.go:117] "RemoveContainer" containerID="145ba828d4b654e155342b2053228303da0bf7c989b77f4342b3cbafaea6b6c8" Oct 10 16:57:15 crc kubenswrapper[4799]: I1010 16:57:15.629363 4799 scope.go:117] "RemoveContainer" containerID="eb9efabd31e0bc119cd431c6228f2fbf6db48806c09df2881081667d9ddd75e0" Oct 10 16:57:15 crc kubenswrapper[4799]: E1010 16:57:15.629886 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 16:57:29 crc kubenswrapper[4799]: I1010 16:57:29.402747 4799 scope.go:117] "RemoveContainer" containerID="eb9efabd31e0bc119cd431c6228f2fbf6db48806c09df2881081667d9ddd75e0" Oct 10 16:57:29 crc kubenswrapper[4799]: E1010 16:57:29.403519 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 16:57:40 crc kubenswrapper[4799]: I1010 16:57:40.402218 4799 scope.go:117] "RemoveContainer" containerID="eb9efabd31e0bc119cd431c6228f2fbf6db48806c09df2881081667d9ddd75e0" Oct 10 16:57:40 crc kubenswrapper[4799]: E1010 16:57:40.402985 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 16:57:51 crc kubenswrapper[4799]: I1010 16:57:51.403377 4799 scope.go:117] "RemoveContainer" containerID="eb9efabd31e0bc119cd431c6228f2fbf6db48806c09df2881081667d9ddd75e0" Oct 10 16:57:51 crc kubenswrapper[4799]: E1010 16:57:51.404833 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 16:58:03 crc kubenswrapper[4799]: I1010 16:58:03.694804 4799 scope.go:117] "RemoveContainer" 
containerID="cba2769cec2ab6be62ce3ab6c11812ca168c95b6a4c44b2275fc0c2801c3b3a7" Oct 10 16:58:03 crc kubenswrapper[4799]: I1010 16:58:03.735670 4799 scope.go:117] "RemoveContainer" containerID="3d6a3a8b4b49974d5c09eeb8c6ec4a2ebb70c3d484c47ca792ba983856130907" Oct 10 16:58:03 crc kubenswrapper[4799]: I1010 16:58:03.783419 4799 scope.go:117] "RemoveContainer" containerID="d62e15674081e38d400533f852a94a64631beca6244fed0891de2bc949a8005b" Oct 10 16:58:03 crc kubenswrapper[4799]: I1010 16:58:03.849928 4799 scope.go:117] "RemoveContainer" containerID="1e2bb89cca75275f38d0737f645f0b33d216e5600663a5ffc8d93f24601d9e28" Oct 10 16:58:03 crc kubenswrapper[4799]: I1010 16:58:03.880286 4799 scope.go:117] "RemoveContainer" containerID="6a4a38abddd264e6f436a27a2422c3d7dee920ced5f32af3275fc86edc0ea42c" Oct 10 16:58:03 crc kubenswrapper[4799]: I1010 16:58:03.906966 4799 scope.go:117] "RemoveContainer" containerID="b23ac04dd50a4eaaa66ae053dec6bce7db1f6a62a4f25b005e5b7204ecb4bdc1" Oct 10 16:58:03 crc kubenswrapper[4799]: I1010 16:58:03.970965 4799 scope.go:117] "RemoveContainer" containerID="cc3bf43932097b81efc1b1d49b76a3c7a6d6672678274efa32ce397549e1ff65" Oct 10 16:58:03 crc kubenswrapper[4799]: I1010 16:58:03.996637 4799 scope.go:117] "RemoveContainer" containerID="a5a1b6e00a35ec28b0a11cef63bf27aa74edf00ead5c5dff888593622c9a0138" Oct 10 16:58:04 crc kubenswrapper[4799]: I1010 16:58:04.023385 4799 scope.go:117] "RemoveContainer" containerID="733db5a009878bd263eb72d80af0d8d8c163d3807ed5234db056107575883b22" Oct 10 16:58:04 crc kubenswrapper[4799]: I1010 16:58:04.047734 4799 scope.go:117] "RemoveContainer" containerID="a7744f1f6b9b90abac5951b974c22a05f9b743a15b598bd4cd1b427036db5928" Oct 10 16:58:04 crc kubenswrapper[4799]: I1010 16:58:04.072065 4799 scope.go:117] "RemoveContainer" containerID="02155113288bc0bf63cf3a3084f2f7ed5580d5f5ad54aca68e4fb0b10bb08a28" Oct 10 16:58:04 crc kubenswrapper[4799]: I1010 16:58:04.089623 4799 scope.go:117] "RemoveContainer" containerID="92ceb7a81421c011199b208f71ac06ee53a9dc28e8d3ab70e4e869c6e631df9b" Oct 10 16:58:04 crc kubenswrapper[4799]: I1010 16:58:04.106094 4799 scope.go:117] "RemoveContainer" containerID="e433847b37d87c8813ed78b7c50a3a66b8a762a349bba6c0f50b139812bbe003" Oct 10 16:58:04 crc kubenswrapper[4799]: I1010 16:58:04.125566 4799 scope.go:117] "RemoveContainer" containerID="45d51a08521515637f5b1846723d166ebcfd370a8e928d653837e32fd1bdcaff" Oct 10 16:58:05 crc kubenswrapper[4799]: I1010 16:58:05.402726 4799 scope.go:117] "RemoveContainer" containerID="eb9efabd31e0bc119cd431c6228f2fbf6db48806c09df2881081667d9ddd75e0" Oct 10 16:58:05 crc kubenswrapper[4799]: E1010 16:58:05.404309 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 16:58:20 crc kubenswrapper[4799]: I1010 16:58:20.403075 4799 scope.go:117] "RemoveContainer" containerID="eb9efabd31e0bc119cd431c6228f2fbf6db48806c09df2881081667d9ddd75e0" Oct 10 16:58:20 crc kubenswrapper[4799]: E1010 16:58:20.403910 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 16:58:32 crc kubenswrapper[4799]: I1010 16:58:32.402438 4799 scope.go:117] "RemoveContainer" containerID="eb9efabd31e0bc119cd431c6228f2fbf6db48806c09df2881081667d9ddd75e0" Oct 10 16:58:32 crc kubenswrapper[4799]: E1010 16:58:32.403561 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 16:58:46 crc kubenswrapper[4799]: I1010 16:58:46.403227 4799 scope.go:117] "RemoveContainer" containerID="eb9efabd31e0bc119cd431c6228f2fbf6db48806c09df2881081667d9ddd75e0" Oct 10 16:58:46 crc kubenswrapper[4799]: E1010 16:58:46.404314 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 16:58:59 crc kubenswrapper[4799]: I1010 16:58:59.403134 4799 scope.go:117] "RemoveContainer" containerID="eb9efabd31e0bc119cd431c6228f2fbf6db48806c09df2881081667d9ddd75e0" Oct 10 16:58:59 crc kubenswrapper[4799]: E1010 16:58:59.404005 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 16:59:04 crc kubenswrapper[4799]: I1010 16:59:04.350949 4799 scope.go:117] "RemoveContainer" containerID="7bf6c7d3613def6536265da4a2ef17104e40af69add4759f9d5cb3a1dff047ff" Oct 10 16:59:04 crc kubenswrapper[4799]: I1010 16:59:04.394529 4799 scope.go:117] "RemoveContainer" containerID="e41896910a44ed236459449a6ff81d15407ef8ae9a64ddb6a35d2d9e2aa86ba0" Oct 10 16:59:04 crc kubenswrapper[4799]: I1010 16:59:04.429521 4799 scope.go:117] "RemoveContainer" containerID="addd6c933147fa3bde0b56efc182d2aac6eb56fac50867086fb25a7bb39e07ca" Oct 10 16:59:13 crc kubenswrapper[4799]: I1010 16:59:13.403190 4799 scope.go:117] "RemoveContainer" containerID="eb9efabd31e0bc119cd431c6228f2fbf6db48806c09df2881081667d9ddd75e0" Oct 10 16:59:13 crc kubenswrapper[4799]: E1010 16:59:13.404269 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 16:59:25 crc kubenswrapper[4799]: I1010 16:59:25.403234 4799 scope.go:117] "RemoveContainer" 
containerID="eb9efabd31e0bc119cd431c6228f2fbf6db48806c09df2881081667d9ddd75e0" Oct 10 16:59:25 crc kubenswrapper[4799]: E1010 16:59:25.404438 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 16:59:38 crc kubenswrapper[4799]: I1010 16:59:38.402106 4799 scope.go:117] "RemoveContainer" containerID="eb9efabd31e0bc119cd431c6228f2fbf6db48806c09df2881081667d9ddd75e0" Oct 10 16:59:38 crc kubenswrapper[4799]: E1010 16:59:38.404211 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 16:59:53 crc kubenswrapper[4799]: I1010 16:59:53.408826 4799 scope.go:117] "RemoveContainer" containerID="eb9efabd31e0bc119cd431c6228f2fbf6db48806c09df2881081667d9ddd75e0" Oct 10 16:59:53 crc kubenswrapper[4799]: E1010 16:59:53.409499 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:00:00 crc kubenswrapper[4799]: I1010 17:00:00.170746 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335260-bpk9t"] Oct 10 17:00:00 crc kubenswrapper[4799]: E1010 17:00:00.171800 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da30d2d7-38bc-474b-84f0-64e4221ef0fb" containerName="extract-content" Oct 10 17:00:00 crc kubenswrapper[4799]: I1010 17:00:00.171830 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="da30d2d7-38bc-474b-84f0-64e4221ef0fb" containerName="extract-content" Oct 10 17:00:00 crc kubenswrapper[4799]: E1010 17:00:00.171862 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da30d2d7-38bc-474b-84f0-64e4221ef0fb" containerName="extract-utilities" Oct 10 17:00:00 crc kubenswrapper[4799]: I1010 17:00:00.171880 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="da30d2d7-38bc-474b-84f0-64e4221ef0fb" containerName="extract-utilities" Oct 10 17:00:00 crc kubenswrapper[4799]: E1010 17:00:00.171934 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da30d2d7-38bc-474b-84f0-64e4221ef0fb" containerName="registry-server" Oct 10 17:00:00 crc kubenswrapper[4799]: I1010 17:00:00.171950 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="da30d2d7-38bc-474b-84f0-64e4221ef0fb" containerName="registry-server" Oct 10 17:00:00 crc kubenswrapper[4799]: I1010 17:00:00.172236 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="da30d2d7-38bc-474b-84f0-64e4221ef0fb" containerName="registry-server" Oct 10 17:00:00 crc kubenswrapper[4799]: I1010 
17:00:00.173276 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335260-bpk9t" Oct 10 17:00:00 crc kubenswrapper[4799]: I1010 17:00:00.179480 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 10 17:00:00 crc kubenswrapper[4799]: I1010 17:00:00.180126 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 10 17:00:00 crc kubenswrapper[4799]: I1010 17:00:00.194301 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335260-bpk9t"] Oct 10 17:00:00 crc kubenswrapper[4799]: I1010 17:00:00.372681 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca-secret-volume\") pod \"collect-profiles-29335260-bpk9t\" (UID: \"0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335260-bpk9t" Oct 10 17:00:00 crc kubenswrapper[4799]: I1010 17:00:00.372782 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvxz6\" (UniqueName: \"kubernetes.io/projected/0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca-kube-api-access-pvxz6\") pod \"collect-profiles-29335260-bpk9t\" (UID: \"0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335260-bpk9t" Oct 10 17:00:00 crc kubenswrapper[4799]: I1010 17:00:00.372931 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca-config-volume\") pod \"collect-profiles-29335260-bpk9t\" (UID: \"0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335260-bpk9t" Oct 10 17:00:00 crc kubenswrapper[4799]: I1010 17:00:00.473982 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca-secret-volume\") pod \"collect-profiles-29335260-bpk9t\" (UID: \"0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335260-bpk9t" Oct 10 17:00:00 crc kubenswrapper[4799]: I1010 17:00:00.474127 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvxz6\" (UniqueName: \"kubernetes.io/projected/0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca-kube-api-access-pvxz6\") pod \"collect-profiles-29335260-bpk9t\" (UID: \"0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335260-bpk9t" Oct 10 17:00:00 crc kubenswrapper[4799]: I1010 17:00:00.474226 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca-config-volume\") pod \"collect-profiles-29335260-bpk9t\" (UID: \"0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335260-bpk9t" Oct 10 17:00:00 crc kubenswrapper[4799]: I1010 17:00:00.475694 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca-config-volume\") pod \"collect-profiles-29335260-bpk9t\" (UID: \"0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335260-bpk9t" Oct 10 17:00:00 crc kubenswrapper[4799]: I1010 17:00:00.490960 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca-secret-volume\") pod \"collect-profiles-29335260-bpk9t\" (UID: \"0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335260-bpk9t" Oct 10 17:00:00 crc kubenswrapper[4799]: I1010 17:00:00.503037 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvxz6\" (UniqueName: \"kubernetes.io/projected/0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca-kube-api-access-pvxz6\") pod \"collect-profiles-29335260-bpk9t\" (UID: \"0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335260-bpk9t" Oct 10 17:00:00 crc kubenswrapper[4799]: I1010 17:00:00.796001 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335260-bpk9t" Oct 10 17:00:01 crc kubenswrapper[4799]: I1010 17:00:01.296374 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335260-bpk9t"] Oct 10 17:00:02 crc kubenswrapper[4799]: I1010 17:00:02.265382 4799 generic.go:334] "Generic (PLEG): container finished" podID="0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca" containerID="a39d6697438fc86ddd6c1ef500bcdc5df4792167e65c6301313efcfb45597a19" exitCode=0 Oct 10 17:00:02 crc kubenswrapper[4799]: I1010 17:00:02.265446 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335260-bpk9t" event={"ID":"0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca","Type":"ContainerDied","Data":"a39d6697438fc86ddd6c1ef500bcdc5df4792167e65c6301313efcfb45597a19"} Oct 10 17:00:02 crc kubenswrapper[4799]: I1010 17:00:02.265501 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335260-bpk9t" event={"ID":"0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca","Type":"ContainerStarted","Data":"d099a6d16662eb19e6dbd3c39cc919de858569fc3bc9f1e46113801c12867195"} Oct 10 17:00:03 crc kubenswrapper[4799]: I1010 17:00:03.608683 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335260-bpk9t" Oct 10 17:00:03 crc kubenswrapper[4799]: I1010 17:00:03.728037 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca-config-volume\") pod \"0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca\" (UID: \"0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca\") " Oct 10 17:00:03 crc kubenswrapper[4799]: I1010 17:00:03.728171 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca-secret-volume\") pod \"0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca\" (UID: \"0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca\") " Oct 10 17:00:03 crc kubenswrapper[4799]: I1010 17:00:03.728262 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pvxz6\" (UniqueName: \"kubernetes.io/projected/0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca-kube-api-access-pvxz6\") pod \"0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca\" (UID: \"0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca\") " Oct 10 17:00:03 crc kubenswrapper[4799]: I1010 17:00:03.728942 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca-config-volume" (OuterVolumeSpecName: "config-volume") pod "0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca" (UID: "0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 17:00:03 crc kubenswrapper[4799]: I1010 17:00:03.736300 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca" (UID: "0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 17:00:03 crc kubenswrapper[4799]: I1010 17:00:03.736303 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca-kube-api-access-pvxz6" (OuterVolumeSpecName: "kube-api-access-pvxz6") pod "0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca" (UID: "0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca"). InnerVolumeSpecName "kube-api-access-pvxz6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:00:03 crc kubenswrapper[4799]: I1010 17:00:03.830124 4799 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca-config-volume\") on node \"crc\" DevicePath \"\"" Oct 10 17:00:03 crc kubenswrapper[4799]: I1010 17:00:03.830156 4799 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 10 17:00:03 crc kubenswrapper[4799]: I1010 17:00:03.830167 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pvxz6\" (UniqueName: \"kubernetes.io/projected/0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca-kube-api-access-pvxz6\") on node \"crc\" DevicePath \"\"" Oct 10 17:00:04 crc kubenswrapper[4799]: I1010 17:00:04.288338 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335260-bpk9t" event={"ID":"0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca","Type":"ContainerDied","Data":"d099a6d16662eb19e6dbd3c39cc919de858569fc3bc9f1e46113801c12867195"} Oct 10 17:00:04 crc kubenswrapper[4799]: I1010 17:00:04.288398 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d099a6d16662eb19e6dbd3c39cc919de858569fc3bc9f1e46113801c12867195" Oct 10 17:00:04 crc kubenswrapper[4799]: I1010 17:00:04.288435 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335260-bpk9t" Oct 10 17:00:04 crc kubenswrapper[4799]: I1010 17:00:04.561674 4799 scope.go:117] "RemoveContainer" containerID="ef0cad99b2efacd5bcd212cd155d86551ed4cc35bedc046210eca5e8e009b86f" Oct 10 17:00:04 crc kubenswrapper[4799]: I1010 17:00:04.582356 4799 scope.go:117] "RemoveContainer" containerID="24962cee1d51a7d7eb5a1d25cab56e9384c0342e38d3013ef33f383559acf6d7" Oct 10 17:00:04 crc kubenswrapper[4799]: I1010 17:00:04.599541 4799 scope.go:117] "RemoveContainer" containerID="e7fc78b3c8230f7e21437f35ba5f2fca7d9cf97cdf46510b2801bf13113895c9" Oct 10 17:00:04 crc kubenswrapper[4799]: I1010 17:00:04.646900 4799 scope.go:117] "RemoveContainer" containerID="95e8c5c7eeb44313269abe5e0811c66db445161e27df4b78e13b1117ddf8ecc1" Oct 10 17:00:08 crc kubenswrapper[4799]: I1010 17:00:08.402166 4799 scope.go:117] "RemoveContainer" containerID="eb9efabd31e0bc119cd431c6228f2fbf6db48806c09df2881081667d9ddd75e0" Oct 10 17:00:08 crc kubenswrapper[4799]: E1010 17:00:08.402745 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:00:21 crc kubenswrapper[4799]: I1010 17:00:21.402364 4799 scope.go:117] "RemoveContainer" containerID="eb9efabd31e0bc119cd431c6228f2fbf6db48806c09df2881081667d9ddd75e0" Oct 10 17:00:21 crc kubenswrapper[4799]: E1010 17:00:21.403101 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:00:34 crc kubenswrapper[4799]: I1010 17:00:34.873566 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vflcz"] Oct 10 17:00:34 crc kubenswrapper[4799]: E1010 17:00:34.874566 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca" containerName="collect-profiles" Oct 10 17:00:34 crc kubenswrapper[4799]: I1010 17:00:34.874583 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca" containerName="collect-profiles" Oct 10 17:00:34 crc kubenswrapper[4799]: I1010 17:00:34.874739 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca" containerName="collect-profiles" Oct 10 17:00:34 crc kubenswrapper[4799]: I1010 17:00:34.876193 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vflcz" Oct 10 17:00:34 crc kubenswrapper[4799]: I1010 17:00:34.905347 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vflcz"] Oct 10 17:00:35 crc kubenswrapper[4799]: I1010 17:00:35.036862 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa3b1074-e673-45b4-a772-b7c5efbdf945-utilities\") pod \"redhat-marketplace-vflcz\" (UID: \"fa3b1074-e673-45b4-a772-b7c5efbdf945\") " pod="openshift-marketplace/redhat-marketplace-vflcz" Oct 10 17:00:35 crc kubenswrapper[4799]: I1010 17:00:35.036949 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vb9r4\" (UniqueName: \"kubernetes.io/projected/fa3b1074-e673-45b4-a772-b7c5efbdf945-kube-api-access-vb9r4\") pod \"redhat-marketplace-vflcz\" (UID: \"fa3b1074-e673-45b4-a772-b7c5efbdf945\") " pod="openshift-marketplace/redhat-marketplace-vflcz" Oct 10 17:00:35 crc kubenswrapper[4799]: I1010 17:00:35.037024 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa3b1074-e673-45b4-a772-b7c5efbdf945-catalog-content\") pod \"redhat-marketplace-vflcz\" (UID: \"fa3b1074-e673-45b4-a772-b7c5efbdf945\") " pod="openshift-marketplace/redhat-marketplace-vflcz" Oct 10 17:00:35 crc kubenswrapper[4799]: I1010 17:00:35.138232 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa3b1074-e673-45b4-a772-b7c5efbdf945-catalog-content\") pod \"redhat-marketplace-vflcz\" (UID: \"fa3b1074-e673-45b4-a772-b7c5efbdf945\") " pod="openshift-marketplace/redhat-marketplace-vflcz" Oct 10 17:00:35 crc kubenswrapper[4799]: I1010 17:00:35.138324 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa3b1074-e673-45b4-a772-b7c5efbdf945-utilities\") pod \"redhat-marketplace-vflcz\" (UID: \"fa3b1074-e673-45b4-a772-b7c5efbdf945\") " pod="openshift-marketplace/redhat-marketplace-vflcz" Oct 10 17:00:35 crc kubenswrapper[4799]: I1010 17:00:35.138348 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vb9r4\" 
(UniqueName: \"kubernetes.io/projected/fa3b1074-e673-45b4-a772-b7c5efbdf945-kube-api-access-vb9r4\") pod \"redhat-marketplace-vflcz\" (UID: \"fa3b1074-e673-45b4-a772-b7c5efbdf945\") " pod="openshift-marketplace/redhat-marketplace-vflcz" Oct 10 17:00:35 crc kubenswrapper[4799]: I1010 17:00:35.138848 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa3b1074-e673-45b4-a772-b7c5efbdf945-catalog-content\") pod \"redhat-marketplace-vflcz\" (UID: \"fa3b1074-e673-45b4-a772-b7c5efbdf945\") " pod="openshift-marketplace/redhat-marketplace-vflcz" Oct 10 17:00:35 crc kubenswrapper[4799]: I1010 17:00:35.138858 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa3b1074-e673-45b4-a772-b7c5efbdf945-utilities\") pod \"redhat-marketplace-vflcz\" (UID: \"fa3b1074-e673-45b4-a772-b7c5efbdf945\") " pod="openshift-marketplace/redhat-marketplace-vflcz" Oct 10 17:00:35 crc kubenswrapper[4799]: I1010 17:00:35.157169 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vb9r4\" (UniqueName: \"kubernetes.io/projected/fa3b1074-e673-45b4-a772-b7c5efbdf945-kube-api-access-vb9r4\") pod \"redhat-marketplace-vflcz\" (UID: \"fa3b1074-e673-45b4-a772-b7c5efbdf945\") " pod="openshift-marketplace/redhat-marketplace-vflcz" Oct 10 17:00:35 crc kubenswrapper[4799]: I1010 17:00:35.215311 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vflcz" Oct 10 17:00:35 crc kubenswrapper[4799]: I1010 17:00:35.403601 4799 scope.go:117] "RemoveContainer" containerID="eb9efabd31e0bc119cd431c6228f2fbf6db48806c09df2881081667d9ddd75e0" Oct 10 17:00:35 crc kubenswrapper[4799]: E1010 17:00:35.404109 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:00:35 crc kubenswrapper[4799]: I1010 17:00:35.685657 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vflcz"] Oct 10 17:00:36 crc kubenswrapper[4799]: I1010 17:00:36.577034 4799 generic.go:334] "Generic (PLEG): container finished" podID="fa3b1074-e673-45b4-a772-b7c5efbdf945" containerID="ba40082f31a6ecd2726fc9105afd4269fba404cdcd41c4ec6d246e68035ee9a3" exitCode=0 Oct 10 17:00:36 crc kubenswrapper[4799]: I1010 17:00:36.577121 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vflcz" event={"ID":"fa3b1074-e673-45b4-a772-b7c5efbdf945","Type":"ContainerDied","Data":"ba40082f31a6ecd2726fc9105afd4269fba404cdcd41c4ec6d246e68035ee9a3"} Oct 10 17:00:36 crc kubenswrapper[4799]: I1010 17:00:36.577180 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vflcz" event={"ID":"fa3b1074-e673-45b4-a772-b7c5efbdf945","Type":"ContainerStarted","Data":"04c1ff9e7474d574a00226bbb98e6425494c6b7df38ed441bd76b9b42653eba0"} Oct 10 17:00:36 crc kubenswrapper[4799]: I1010 17:00:36.580127 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 10 17:00:38 crc kubenswrapper[4799]: I1010 
17:00:38.600067 4799 generic.go:334] "Generic (PLEG): container finished" podID="fa3b1074-e673-45b4-a772-b7c5efbdf945" containerID="11d533f9d458bd5d5a4279dbe67d81bd804cac00cfbbaad4da4d99edc0b95ae9" exitCode=0 Oct 10 17:00:38 crc kubenswrapper[4799]: I1010 17:00:38.600182 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vflcz" event={"ID":"fa3b1074-e673-45b4-a772-b7c5efbdf945","Type":"ContainerDied","Data":"11d533f9d458bd5d5a4279dbe67d81bd804cac00cfbbaad4da4d99edc0b95ae9"} Oct 10 17:00:39 crc kubenswrapper[4799]: I1010 17:00:39.611354 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vflcz" event={"ID":"fa3b1074-e673-45b4-a772-b7c5efbdf945","Type":"ContainerStarted","Data":"290b7eacb6979e64462981393bc2561714c5f287ae35d0287a4cffd9de8bc92e"} Oct 10 17:00:45 crc kubenswrapper[4799]: I1010 17:00:45.215478 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vflcz" Oct 10 17:00:45 crc kubenswrapper[4799]: I1010 17:00:45.216045 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vflcz" Oct 10 17:00:45 crc kubenswrapper[4799]: I1010 17:00:45.278201 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vflcz" Oct 10 17:00:45 crc kubenswrapper[4799]: I1010 17:00:45.310880 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vflcz" podStartSLOduration=8.872915552 podStartE2EDuration="11.310857912s" podCreationTimestamp="2025-10-10 17:00:34 +0000 UTC" firstStartedPulling="2025-10-10 17:00:36.57932475 +0000 UTC m=+1730.087648915" lastFinishedPulling="2025-10-10 17:00:39.01726713 +0000 UTC m=+1732.525591275" observedRunningTime="2025-10-10 17:00:39.663482247 +0000 UTC m=+1733.171806462" watchObservedRunningTime="2025-10-10 17:00:45.310857912 +0000 UTC m=+1738.819182037" Oct 10 17:00:45 crc kubenswrapper[4799]: I1010 17:00:45.710215 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vflcz" Oct 10 17:00:45 crc kubenswrapper[4799]: I1010 17:00:45.772022 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vflcz"] Oct 10 17:00:46 crc kubenswrapper[4799]: I1010 17:00:46.402722 4799 scope.go:117] "RemoveContainer" containerID="eb9efabd31e0bc119cd431c6228f2fbf6db48806c09df2881081667d9ddd75e0" Oct 10 17:00:46 crc kubenswrapper[4799]: E1010 17:00:46.403185 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:00:47 crc kubenswrapper[4799]: I1010 17:00:47.679650 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vflcz" podUID="fa3b1074-e673-45b4-a772-b7c5efbdf945" containerName="registry-server" containerID="cri-o://290b7eacb6979e64462981393bc2561714c5f287ae35d0287a4cffd9de8bc92e" gracePeriod=2 Oct 10 17:00:48 crc kubenswrapper[4799]: I1010 17:00:48.171229 4799 util.go:48] "No ready sandbox 
for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vflcz" Oct 10 17:00:48 crc kubenswrapper[4799]: I1010 17:00:48.357726 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vb9r4\" (UniqueName: \"kubernetes.io/projected/fa3b1074-e673-45b4-a772-b7c5efbdf945-kube-api-access-vb9r4\") pod \"fa3b1074-e673-45b4-a772-b7c5efbdf945\" (UID: \"fa3b1074-e673-45b4-a772-b7c5efbdf945\") " Oct 10 17:00:48 crc kubenswrapper[4799]: I1010 17:00:48.357855 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa3b1074-e673-45b4-a772-b7c5efbdf945-utilities\") pod \"fa3b1074-e673-45b4-a772-b7c5efbdf945\" (UID: \"fa3b1074-e673-45b4-a772-b7c5efbdf945\") " Oct 10 17:00:48 crc kubenswrapper[4799]: I1010 17:00:48.358076 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa3b1074-e673-45b4-a772-b7c5efbdf945-catalog-content\") pod \"fa3b1074-e673-45b4-a772-b7c5efbdf945\" (UID: \"fa3b1074-e673-45b4-a772-b7c5efbdf945\") " Oct 10 17:00:48 crc kubenswrapper[4799]: I1010 17:00:48.359930 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa3b1074-e673-45b4-a772-b7c5efbdf945-utilities" (OuterVolumeSpecName: "utilities") pod "fa3b1074-e673-45b4-a772-b7c5efbdf945" (UID: "fa3b1074-e673-45b4-a772-b7c5efbdf945"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:00:48 crc kubenswrapper[4799]: I1010 17:00:48.366711 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa3b1074-e673-45b4-a772-b7c5efbdf945-kube-api-access-vb9r4" (OuterVolumeSpecName: "kube-api-access-vb9r4") pod "fa3b1074-e673-45b4-a772-b7c5efbdf945" (UID: "fa3b1074-e673-45b4-a772-b7c5efbdf945"). InnerVolumeSpecName "kube-api-access-vb9r4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:00:48 crc kubenswrapper[4799]: I1010 17:00:48.375039 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa3b1074-e673-45b4-a772-b7c5efbdf945-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fa3b1074-e673-45b4-a772-b7c5efbdf945" (UID: "fa3b1074-e673-45b4-a772-b7c5efbdf945"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:00:48 crc kubenswrapper[4799]: I1010 17:00:48.460158 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vb9r4\" (UniqueName: \"kubernetes.io/projected/fa3b1074-e673-45b4-a772-b7c5efbdf945-kube-api-access-vb9r4\") on node \"crc\" DevicePath \"\"" Oct 10 17:00:48 crc kubenswrapper[4799]: I1010 17:00:48.460223 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa3b1074-e673-45b4-a772-b7c5efbdf945-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 17:00:48 crc kubenswrapper[4799]: I1010 17:00:48.460253 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa3b1074-e673-45b4-a772-b7c5efbdf945-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 17:00:48 crc kubenswrapper[4799]: I1010 17:00:48.694840 4799 generic.go:334] "Generic (PLEG): container finished" podID="fa3b1074-e673-45b4-a772-b7c5efbdf945" containerID="290b7eacb6979e64462981393bc2561714c5f287ae35d0287a4cffd9de8bc92e" exitCode=0 Oct 10 17:00:48 crc kubenswrapper[4799]: I1010 17:00:48.694904 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vflcz" event={"ID":"fa3b1074-e673-45b4-a772-b7c5efbdf945","Type":"ContainerDied","Data":"290b7eacb6979e64462981393bc2561714c5f287ae35d0287a4cffd9de8bc92e"} Oct 10 17:00:48 crc kubenswrapper[4799]: I1010 17:00:48.694948 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vflcz" event={"ID":"fa3b1074-e673-45b4-a772-b7c5efbdf945","Type":"ContainerDied","Data":"04c1ff9e7474d574a00226bbb98e6425494c6b7df38ed441bd76b9b42653eba0"} Oct 10 17:00:48 crc kubenswrapper[4799]: I1010 17:00:48.694980 4799 scope.go:117] "RemoveContainer" containerID="290b7eacb6979e64462981393bc2561714c5f287ae35d0287a4cffd9de8bc92e" Oct 10 17:00:48 crc kubenswrapper[4799]: I1010 17:00:48.695003 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vflcz" Oct 10 17:00:48 crc kubenswrapper[4799]: I1010 17:00:48.724232 4799 scope.go:117] "RemoveContainer" containerID="11d533f9d458bd5d5a4279dbe67d81bd804cac00cfbbaad4da4d99edc0b95ae9" Oct 10 17:00:48 crc kubenswrapper[4799]: I1010 17:00:48.757455 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vflcz"] Oct 10 17:00:48 crc kubenswrapper[4799]: I1010 17:00:48.769420 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vflcz"] Oct 10 17:00:48 crc kubenswrapper[4799]: I1010 17:00:48.793016 4799 scope.go:117] "RemoveContainer" containerID="ba40082f31a6ecd2726fc9105afd4269fba404cdcd41c4ec6d246e68035ee9a3" Oct 10 17:00:48 crc kubenswrapper[4799]: I1010 17:00:48.830841 4799 scope.go:117] "RemoveContainer" containerID="290b7eacb6979e64462981393bc2561714c5f287ae35d0287a4cffd9de8bc92e" Oct 10 17:00:48 crc kubenswrapper[4799]: E1010 17:00:48.831823 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"290b7eacb6979e64462981393bc2561714c5f287ae35d0287a4cffd9de8bc92e\": container with ID starting with 290b7eacb6979e64462981393bc2561714c5f287ae35d0287a4cffd9de8bc92e not found: ID does not exist" containerID="290b7eacb6979e64462981393bc2561714c5f287ae35d0287a4cffd9de8bc92e" Oct 10 17:00:48 crc kubenswrapper[4799]: I1010 17:00:48.831911 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"290b7eacb6979e64462981393bc2561714c5f287ae35d0287a4cffd9de8bc92e"} err="failed to get container status \"290b7eacb6979e64462981393bc2561714c5f287ae35d0287a4cffd9de8bc92e\": rpc error: code = NotFound desc = could not find container \"290b7eacb6979e64462981393bc2561714c5f287ae35d0287a4cffd9de8bc92e\": container with ID starting with 290b7eacb6979e64462981393bc2561714c5f287ae35d0287a4cffd9de8bc92e not found: ID does not exist" Oct 10 17:00:48 crc kubenswrapper[4799]: I1010 17:00:48.831978 4799 scope.go:117] "RemoveContainer" containerID="11d533f9d458bd5d5a4279dbe67d81bd804cac00cfbbaad4da4d99edc0b95ae9" Oct 10 17:00:48 crc kubenswrapper[4799]: E1010 17:00:48.832559 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"11d533f9d458bd5d5a4279dbe67d81bd804cac00cfbbaad4da4d99edc0b95ae9\": container with ID starting with 11d533f9d458bd5d5a4279dbe67d81bd804cac00cfbbaad4da4d99edc0b95ae9 not found: ID does not exist" containerID="11d533f9d458bd5d5a4279dbe67d81bd804cac00cfbbaad4da4d99edc0b95ae9" Oct 10 17:00:48 crc kubenswrapper[4799]: I1010 17:00:48.832605 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11d533f9d458bd5d5a4279dbe67d81bd804cac00cfbbaad4da4d99edc0b95ae9"} err="failed to get container status \"11d533f9d458bd5d5a4279dbe67d81bd804cac00cfbbaad4da4d99edc0b95ae9\": rpc error: code = NotFound desc = could not find container \"11d533f9d458bd5d5a4279dbe67d81bd804cac00cfbbaad4da4d99edc0b95ae9\": container with ID starting with 11d533f9d458bd5d5a4279dbe67d81bd804cac00cfbbaad4da4d99edc0b95ae9 not found: ID does not exist" Oct 10 17:00:48 crc kubenswrapper[4799]: I1010 17:00:48.832643 4799 scope.go:117] "RemoveContainer" containerID="ba40082f31a6ecd2726fc9105afd4269fba404cdcd41c4ec6d246e68035ee9a3" Oct 10 17:00:48 crc kubenswrapper[4799]: E1010 17:00:48.833037 4799 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"ba40082f31a6ecd2726fc9105afd4269fba404cdcd41c4ec6d246e68035ee9a3\": container with ID starting with ba40082f31a6ecd2726fc9105afd4269fba404cdcd41c4ec6d246e68035ee9a3 not found: ID does not exist" containerID="ba40082f31a6ecd2726fc9105afd4269fba404cdcd41c4ec6d246e68035ee9a3" Oct 10 17:00:48 crc kubenswrapper[4799]: I1010 17:00:48.833069 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba40082f31a6ecd2726fc9105afd4269fba404cdcd41c4ec6d246e68035ee9a3"} err="failed to get container status \"ba40082f31a6ecd2726fc9105afd4269fba404cdcd41c4ec6d246e68035ee9a3\": rpc error: code = NotFound desc = could not find container \"ba40082f31a6ecd2726fc9105afd4269fba404cdcd41c4ec6d246e68035ee9a3\": container with ID starting with ba40082f31a6ecd2726fc9105afd4269fba404cdcd41c4ec6d246e68035ee9a3 not found: ID does not exist" Oct 10 17:00:49 crc kubenswrapper[4799]: I1010 17:00:49.421229 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa3b1074-e673-45b4-a772-b7c5efbdf945" path="/var/lib/kubelet/pods/fa3b1074-e673-45b4-a772-b7c5efbdf945/volumes" Oct 10 17:01:00 crc kubenswrapper[4799]: I1010 17:01:00.402441 4799 scope.go:117] "RemoveContainer" containerID="eb9efabd31e0bc119cd431c6228f2fbf6db48806c09df2881081667d9ddd75e0" Oct 10 17:01:00 crc kubenswrapper[4799]: E1010 17:01:00.403172 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:01:04 crc kubenswrapper[4799]: I1010 17:01:04.737661 4799 scope.go:117] "RemoveContainer" containerID="af38fd40bd18be55b2f068d68e6140436d795e517aa7b1bc8e8a4310f9752868" Oct 10 17:01:04 crc kubenswrapper[4799]: I1010 17:01:04.764640 4799 scope.go:117] "RemoveContainer" containerID="8b87ad4f452cd40a751d6fe4d9f7832708fd10be491e8259151d3509310eab1c" Oct 10 17:01:04 crc kubenswrapper[4799]: I1010 17:01:04.801111 4799 scope.go:117] "RemoveContainer" containerID="7213f8a82d7b7669ad0bb4871635bfb72208bddeb1f8660087390bb922ac83a8" Oct 10 17:01:14 crc kubenswrapper[4799]: I1010 17:01:14.402834 4799 scope.go:117] "RemoveContainer" containerID="eb9efabd31e0bc119cd431c6228f2fbf6db48806c09df2881081667d9ddd75e0" Oct 10 17:01:14 crc kubenswrapper[4799]: E1010 17:01:14.404327 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:01:26 crc kubenswrapper[4799]: I1010 17:01:26.403175 4799 scope.go:117] "RemoveContainer" containerID="eb9efabd31e0bc119cd431c6228f2fbf6db48806c09df2881081667d9ddd75e0" Oct 10 17:01:26 crc kubenswrapper[4799]: E1010 17:01:26.404352 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:01:38 crc kubenswrapper[4799]: I1010 17:01:38.402834 4799 scope.go:117] "RemoveContainer" containerID="eb9efabd31e0bc119cd431c6228f2fbf6db48806c09df2881081667d9ddd75e0" Oct 10 17:01:38 crc kubenswrapper[4799]: E1010 17:01:38.403786 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:01:49 crc kubenswrapper[4799]: I1010 17:01:49.402944 4799 scope.go:117] "RemoveContainer" containerID="eb9efabd31e0bc119cd431c6228f2fbf6db48806c09df2881081667d9ddd75e0" Oct 10 17:01:49 crc kubenswrapper[4799]: E1010 17:01:49.404201 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:02:04 crc kubenswrapper[4799]: I1010 17:02:04.403294 4799 scope.go:117] "RemoveContainer" containerID="eb9efabd31e0bc119cd431c6228f2fbf6db48806c09df2881081667d9ddd75e0" Oct 10 17:02:04 crc kubenswrapper[4799]: E1010 17:02:04.404378 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:02:15 crc kubenswrapper[4799]: I1010 17:02:15.402894 4799 scope.go:117] "RemoveContainer" containerID="eb9efabd31e0bc119cd431c6228f2fbf6db48806c09df2881081667d9ddd75e0" Oct 10 17:02:16 crc kubenswrapper[4799]: I1010 17:02:16.514317 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"4dd1886b500c1dcdf7222359795c4b97be8fff09a255efaef533259216eb6900"} Oct 10 17:04:15 crc kubenswrapper[4799]: I1010 17:04:15.249087 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 17:04:15 crc kubenswrapper[4799]: I1010 17:04:15.249951 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 17:04:45 crc kubenswrapper[4799]: I1010 
17:04:45.249247 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 17:04:45 crc kubenswrapper[4799]: I1010 17:04:45.249782 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 17:05:15 crc kubenswrapper[4799]: I1010 17:05:15.248935 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 17:05:15 crc kubenswrapper[4799]: I1010 17:05:15.249537 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 17:05:15 crc kubenswrapper[4799]: I1010 17:05:15.249601 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 17:05:15 crc kubenswrapper[4799]: I1010 17:05:15.250518 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4dd1886b500c1dcdf7222359795c4b97be8fff09a255efaef533259216eb6900"} pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 10 17:05:15 crc kubenswrapper[4799]: I1010 17:05:15.250657 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" containerID="cri-o://4dd1886b500c1dcdf7222359795c4b97be8fff09a255efaef533259216eb6900" gracePeriod=600 Oct 10 17:05:16 crc kubenswrapper[4799]: I1010 17:05:16.227741 4799 generic.go:334] "Generic (PLEG): container finished" podID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerID="4dd1886b500c1dcdf7222359795c4b97be8fff09a255efaef533259216eb6900" exitCode=0 Oct 10 17:05:16 crc kubenswrapper[4799]: I1010 17:05:16.227794 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerDied","Data":"4dd1886b500c1dcdf7222359795c4b97be8fff09a255efaef533259216eb6900"} Oct 10 17:05:16 crc kubenswrapper[4799]: I1010 17:05:16.228189 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"3ee83a672ba2f7803e7c4f7cecdddbaee1838a288065f4f6358434432017c356"} Oct 10 17:05:16 crc kubenswrapper[4799]: I1010 17:05:16.228214 4799 scope.go:117] "RemoveContainer" 
containerID="eb9efabd31e0bc119cd431c6228f2fbf6db48806c09df2881081667d9ddd75e0" Oct 10 17:05:26 crc kubenswrapper[4799]: I1010 17:05:26.443804 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-gjplc"] Oct 10 17:05:26 crc kubenswrapper[4799]: E1010 17:05:26.444818 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa3b1074-e673-45b4-a772-b7c5efbdf945" containerName="registry-server" Oct 10 17:05:26 crc kubenswrapper[4799]: I1010 17:05:26.444841 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa3b1074-e673-45b4-a772-b7c5efbdf945" containerName="registry-server" Oct 10 17:05:26 crc kubenswrapper[4799]: E1010 17:05:26.444875 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa3b1074-e673-45b4-a772-b7c5efbdf945" containerName="extract-content" Oct 10 17:05:26 crc kubenswrapper[4799]: I1010 17:05:26.444888 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa3b1074-e673-45b4-a772-b7c5efbdf945" containerName="extract-content" Oct 10 17:05:26 crc kubenswrapper[4799]: E1010 17:05:26.444914 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa3b1074-e673-45b4-a772-b7c5efbdf945" containerName="extract-utilities" Oct 10 17:05:26 crc kubenswrapper[4799]: I1010 17:05:26.444928 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa3b1074-e673-45b4-a772-b7c5efbdf945" containerName="extract-utilities" Oct 10 17:05:26 crc kubenswrapper[4799]: I1010 17:05:26.445192 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa3b1074-e673-45b4-a772-b7c5efbdf945" containerName="registry-server" Oct 10 17:05:26 crc kubenswrapper[4799]: I1010 17:05:26.452928 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gjplc" Oct 10 17:05:26 crc kubenswrapper[4799]: I1010 17:05:26.460052 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gjplc"] Oct 10 17:05:26 crc kubenswrapper[4799]: I1010 17:05:26.568863 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f01bd688-9703-4aec-a71b-aabf928476a1-utilities\") pod \"redhat-operators-gjplc\" (UID: \"f01bd688-9703-4aec-a71b-aabf928476a1\") " pod="openshift-marketplace/redhat-operators-gjplc" Oct 10 17:05:26 crc kubenswrapper[4799]: I1010 17:05:26.569209 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f01bd688-9703-4aec-a71b-aabf928476a1-catalog-content\") pod \"redhat-operators-gjplc\" (UID: \"f01bd688-9703-4aec-a71b-aabf928476a1\") " pod="openshift-marketplace/redhat-operators-gjplc" Oct 10 17:05:26 crc kubenswrapper[4799]: I1010 17:05:26.569291 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pj4wp\" (UniqueName: \"kubernetes.io/projected/f01bd688-9703-4aec-a71b-aabf928476a1-kube-api-access-pj4wp\") pod \"redhat-operators-gjplc\" (UID: \"f01bd688-9703-4aec-a71b-aabf928476a1\") " pod="openshift-marketplace/redhat-operators-gjplc" Oct 10 17:05:26 crc kubenswrapper[4799]: I1010 17:05:26.670607 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f01bd688-9703-4aec-a71b-aabf928476a1-utilities\") pod \"redhat-operators-gjplc\" (UID: 
\"f01bd688-9703-4aec-a71b-aabf928476a1\") " pod="openshift-marketplace/redhat-operators-gjplc" Oct 10 17:05:26 crc kubenswrapper[4799]: I1010 17:05:26.670694 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f01bd688-9703-4aec-a71b-aabf928476a1-catalog-content\") pod \"redhat-operators-gjplc\" (UID: \"f01bd688-9703-4aec-a71b-aabf928476a1\") " pod="openshift-marketplace/redhat-operators-gjplc" Oct 10 17:05:26 crc kubenswrapper[4799]: I1010 17:05:26.670723 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pj4wp\" (UniqueName: \"kubernetes.io/projected/f01bd688-9703-4aec-a71b-aabf928476a1-kube-api-access-pj4wp\") pod \"redhat-operators-gjplc\" (UID: \"f01bd688-9703-4aec-a71b-aabf928476a1\") " pod="openshift-marketplace/redhat-operators-gjplc" Oct 10 17:05:26 crc kubenswrapper[4799]: I1010 17:05:26.671551 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f01bd688-9703-4aec-a71b-aabf928476a1-catalog-content\") pod \"redhat-operators-gjplc\" (UID: \"f01bd688-9703-4aec-a71b-aabf928476a1\") " pod="openshift-marketplace/redhat-operators-gjplc" Oct 10 17:05:26 crc kubenswrapper[4799]: I1010 17:05:26.671869 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f01bd688-9703-4aec-a71b-aabf928476a1-utilities\") pod \"redhat-operators-gjplc\" (UID: \"f01bd688-9703-4aec-a71b-aabf928476a1\") " pod="openshift-marketplace/redhat-operators-gjplc" Oct 10 17:05:26 crc kubenswrapper[4799]: I1010 17:05:26.698666 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pj4wp\" (UniqueName: \"kubernetes.io/projected/f01bd688-9703-4aec-a71b-aabf928476a1-kube-api-access-pj4wp\") pod \"redhat-operators-gjplc\" (UID: \"f01bd688-9703-4aec-a71b-aabf928476a1\") " pod="openshift-marketplace/redhat-operators-gjplc" Oct 10 17:05:26 crc kubenswrapper[4799]: I1010 17:05:26.788195 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gjplc" Oct 10 17:05:27 crc kubenswrapper[4799]: I1010 17:05:27.022576 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gjplc"] Oct 10 17:05:27 crc kubenswrapper[4799]: W1010 17:05:27.032073 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf01bd688_9703_4aec_a71b_aabf928476a1.slice/crio-6dd38df1a4b5f86f472cbd4266ac9882a38a559829b9719f08afbe38d4174c49 WatchSource:0}: Error finding container 6dd38df1a4b5f86f472cbd4266ac9882a38a559829b9719f08afbe38d4174c49: Status 404 returned error can't find the container with id 6dd38df1a4b5f86f472cbd4266ac9882a38a559829b9719f08afbe38d4174c49 Oct 10 17:05:27 crc kubenswrapper[4799]: I1010 17:05:27.332968 4799 generic.go:334] "Generic (PLEG): container finished" podID="f01bd688-9703-4aec-a71b-aabf928476a1" containerID="45330372d6ff44c5632b2584a83da8608ac50fbda99900e1f69dd0460cc244b2" exitCode=0 Oct 10 17:05:27 crc kubenswrapper[4799]: I1010 17:05:27.333030 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gjplc" event={"ID":"f01bd688-9703-4aec-a71b-aabf928476a1","Type":"ContainerDied","Data":"45330372d6ff44c5632b2584a83da8608ac50fbda99900e1f69dd0460cc244b2"} Oct 10 17:05:27 crc kubenswrapper[4799]: I1010 17:05:27.333087 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gjplc" event={"ID":"f01bd688-9703-4aec-a71b-aabf928476a1","Type":"ContainerStarted","Data":"6dd38df1a4b5f86f472cbd4266ac9882a38a559829b9719f08afbe38d4174c49"} Oct 10 17:05:29 crc kubenswrapper[4799]: I1010 17:05:29.355391 4799 generic.go:334] "Generic (PLEG): container finished" podID="f01bd688-9703-4aec-a71b-aabf928476a1" containerID="aa5d9bfe4dc47541d43bc85477d341598297f945fae893e7db758bbce9872d0e" exitCode=0 Oct 10 17:05:29 crc kubenswrapper[4799]: I1010 17:05:29.355495 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gjplc" event={"ID":"f01bd688-9703-4aec-a71b-aabf928476a1","Type":"ContainerDied","Data":"aa5d9bfe4dc47541d43bc85477d341598297f945fae893e7db758bbce9872d0e"} Oct 10 17:05:30 crc kubenswrapper[4799]: I1010 17:05:30.371092 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gjplc" event={"ID":"f01bd688-9703-4aec-a71b-aabf928476a1","Type":"ContainerStarted","Data":"4f77d16ee991d3807dc5715b69c4f40e35faf8c2c5fa1e308b8c74cd38251013"} Oct 10 17:05:30 crc kubenswrapper[4799]: I1010 17:05:30.394045 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-gjplc" podStartSLOduration=1.954480621 podStartE2EDuration="4.394023622s" podCreationTimestamp="2025-10-10 17:05:26 +0000 UTC" firstStartedPulling="2025-10-10 17:05:27.334305966 +0000 UTC m=+2020.842630081" lastFinishedPulling="2025-10-10 17:05:29.773848927 +0000 UTC m=+2023.282173082" observedRunningTime="2025-10-10 17:05:30.392036783 +0000 UTC m=+2023.900360928" watchObservedRunningTime="2025-10-10 17:05:30.394023622 +0000 UTC m=+2023.902347747" Oct 10 17:05:36 crc kubenswrapper[4799]: I1010 17:05:36.789242 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-gjplc" Oct 10 17:05:36 crc kubenswrapper[4799]: I1010 17:05:36.789737 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/redhat-operators-gjplc" Oct 10 17:05:36 crc kubenswrapper[4799]: I1010 17:05:36.840827 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-gjplc" Oct 10 17:05:37 crc kubenswrapper[4799]: I1010 17:05:37.503595 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-gjplc" Oct 10 17:05:37 crc kubenswrapper[4799]: I1010 17:05:37.561934 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gjplc"] Oct 10 17:05:39 crc kubenswrapper[4799]: I1010 17:05:39.457843 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-gjplc" podUID="f01bd688-9703-4aec-a71b-aabf928476a1" containerName="registry-server" containerID="cri-o://4f77d16ee991d3807dc5715b69c4f40e35faf8c2c5fa1e308b8c74cd38251013" gracePeriod=2 Oct 10 17:05:41 crc kubenswrapper[4799]: I1010 17:05:41.478160 4799 generic.go:334] "Generic (PLEG): container finished" podID="f01bd688-9703-4aec-a71b-aabf928476a1" containerID="4f77d16ee991d3807dc5715b69c4f40e35faf8c2c5fa1e308b8c74cd38251013" exitCode=0 Oct 10 17:05:41 crc kubenswrapper[4799]: I1010 17:05:41.478219 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gjplc" event={"ID":"f01bd688-9703-4aec-a71b-aabf928476a1","Type":"ContainerDied","Data":"4f77d16ee991d3807dc5715b69c4f40e35faf8c2c5fa1e308b8c74cd38251013"} Oct 10 17:05:41 crc kubenswrapper[4799]: I1010 17:05:41.768865 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gjplc" Oct 10 17:05:41 crc kubenswrapper[4799]: I1010 17:05:41.902904 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f01bd688-9703-4aec-a71b-aabf928476a1-catalog-content\") pod \"f01bd688-9703-4aec-a71b-aabf928476a1\" (UID: \"f01bd688-9703-4aec-a71b-aabf928476a1\") " Oct 10 17:05:41 crc kubenswrapper[4799]: I1010 17:05:41.902947 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f01bd688-9703-4aec-a71b-aabf928476a1-utilities\") pod \"f01bd688-9703-4aec-a71b-aabf928476a1\" (UID: \"f01bd688-9703-4aec-a71b-aabf928476a1\") " Oct 10 17:05:41 crc kubenswrapper[4799]: I1010 17:05:41.902994 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj4wp\" (UniqueName: \"kubernetes.io/projected/f01bd688-9703-4aec-a71b-aabf928476a1-kube-api-access-pj4wp\") pod \"f01bd688-9703-4aec-a71b-aabf928476a1\" (UID: \"f01bd688-9703-4aec-a71b-aabf928476a1\") " Oct 10 17:05:41 crc kubenswrapper[4799]: I1010 17:05:41.904227 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f01bd688-9703-4aec-a71b-aabf928476a1-utilities" (OuterVolumeSpecName: "utilities") pod "f01bd688-9703-4aec-a71b-aabf928476a1" (UID: "f01bd688-9703-4aec-a71b-aabf928476a1"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:05:41 crc kubenswrapper[4799]: I1010 17:05:41.909448 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f01bd688-9703-4aec-a71b-aabf928476a1-kube-api-access-pj4wp" (OuterVolumeSpecName: "kube-api-access-pj4wp") pod "f01bd688-9703-4aec-a71b-aabf928476a1" (UID: "f01bd688-9703-4aec-a71b-aabf928476a1"). InnerVolumeSpecName "kube-api-access-pj4wp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:05:41 crc kubenswrapper[4799]: I1010 17:05:41.995919 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f01bd688-9703-4aec-a71b-aabf928476a1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f01bd688-9703-4aec-a71b-aabf928476a1" (UID: "f01bd688-9703-4aec-a71b-aabf928476a1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:05:42 crc kubenswrapper[4799]: I1010 17:05:42.004934 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f01bd688-9703-4aec-a71b-aabf928476a1-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 17:05:42 crc kubenswrapper[4799]: I1010 17:05:42.004973 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f01bd688-9703-4aec-a71b-aabf928476a1-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 17:05:42 crc kubenswrapper[4799]: I1010 17:05:42.004984 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj4wp\" (UniqueName: \"kubernetes.io/projected/f01bd688-9703-4aec-a71b-aabf928476a1-kube-api-access-pj4wp\") on node \"crc\" DevicePath \"\"" Oct 10 17:05:42 crc kubenswrapper[4799]: I1010 17:05:42.489968 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gjplc" event={"ID":"f01bd688-9703-4aec-a71b-aabf928476a1","Type":"ContainerDied","Data":"6dd38df1a4b5f86f472cbd4266ac9882a38a559829b9719f08afbe38d4174c49"} Oct 10 17:05:42 crc kubenswrapper[4799]: I1010 17:05:42.490288 4799 scope.go:117] "RemoveContainer" containerID="4f77d16ee991d3807dc5715b69c4f40e35faf8c2c5fa1e308b8c74cd38251013" Oct 10 17:05:42 crc kubenswrapper[4799]: I1010 17:05:42.490048 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gjplc" Oct 10 17:05:42 crc kubenswrapper[4799]: I1010 17:05:42.526014 4799 scope.go:117] "RemoveContainer" containerID="aa5d9bfe4dc47541d43bc85477d341598297f945fae893e7db758bbce9872d0e" Oct 10 17:05:42 crc kubenswrapper[4799]: I1010 17:05:42.570347 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gjplc"] Oct 10 17:05:42 crc kubenswrapper[4799]: I1010 17:05:42.592656 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-gjplc"] Oct 10 17:05:42 crc kubenswrapper[4799]: I1010 17:05:42.601019 4799 scope.go:117] "RemoveContainer" containerID="45330372d6ff44c5632b2584a83da8608ac50fbda99900e1f69dd0460cc244b2" Oct 10 17:05:43 crc kubenswrapper[4799]: I1010 17:05:43.414445 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f01bd688-9703-4aec-a71b-aabf928476a1" path="/var/lib/kubelet/pods/f01bd688-9703-4aec-a71b-aabf928476a1/volumes" Oct 10 17:06:25 crc kubenswrapper[4799]: I1010 17:06:25.859895 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9v8tz"] Oct 10 17:06:25 crc kubenswrapper[4799]: E1010 17:06:25.861321 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f01bd688-9703-4aec-a71b-aabf928476a1" containerName="extract-utilities" Oct 10 17:06:25 crc kubenswrapper[4799]: I1010 17:06:25.861358 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f01bd688-9703-4aec-a71b-aabf928476a1" containerName="extract-utilities" Oct 10 17:06:25 crc kubenswrapper[4799]: E1010 17:06:25.861382 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f01bd688-9703-4aec-a71b-aabf928476a1" containerName="extract-content" Oct 10 17:06:25 crc kubenswrapper[4799]: I1010 17:06:25.861400 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f01bd688-9703-4aec-a71b-aabf928476a1" containerName="extract-content" Oct 10 17:06:25 crc kubenswrapper[4799]: E1010 17:06:25.861439 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f01bd688-9703-4aec-a71b-aabf928476a1" containerName="registry-server" Oct 10 17:06:25 crc kubenswrapper[4799]: I1010 17:06:25.861506 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f01bd688-9703-4aec-a71b-aabf928476a1" containerName="registry-server" Oct 10 17:06:25 crc kubenswrapper[4799]: I1010 17:06:25.861966 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f01bd688-9703-4aec-a71b-aabf928476a1" containerName="registry-server" Oct 10 17:06:25 crc kubenswrapper[4799]: I1010 17:06:25.864376 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9v8tz" Oct 10 17:06:25 crc kubenswrapper[4799]: I1010 17:06:25.877180 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9v8tz"] Oct 10 17:06:25 crc kubenswrapper[4799]: I1010 17:06:25.975351 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8vhq4\" (UniqueName: \"kubernetes.io/projected/fdd20fdb-39fc-437e-b192-b0bb8d6325f1-kube-api-access-8vhq4\") pod \"community-operators-9v8tz\" (UID: \"fdd20fdb-39fc-437e-b192-b0bb8d6325f1\") " pod="openshift-marketplace/community-operators-9v8tz" Oct 10 17:06:25 crc kubenswrapper[4799]: I1010 17:06:25.975774 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fdd20fdb-39fc-437e-b192-b0bb8d6325f1-catalog-content\") pod \"community-operators-9v8tz\" (UID: \"fdd20fdb-39fc-437e-b192-b0bb8d6325f1\") " pod="openshift-marketplace/community-operators-9v8tz" Oct 10 17:06:25 crc kubenswrapper[4799]: I1010 17:06:25.975821 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fdd20fdb-39fc-437e-b192-b0bb8d6325f1-utilities\") pod \"community-operators-9v8tz\" (UID: \"fdd20fdb-39fc-437e-b192-b0bb8d6325f1\") " pod="openshift-marketplace/community-operators-9v8tz" Oct 10 17:06:26 crc kubenswrapper[4799]: I1010 17:06:26.076459 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fdd20fdb-39fc-437e-b192-b0bb8d6325f1-utilities\") pod \"community-operators-9v8tz\" (UID: \"fdd20fdb-39fc-437e-b192-b0bb8d6325f1\") " pod="openshift-marketplace/community-operators-9v8tz" Oct 10 17:06:26 crc kubenswrapper[4799]: I1010 17:06:26.076581 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8vhq4\" (UniqueName: \"kubernetes.io/projected/fdd20fdb-39fc-437e-b192-b0bb8d6325f1-kube-api-access-8vhq4\") pod \"community-operators-9v8tz\" (UID: \"fdd20fdb-39fc-437e-b192-b0bb8d6325f1\") " pod="openshift-marketplace/community-operators-9v8tz" Oct 10 17:06:26 crc kubenswrapper[4799]: I1010 17:06:26.076615 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fdd20fdb-39fc-437e-b192-b0bb8d6325f1-catalog-content\") pod \"community-operators-9v8tz\" (UID: \"fdd20fdb-39fc-437e-b192-b0bb8d6325f1\") " pod="openshift-marketplace/community-operators-9v8tz" Oct 10 17:06:26 crc kubenswrapper[4799]: I1010 17:06:26.077091 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fdd20fdb-39fc-437e-b192-b0bb8d6325f1-catalog-content\") pod \"community-operators-9v8tz\" (UID: \"fdd20fdb-39fc-437e-b192-b0bb8d6325f1\") " pod="openshift-marketplace/community-operators-9v8tz" Oct 10 17:06:26 crc kubenswrapper[4799]: I1010 17:06:26.077356 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fdd20fdb-39fc-437e-b192-b0bb8d6325f1-utilities\") pod \"community-operators-9v8tz\" (UID: \"fdd20fdb-39fc-437e-b192-b0bb8d6325f1\") " pod="openshift-marketplace/community-operators-9v8tz" Oct 10 17:06:26 crc kubenswrapper[4799]: I1010 17:06:26.102931 4799 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-8vhq4\" (UniqueName: \"kubernetes.io/projected/fdd20fdb-39fc-437e-b192-b0bb8d6325f1-kube-api-access-8vhq4\") pod \"community-operators-9v8tz\" (UID: \"fdd20fdb-39fc-437e-b192-b0bb8d6325f1\") " pod="openshift-marketplace/community-operators-9v8tz" Oct 10 17:06:26 crc kubenswrapper[4799]: I1010 17:06:26.186744 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9v8tz" Oct 10 17:06:26 crc kubenswrapper[4799]: I1010 17:06:26.479670 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9v8tz"] Oct 10 17:06:26 crc kubenswrapper[4799]: I1010 17:06:26.886479 4799 generic.go:334] "Generic (PLEG): container finished" podID="fdd20fdb-39fc-437e-b192-b0bb8d6325f1" containerID="1215fb63a40fd31d75cc44005e677423d8f7b2635c85e9b01b88552b02f54b62" exitCode=0 Oct 10 17:06:26 crc kubenswrapper[4799]: I1010 17:06:26.886904 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9v8tz" event={"ID":"fdd20fdb-39fc-437e-b192-b0bb8d6325f1","Type":"ContainerDied","Data":"1215fb63a40fd31d75cc44005e677423d8f7b2635c85e9b01b88552b02f54b62"} Oct 10 17:06:26 crc kubenswrapper[4799]: I1010 17:06:26.886947 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9v8tz" event={"ID":"fdd20fdb-39fc-437e-b192-b0bb8d6325f1","Type":"ContainerStarted","Data":"bff7192e9ab5ab9adc55ec84f2cbd495d7dabbedf79e3257b82c467aa8ed58dc"} Oct 10 17:06:26 crc kubenswrapper[4799]: I1010 17:06:26.889161 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 10 17:06:27 crc kubenswrapper[4799]: I1010 17:06:27.895020 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9v8tz" event={"ID":"fdd20fdb-39fc-437e-b192-b0bb8d6325f1","Type":"ContainerStarted","Data":"b2506d6510bbd5ed76c1d2d42af5c58579b60510735200f0e3291c0db1a9ab3e"} Oct 10 17:06:28 crc kubenswrapper[4799]: I1010 17:06:28.905694 4799 generic.go:334] "Generic (PLEG): container finished" podID="fdd20fdb-39fc-437e-b192-b0bb8d6325f1" containerID="b2506d6510bbd5ed76c1d2d42af5c58579b60510735200f0e3291c0db1a9ab3e" exitCode=0 Oct 10 17:06:28 crc kubenswrapper[4799]: I1010 17:06:28.905749 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9v8tz" event={"ID":"fdd20fdb-39fc-437e-b192-b0bb8d6325f1","Type":"ContainerDied","Data":"b2506d6510bbd5ed76c1d2d42af5c58579b60510735200f0e3291c0db1a9ab3e"} Oct 10 17:06:29 crc kubenswrapper[4799]: I1010 17:06:29.917049 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9v8tz" event={"ID":"fdd20fdb-39fc-437e-b192-b0bb8d6325f1","Type":"ContainerStarted","Data":"02740132262e4f26285fbc2f0ce62de0d7e2da0e7e853353f1b1d982426d8662"} Oct 10 17:06:29 crc kubenswrapper[4799]: I1010 17:06:29.941217 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9v8tz" podStartSLOduration=2.495312863 podStartE2EDuration="4.941191329s" podCreationTimestamp="2025-10-10 17:06:25 +0000 UTC" firstStartedPulling="2025-10-10 17:06:26.888388803 +0000 UTC m=+2080.396712968" lastFinishedPulling="2025-10-10 17:06:29.334267279 +0000 UTC m=+2082.842591434" observedRunningTime="2025-10-10 17:06:29.936969536 +0000 UTC m=+2083.445293691" watchObservedRunningTime="2025-10-10 
17:06:29.941191329 +0000 UTC m=+2083.449515494" Oct 10 17:06:36 crc kubenswrapper[4799]: I1010 17:06:36.187842 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9v8tz" Oct 10 17:06:36 crc kubenswrapper[4799]: I1010 17:06:36.188114 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9v8tz" Oct 10 17:06:36 crc kubenswrapper[4799]: I1010 17:06:36.260913 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9v8tz" Oct 10 17:06:37 crc kubenswrapper[4799]: I1010 17:06:37.041283 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9v8tz" Oct 10 17:06:37 crc kubenswrapper[4799]: I1010 17:06:37.097668 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9v8tz"] Oct 10 17:06:38 crc kubenswrapper[4799]: I1010 17:06:38.997238 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-9v8tz" podUID="fdd20fdb-39fc-437e-b192-b0bb8d6325f1" containerName="registry-server" containerID="cri-o://02740132262e4f26285fbc2f0ce62de0d7e2da0e7e853353f1b1d982426d8662" gracePeriod=2 Oct 10 17:06:40 crc kubenswrapper[4799]: I1010 17:06:40.010784 4799 generic.go:334] "Generic (PLEG): container finished" podID="fdd20fdb-39fc-437e-b192-b0bb8d6325f1" containerID="02740132262e4f26285fbc2f0ce62de0d7e2da0e7e853353f1b1d982426d8662" exitCode=0 Oct 10 17:06:40 crc kubenswrapper[4799]: I1010 17:06:40.010822 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9v8tz" event={"ID":"fdd20fdb-39fc-437e-b192-b0bb8d6325f1","Type":"ContainerDied","Data":"02740132262e4f26285fbc2f0ce62de0d7e2da0e7e853353f1b1d982426d8662"} Oct 10 17:06:40 crc kubenswrapper[4799]: I1010 17:06:40.595589 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9v8tz" Oct 10 17:06:40 crc kubenswrapper[4799]: I1010 17:06:40.788792 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fdd20fdb-39fc-437e-b192-b0bb8d6325f1-catalog-content\") pod \"fdd20fdb-39fc-437e-b192-b0bb8d6325f1\" (UID: \"fdd20fdb-39fc-437e-b192-b0bb8d6325f1\") " Oct 10 17:06:40 crc kubenswrapper[4799]: I1010 17:06:40.788844 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8vhq4\" (UniqueName: \"kubernetes.io/projected/fdd20fdb-39fc-437e-b192-b0bb8d6325f1-kube-api-access-8vhq4\") pod \"fdd20fdb-39fc-437e-b192-b0bb8d6325f1\" (UID: \"fdd20fdb-39fc-437e-b192-b0bb8d6325f1\") " Oct 10 17:06:40 crc kubenswrapper[4799]: I1010 17:06:40.788956 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fdd20fdb-39fc-437e-b192-b0bb8d6325f1-utilities\") pod \"fdd20fdb-39fc-437e-b192-b0bb8d6325f1\" (UID: \"fdd20fdb-39fc-437e-b192-b0bb8d6325f1\") " Oct 10 17:06:40 crc kubenswrapper[4799]: I1010 17:06:40.790984 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fdd20fdb-39fc-437e-b192-b0bb8d6325f1-utilities" (OuterVolumeSpecName: "utilities") pod "fdd20fdb-39fc-437e-b192-b0bb8d6325f1" (UID: "fdd20fdb-39fc-437e-b192-b0bb8d6325f1"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:06:40 crc kubenswrapper[4799]: I1010 17:06:40.800892 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fdd20fdb-39fc-437e-b192-b0bb8d6325f1-kube-api-access-8vhq4" (OuterVolumeSpecName: "kube-api-access-8vhq4") pod "fdd20fdb-39fc-437e-b192-b0bb8d6325f1" (UID: "fdd20fdb-39fc-437e-b192-b0bb8d6325f1"). InnerVolumeSpecName "kube-api-access-8vhq4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:06:40 crc kubenswrapper[4799]: I1010 17:06:40.865237 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fdd20fdb-39fc-437e-b192-b0bb8d6325f1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fdd20fdb-39fc-437e-b192-b0bb8d6325f1" (UID: "fdd20fdb-39fc-437e-b192-b0bb8d6325f1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:06:40 crc kubenswrapper[4799]: I1010 17:06:40.890509 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fdd20fdb-39fc-437e-b192-b0bb8d6325f1-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 17:06:40 crc kubenswrapper[4799]: I1010 17:06:40.890571 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8vhq4\" (UniqueName: \"kubernetes.io/projected/fdd20fdb-39fc-437e-b192-b0bb8d6325f1-kube-api-access-8vhq4\") on node \"crc\" DevicePath \"\"" Oct 10 17:06:40 crc kubenswrapper[4799]: I1010 17:06:40.890590 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fdd20fdb-39fc-437e-b192-b0bb8d6325f1-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 17:06:41 crc kubenswrapper[4799]: I1010 17:06:41.024554 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9v8tz" event={"ID":"fdd20fdb-39fc-437e-b192-b0bb8d6325f1","Type":"ContainerDied","Data":"bff7192e9ab5ab9adc55ec84f2cbd495d7dabbedf79e3257b82c467aa8ed58dc"} Oct 10 17:06:41 crc kubenswrapper[4799]: I1010 17:06:41.024610 4799 scope.go:117] "RemoveContainer" containerID="02740132262e4f26285fbc2f0ce62de0d7e2da0e7e853353f1b1d982426d8662" Oct 10 17:06:41 crc kubenswrapper[4799]: I1010 17:06:41.024742 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9v8tz" Oct 10 17:06:41 crc kubenswrapper[4799]: I1010 17:06:41.065920 4799 scope.go:117] "RemoveContainer" containerID="b2506d6510bbd5ed76c1d2d42af5c58579b60510735200f0e3291c0db1a9ab3e" Oct 10 17:06:41 crc kubenswrapper[4799]: I1010 17:06:41.072353 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9v8tz"] Oct 10 17:06:41 crc kubenswrapper[4799]: I1010 17:06:41.082179 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-9v8tz"] Oct 10 17:06:41 crc kubenswrapper[4799]: I1010 17:06:41.095430 4799 scope.go:117] "RemoveContainer" containerID="1215fb63a40fd31d75cc44005e677423d8f7b2635c85e9b01b88552b02f54b62" Oct 10 17:06:41 crc kubenswrapper[4799]: I1010 17:06:41.419645 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fdd20fdb-39fc-437e-b192-b0bb8d6325f1" path="/var/lib/kubelet/pods/fdd20fdb-39fc-437e-b192-b0bb8d6325f1/volumes" Oct 10 17:06:55 crc kubenswrapper[4799]: I1010 17:06:55.041892 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-7956v"] Oct 10 17:06:55 crc kubenswrapper[4799]: E1010 17:06:55.043474 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdd20fdb-39fc-437e-b192-b0bb8d6325f1" containerName="extract-utilities" Oct 10 17:06:55 crc kubenswrapper[4799]: I1010 17:06:55.043511 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdd20fdb-39fc-437e-b192-b0bb8d6325f1" containerName="extract-utilities" Oct 10 17:06:55 crc kubenswrapper[4799]: E1010 17:06:55.043583 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdd20fdb-39fc-437e-b192-b0bb8d6325f1" containerName="extract-content" Oct 10 17:06:55 crc kubenswrapper[4799]: I1010 17:06:55.043603 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdd20fdb-39fc-437e-b192-b0bb8d6325f1" containerName="extract-content" Oct 10 17:06:55 crc kubenswrapper[4799]: E1010 17:06:55.043638 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdd20fdb-39fc-437e-b192-b0bb8d6325f1" containerName="registry-server" Oct 10 17:06:55 crc kubenswrapper[4799]: I1010 17:06:55.043656 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdd20fdb-39fc-437e-b192-b0bb8d6325f1" containerName="registry-server" Oct 10 17:06:55 crc kubenswrapper[4799]: I1010 17:06:55.044784 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="fdd20fdb-39fc-437e-b192-b0bb8d6325f1" containerName="registry-server" Oct 10 17:06:55 crc kubenswrapper[4799]: I1010 17:06:55.048252 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-7956v" Oct 10 17:06:55 crc kubenswrapper[4799]: I1010 17:06:55.057488 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7956v"] Oct 10 17:06:55 crc kubenswrapper[4799]: I1010 17:06:55.129628 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7jlq\" (UniqueName: \"kubernetes.io/projected/5738a268-1112-42a4-9935-b60b3066e452-kube-api-access-c7jlq\") pod \"certified-operators-7956v\" (UID: \"5738a268-1112-42a4-9935-b60b3066e452\") " pod="openshift-marketplace/certified-operators-7956v" Oct 10 17:06:55 crc kubenswrapper[4799]: I1010 17:06:55.129719 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5738a268-1112-42a4-9935-b60b3066e452-utilities\") pod \"certified-operators-7956v\" (UID: \"5738a268-1112-42a4-9935-b60b3066e452\") " pod="openshift-marketplace/certified-operators-7956v" Oct 10 17:06:55 crc kubenswrapper[4799]: I1010 17:06:55.129774 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5738a268-1112-42a4-9935-b60b3066e452-catalog-content\") pod \"certified-operators-7956v\" (UID: \"5738a268-1112-42a4-9935-b60b3066e452\") " pod="openshift-marketplace/certified-operators-7956v" Oct 10 17:06:55 crc kubenswrapper[4799]: I1010 17:06:55.230778 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7jlq\" (UniqueName: \"kubernetes.io/projected/5738a268-1112-42a4-9935-b60b3066e452-kube-api-access-c7jlq\") pod \"certified-operators-7956v\" (UID: \"5738a268-1112-42a4-9935-b60b3066e452\") " pod="openshift-marketplace/certified-operators-7956v" Oct 10 17:06:55 crc kubenswrapper[4799]: I1010 17:06:55.230908 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5738a268-1112-42a4-9935-b60b3066e452-utilities\") pod \"certified-operators-7956v\" (UID: \"5738a268-1112-42a4-9935-b60b3066e452\") " pod="openshift-marketplace/certified-operators-7956v" Oct 10 17:06:55 crc kubenswrapper[4799]: I1010 17:06:55.230946 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5738a268-1112-42a4-9935-b60b3066e452-catalog-content\") pod \"certified-operators-7956v\" (UID: \"5738a268-1112-42a4-9935-b60b3066e452\") " pod="openshift-marketplace/certified-operators-7956v" Oct 10 17:06:55 crc kubenswrapper[4799]: I1010 17:06:55.231728 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5738a268-1112-42a4-9935-b60b3066e452-catalog-content\") pod \"certified-operators-7956v\" (UID: \"5738a268-1112-42a4-9935-b60b3066e452\") " pod="openshift-marketplace/certified-operators-7956v" Oct 10 17:06:55 crc kubenswrapper[4799]: I1010 17:06:55.231739 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5738a268-1112-42a4-9935-b60b3066e452-utilities\") pod \"certified-operators-7956v\" (UID: \"5738a268-1112-42a4-9935-b60b3066e452\") " pod="openshift-marketplace/certified-operators-7956v" Oct 10 17:06:55 crc kubenswrapper[4799]: I1010 17:06:55.249675 4799 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-c7jlq\" (UniqueName: \"kubernetes.io/projected/5738a268-1112-42a4-9935-b60b3066e452-kube-api-access-c7jlq\") pod \"certified-operators-7956v\" (UID: \"5738a268-1112-42a4-9935-b60b3066e452\") " pod="openshift-marketplace/certified-operators-7956v" Oct 10 17:06:55 crc kubenswrapper[4799]: I1010 17:06:55.387312 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7956v" Oct 10 17:06:55 crc kubenswrapper[4799]: I1010 17:06:55.886942 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7956v"] Oct 10 17:06:55 crc kubenswrapper[4799]: W1010 17:06:55.890062 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5738a268_1112_42a4_9935_b60b3066e452.slice/crio-49895824c9ce5a9a168eab1fca0bbcd0bea53976baf404a02a84a7b28145ac71 WatchSource:0}: Error finding container 49895824c9ce5a9a168eab1fca0bbcd0bea53976baf404a02a84a7b28145ac71: Status 404 returned error can't find the container with id 49895824c9ce5a9a168eab1fca0bbcd0bea53976baf404a02a84a7b28145ac71 Oct 10 17:06:56 crc kubenswrapper[4799]: I1010 17:06:56.176971 4799 generic.go:334] "Generic (PLEG): container finished" podID="5738a268-1112-42a4-9935-b60b3066e452" containerID="b51908b29cfde38c7aa0e6d03e40006306dd840fd45075ad2e24408a0442ac6a" exitCode=0 Oct 10 17:06:56 crc kubenswrapper[4799]: I1010 17:06:56.177017 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7956v" event={"ID":"5738a268-1112-42a4-9935-b60b3066e452","Type":"ContainerDied","Data":"b51908b29cfde38c7aa0e6d03e40006306dd840fd45075ad2e24408a0442ac6a"} Oct 10 17:06:56 crc kubenswrapper[4799]: I1010 17:06:56.177043 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7956v" event={"ID":"5738a268-1112-42a4-9935-b60b3066e452","Type":"ContainerStarted","Data":"49895824c9ce5a9a168eab1fca0bbcd0bea53976baf404a02a84a7b28145ac71"} Oct 10 17:07:00 crc kubenswrapper[4799]: I1010 17:07:00.215710 4799 generic.go:334] "Generic (PLEG): container finished" podID="5738a268-1112-42a4-9935-b60b3066e452" containerID="34bf7480eca8fac208a2682e8a46d4c6a06550fa6e6b3c367e326b8ff006efce" exitCode=0 Oct 10 17:07:00 crc kubenswrapper[4799]: I1010 17:07:00.215816 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7956v" event={"ID":"5738a268-1112-42a4-9935-b60b3066e452","Type":"ContainerDied","Data":"34bf7480eca8fac208a2682e8a46d4c6a06550fa6e6b3c367e326b8ff006efce"} Oct 10 17:07:01 crc kubenswrapper[4799]: I1010 17:07:01.233155 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7956v" event={"ID":"5738a268-1112-42a4-9935-b60b3066e452","Type":"ContainerStarted","Data":"98932b31832494531a1c8cdda1aafb4f3ee641508d486324b29994e7c8b051fc"} Oct 10 17:07:01 crc kubenswrapper[4799]: I1010 17:07:01.274282 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-7956v" podStartSLOduration=1.735231135 podStartE2EDuration="6.274255528s" podCreationTimestamp="2025-10-10 17:06:55 +0000 UTC" firstStartedPulling="2025-10-10 17:06:56.178889167 +0000 UTC m=+2109.687213282" lastFinishedPulling="2025-10-10 17:07:00.71791356 +0000 UTC m=+2114.226237675" observedRunningTime="2025-10-10 17:07:01.259575098 +0000 UTC 
m=+2114.767899293" watchObservedRunningTime="2025-10-10 17:07:01.274255528 +0000 UTC m=+2114.782579683" Oct 10 17:07:05 crc kubenswrapper[4799]: I1010 17:07:05.388199 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-7956v" Oct 10 17:07:05 crc kubenswrapper[4799]: I1010 17:07:05.388615 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-7956v" Oct 10 17:07:05 crc kubenswrapper[4799]: I1010 17:07:05.466924 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-7956v" Oct 10 17:07:06 crc kubenswrapper[4799]: I1010 17:07:06.348084 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-7956v" Oct 10 17:07:06 crc kubenswrapper[4799]: I1010 17:07:06.444781 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7956v"] Oct 10 17:07:06 crc kubenswrapper[4799]: I1010 17:07:06.506651 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-x5rwq"] Oct 10 17:07:06 crc kubenswrapper[4799]: I1010 17:07:06.506896 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-x5rwq" podUID="0f9d3340-b2a7-4571-a317-7f535f9b900a" containerName="registry-server" containerID="cri-o://1c42073bc770f58726975e9504d336e63c172d733341a53aee75558acd928d41" gracePeriod=2 Oct 10 17:07:06 crc kubenswrapper[4799]: I1010 17:07:06.920720 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x5rwq" Oct 10 17:07:07 crc kubenswrapper[4799]: I1010 17:07:07.108910 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f9d3340-b2a7-4571-a317-7f535f9b900a-catalog-content\") pod \"0f9d3340-b2a7-4571-a317-7f535f9b900a\" (UID: \"0f9d3340-b2a7-4571-a317-7f535f9b900a\") " Oct 10 17:07:07 crc kubenswrapper[4799]: I1010 17:07:07.108972 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f9d3340-b2a7-4571-a317-7f535f9b900a-utilities\") pod \"0f9d3340-b2a7-4571-a317-7f535f9b900a\" (UID: \"0f9d3340-b2a7-4571-a317-7f535f9b900a\") " Oct 10 17:07:07 crc kubenswrapper[4799]: I1010 17:07:07.109020 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dx2gj\" (UniqueName: \"kubernetes.io/projected/0f9d3340-b2a7-4571-a317-7f535f9b900a-kube-api-access-dx2gj\") pod \"0f9d3340-b2a7-4571-a317-7f535f9b900a\" (UID: \"0f9d3340-b2a7-4571-a317-7f535f9b900a\") " Oct 10 17:07:07 crc kubenswrapper[4799]: I1010 17:07:07.110054 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0f9d3340-b2a7-4571-a317-7f535f9b900a-utilities" (OuterVolumeSpecName: "utilities") pod "0f9d3340-b2a7-4571-a317-7f535f9b900a" (UID: "0f9d3340-b2a7-4571-a317-7f535f9b900a"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:07:07 crc kubenswrapper[4799]: I1010 17:07:07.115910 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f9d3340-b2a7-4571-a317-7f535f9b900a-kube-api-access-dx2gj" (OuterVolumeSpecName: "kube-api-access-dx2gj") pod "0f9d3340-b2a7-4571-a317-7f535f9b900a" (UID: "0f9d3340-b2a7-4571-a317-7f535f9b900a"). InnerVolumeSpecName "kube-api-access-dx2gj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:07:07 crc kubenswrapper[4799]: I1010 17:07:07.151924 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0f9d3340-b2a7-4571-a317-7f535f9b900a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0f9d3340-b2a7-4571-a317-7f535f9b900a" (UID: "0f9d3340-b2a7-4571-a317-7f535f9b900a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:07:07 crc kubenswrapper[4799]: I1010 17:07:07.210207 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f9d3340-b2a7-4571-a317-7f535f9b900a-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 17:07:07 crc kubenswrapper[4799]: I1010 17:07:07.210437 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f9d3340-b2a7-4571-a317-7f535f9b900a-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 17:07:07 crc kubenswrapper[4799]: I1010 17:07:07.210519 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dx2gj\" (UniqueName: \"kubernetes.io/projected/0f9d3340-b2a7-4571-a317-7f535f9b900a-kube-api-access-dx2gj\") on node \"crc\" DevicePath \"\"" Oct 10 17:07:07 crc kubenswrapper[4799]: I1010 17:07:07.298318 4799 generic.go:334] "Generic (PLEG): container finished" podID="0f9d3340-b2a7-4571-a317-7f535f9b900a" containerID="1c42073bc770f58726975e9504d336e63c172d733341a53aee75558acd928d41" exitCode=0 Oct 10 17:07:07 crc kubenswrapper[4799]: I1010 17:07:07.298400 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x5rwq" event={"ID":"0f9d3340-b2a7-4571-a317-7f535f9b900a","Type":"ContainerDied","Data":"1c42073bc770f58726975e9504d336e63c172d733341a53aee75558acd928d41"} Oct 10 17:07:07 crc kubenswrapper[4799]: I1010 17:07:07.298457 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-x5rwq" Oct 10 17:07:07 crc kubenswrapper[4799]: I1010 17:07:07.298478 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x5rwq" event={"ID":"0f9d3340-b2a7-4571-a317-7f535f9b900a","Type":"ContainerDied","Data":"e5572e5f19a7967308d170313402efd27e647d650997a51f2006fd8abc9be112"} Oct 10 17:07:07 crc kubenswrapper[4799]: I1010 17:07:07.298522 4799 scope.go:117] "RemoveContainer" containerID="1c42073bc770f58726975e9504d336e63c172d733341a53aee75558acd928d41" Oct 10 17:07:07 crc kubenswrapper[4799]: I1010 17:07:07.332633 4799 scope.go:117] "RemoveContainer" containerID="c7599865add37e644f9b406cb6c3bd9ade63a0afa8d6ae73166eacd4a887d4c3" Oct 10 17:07:07 crc kubenswrapper[4799]: I1010 17:07:07.358773 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-x5rwq"] Oct 10 17:07:07 crc kubenswrapper[4799]: I1010 17:07:07.359890 4799 scope.go:117] "RemoveContainer" containerID="e83d38507cc835e2393d5f5f33f746924ebfa4676172630efac2079f5b8674eb" Oct 10 17:07:07 crc kubenswrapper[4799]: I1010 17:07:07.365606 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-x5rwq"] Oct 10 17:07:07 crc kubenswrapper[4799]: I1010 17:07:07.384498 4799 scope.go:117] "RemoveContainer" containerID="1c42073bc770f58726975e9504d336e63c172d733341a53aee75558acd928d41" Oct 10 17:07:07 crc kubenswrapper[4799]: E1010 17:07:07.389410 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c42073bc770f58726975e9504d336e63c172d733341a53aee75558acd928d41\": container with ID starting with 1c42073bc770f58726975e9504d336e63c172d733341a53aee75558acd928d41 not found: ID does not exist" containerID="1c42073bc770f58726975e9504d336e63c172d733341a53aee75558acd928d41" Oct 10 17:07:07 crc kubenswrapper[4799]: I1010 17:07:07.389458 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c42073bc770f58726975e9504d336e63c172d733341a53aee75558acd928d41"} err="failed to get container status \"1c42073bc770f58726975e9504d336e63c172d733341a53aee75558acd928d41\": rpc error: code = NotFound desc = could not find container \"1c42073bc770f58726975e9504d336e63c172d733341a53aee75558acd928d41\": container with ID starting with 1c42073bc770f58726975e9504d336e63c172d733341a53aee75558acd928d41 not found: ID does not exist" Oct 10 17:07:07 crc kubenswrapper[4799]: I1010 17:07:07.389502 4799 scope.go:117] "RemoveContainer" containerID="c7599865add37e644f9b406cb6c3bd9ade63a0afa8d6ae73166eacd4a887d4c3" Oct 10 17:07:07 crc kubenswrapper[4799]: E1010 17:07:07.391198 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c7599865add37e644f9b406cb6c3bd9ade63a0afa8d6ae73166eacd4a887d4c3\": container with ID starting with c7599865add37e644f9b406cb6c3bd9ade63a0afa8d6ae73166eacd4a887d4c3 not found: ID does not exist" containerID="c7599865add37e644f9b406cb6c3bd9ade63a0afa8d6ae73166eacd4a887d4c3" Oct 10 17:07:07 crc kubenswrapper[4799]: I1010 17:07:07.391243 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7599865add37e644f9b406cb6c3bd9ade63a0afa8d6ae73166eacd4a887d4c3"} err="failed to get container status \"c7599865add37e644f9b406cb6c3bd9ade63a0afa8d6ae73166eacd4a887d4c3\": rpc error: code = NotFound desc = could not find 
container \"c7599865add37e644f9b406cb6c3bd9ade63a0afa8d6ae73166eacd4a887d4c3\": container with ID starting with c7599865add37e644f9b406cb6c3bd9ade63a0afa8d6ae73166eacd4a887d4c3 not found: ID does not exist" Oct 10 17:07:07 crc kubenswrapper[4799]: I1010 17:07:07.391271 4799 scope.go:117] "RemoveContainer" containerID="e83d38507cc835e2393d5f5f33f746924ebfa4676172630efac2079f5b8674eb" Oct 10 17:07:07 crc kubenswrapper[4799]: E1010 17:07:07.391633 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e83d38507cc835e2393d5f5f33f746924ebfa4676172630efac2079f5b8674eb\": container with ID starting with e83d38507cc835e2393d5f5f33f746924ebfa4676172630efac2079f5b8674eb not found: ID does not exist" containerID="e83d38507cc835e2393d5f5f33f746924ebfa4676172630efac2079f5b8674eb" Oct 10 17:07:07 crc kubenswrapper[4799]: I1010 17:07:07.391660 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e83d38507cc835e2393d5f5f33f746924ebfa4676172630efac2079f5b8674eb"} err="failed to get container status \"e83d38507cc835e2393d5f5f33f746924ebfa4676172630efac2079f5b8674eb\": rpc error: code = NotFound desc = could not find container \"e83d38507cc835e2393d5f5f33f746924ebfa4676172630efac2079f5b8674eb\": container with ID starting with e83d38507cc835e2393d5f5f33f746924ebfa4676172630efac2079f5b8674eb not found: ID does not exist" Oct 10 17:07:07 crc kubenswrapper[4799]: I1010 17:07:07.414345 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f9d3340-b2a7-4571-a317-7f535f9b900a" path="/var/lib/kubelet/pods/0f9d3340-b2a7-4571-a317-7f535f9b900a/volumes" Oct 10 17:07:07 crc kubenswrapper[4799]: E1010 17:07:07.446413 4799 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0f9d3340_b2a7_4571_a317_7f535f9b900a.slice\": RecentStats: unable to find data in memory cache]" Oct 10 17:07:15 crc kubenswrapper[4799]: I1010 17:07:15.249444 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 17:07:15 crc kubenswrapper[4799]: I1010 17:07:15.250136 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 17:07:45 crc kubenswrapper[4799]: I1010 17:07:45.249009 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 17:07:45 crc kubenswrapper[4799]: I1010 17:07:45.249718 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 17:08:15 crc 
kubenswrapper[4799]: I1010 17:08:15.248984 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 17:08:15 crc kubenswrapper[4799]: I1010 17:08:15.249876 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 17:08:15 crc kubenswrapper[4799]: I1010 17:08:15.249941 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 17:08:15 crc kubenswrapper[4799]: I1010 17:08:15.250732 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3ee83a672ba2f7803e7c4f7cecdddbaee1838a288065f4f6358434432017c356"} pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 10 17:08:15 crc kubenswrapper[4799]: I1010 17:08:15.250859 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" containerID="cri-o://3ee83a672ba2f7803e7c4f7cecdddbaee1838a288065f4f6358434432017c356" gracePeriod=600 Oct 10 17:08:15 crc kubenswrapper[4799]: E1010 17:08:15.391659 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:08:15 crc kubenswrapper[4799]: I1010 17:08:15.980073 4799 generic.go:334] "Generic (PLEG): container finished" podID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerID="3ee83a672ba2f7803e7c4f7cecdddbaee1838a288065f4f6358434432017c356" exitCode=0 Oct 10 17:08:15 crc kubenswrapper[4799]: I1010 17:08:15.980118 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerDied","Data":"3ee83a672ba2f7803e7c4f7cecdddbaee1838a288065f4f6358434432017c356"} Oct 10 17:08:15 crc kubenswrapper[4799]: I1010 17:08:15.981016 4799 scope.go:117] "RemoveContainer" containerID="4dd1886b500c1dcdf7222359795c4b97be8fff09a255efaef533259216eb6900" Oct 10 17:08:15 crc kubenswrapper[4799]: I1010 17:08:15.981444 4799 scope.go:117] "RemoveContainer" containerID="3ee83a672ba2f7803e7c4f7cecdddbaee1838a288065f4f6358434432017c356" Oct 10 17:08:15 crc kubenswrapper[4799]: E1010 17:08:15.981715 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:08:28 crc kubenswrapper[4799]: I1010 17:08:28.403587 4799 scope.go:117] "RemoveContainer" containerID="3ee83a672ba2f7803e7c4f7cecdddbaee1838a288065f4f6358434432017c356" Oct 10 17:08:28 crc kubenswrapper[4799]: E1010 17:08:28.404662 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:08:41 crc kubenswrapper[4799]: I1010 17:08:41.412038 4799 scope.go:117] "RemoveContainer" containerID="3ee83a672ba2f7803e7c4f7cecdddbaee1838a288065f4f6358434432017c356" Oct 10 17:08:41 crc kubenswrapper[4799]: E1010 17:08:41.413229 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:08:53 crc kubenswrapper[4799]: I1010 17:08:53.403520 4799 scope.go:117] "RemoveContainer" containerID="3ee83a672ba2f7803e7c4f7cecdddbaee1838a288065f4f6358434432017c356" Oct 10 17:08:53 crc kubenswrapper[4799]: E1010 17:08:53.405243 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:09:07 crc kubenswrapper[4799]: I1010 17:09:07.412969 4799 scope.go:117] "RemoveContainer" containerID="3ee83a672ba2f7803e7c4f7cecdddbaee1838a288065f4f6358434432017c356" Oct 10 17:09:07 crc kubenswrapper[4799]: E1010 17:09:07.414818 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:09:20 crc kubenswrapper[4799]: I1010 17:09:20.402671 4799 scope.go:117] "RemoveContainer" containerID="3ee83a672ba2f7803e7c4f7cecdddbaee1838a288065f4f6358434432017c356" Oct 10 17:09:20 crc kubenswrapper[4799]: E1010 17:09:20.403864 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" 
podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:09:33 crc kubenswrapper[4799]: I1010 17:09:33.402569 4799 scope.go:117] "RemoveContainer" containerID="3ee83a672ba2f7803e7c4f7cecdddbaee1838a288065f4f6358434432017c356" Oct 10 17:09:33 crc kubenswrapper[4799]: E1010 17:09:33.403363 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:09:46 crc kubenswrapper[4799]: I1010 17:09:46.402847 4799 scope.go:117] "RemoveContainer" containerID="3ee83a672ba2f7803e7c4f7cecdddbaee1838a288065f4f6358434432017c356" Oct 10 17:09:46 crc kubenswrapper[4799]: E1010 17:09:46.403576 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:10:01 crc kubenswrapper[4799]: I1010 17:10:01.402667 4799 scope.go:117] "RemoveContainer" containerID="3ee83a672ba2f7803e7c4f7cecdddbaee1838a288065f4f6358434432017c356" Oct 10 17:10:01 crc kubenswrapper[4799]: E1010 17:10:01.405401 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:10:15 crc kubenswrapper[4799]: I1010 17:10:15.403362 4799 scope.go:117] "RemoveContainer" containerID="3ee83a672ba2f7803e7c4f7cecdddbaee1838a288065f4f6358434432017c356" Oct 10 17:10:15 crc kubenswrapper[4799]: E1010 17:10:15.404535 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:10:30 crc kubenswrapper[4799]: I1010 17:10:30.402717 4799 scope.go:117] "RemoveContainer" containerID="3ee83a672ba2f7803e7c4f7cecdddbaee1838a288065f4f6358434432017c356" Oct 10 17:10:30 crc kubenswrapper[4799]: E1010 17:10:30.403697 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:10:44 crc kubenswrapper[4799]: I1010 17:10:44.402689 4799 scope.go:117] "RemoveContainer" 
containerID="3ee83a672ba2f7803e7c4f7cecdddbaee1838a288065f4f6358434432017c356" Oct 10 17:10:44 crc kubenswrapper[4799]: E1010 17:10:44.403651 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:10:59 crc kubenswrapper[4799]: I1010 17:10:59.403411 4799 scope.go:117] "RemoveContainer" containerID="3ee83a672ba2f7803e7c4f7cecdddbaee1838a288065f4f6358434432017c356" Oct 10 17:10:59 crc kubenswrapper[4799]: E1010 17:10:59.404907 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:11:13 crc kubenswrapper[4799]: I1010 17:11:13.403824 4799 scope.go:117] "RemoveContainer" containerID="3ee83a672ba2f7803e7c4f7cecdddbaee1838a288065f4f6358434432017c356" Oct 10 17:11:13 crc kubenswrapper[4799]: E1010 17:11:13.404727 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:11:26 crc kubenswrapper[4799]: I1010 17:11:26.402461 4799 scope.go:117] "RemoveContainer" containerID="3ee83a672ba2f7803e7c4f7cecdddbaee1838a288065f4f6358434432017c356" Oct 10 17:11:26 crc kubenswrapper[4799]: E1010 17:11:26.403365 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:11:39 crc kubenswrapper[4799]: I1010 17:11:39.801684 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vnfwr"] Oct 10 17:11:39 crc kubenswrapper[4799]: E1010 17:11:39.802782 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f9d3340-b2a7-4571-a317-7f535f9b900a" containerName="registry-server" Oct 10 17:11:39 crc kubenswrapper[4799]: I1010 17:11:39.802806 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f9d3340-b2a7-4571-a317-7f535f9b900a" containerName="registry-server" Oct 10 17:11:39 crc kubenswrapper[4799]: E1010 17:11:39.802843 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f9d3340-b2a7-4571-a317-7f535f9b900a" containerName="extract-content" Oct 10 17:11:39 crc kubenswrapper[4799]: I1010 17:11:39.802855 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f9d3340-b2a7-4571-a317-7f535f9b900a" containerName="extract-content" Oct 
10 17:11:39 crc kubenswrapper[4799]: E1010 17:11:39.802891 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f9d3340-b2a7-4571-a317-7f535f9b900a" containerName="extract-utilities" Oct 10 17:11:39 crc kubenswrapper[4799]: I1010 17:11:39.802903 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f9d3340-b2a7-4571-a317-7f535f9b900a" containerName="extract-utilities" Oct 10 17:11:39 crc kubenswrapper[4799]: I1010 17:11:39.803127 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f9d3340-b2a7-4571-a317-7f535f9b900a" containerName="registry-server" Oct 10 17:11:39 crc kubenswrapper[4799]: I1010 17:11:39.804644 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vnfwr" Oct 10 17:11:39 crc kubenswrapper[4799]: I1010 17:11:39.823573 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vnfwr"] Oct 10 17:11:39 crc kubenswrapper[4799]: I1010 17:11:39.903121 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e57ee11-d566-4fcc-bd5e-aa8e47b1322f-utilities\") pod \"redhat-marketplace-vnfwr\" (UID: \"0e57ee11-d566-4fcc-bd5e-aa8e47b1322f\") " pod="openshift-marketplace/redhat-marketplace-vnfwr" Oct 10 17:11:39 crc kubenswrapper[4799]: I1010 17:11:39.903207 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e57ee11-d566-4fcc-bd5e-aa8e47b1322f-catalog-content\") pod \"redhat-marketplace-vnfwr\" (UID: \"0e57ee11-d566-4fcc-bd5e-aa8e47b1322f\") " pod="openshift-marketplace/redhat-marketplace-vnfwr" Oct 10 17:11:39 crc kubenswrapper[4799]: I1010 17:11:39.903249 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8x7k\" (UniqueName: \"kubernetes.io/projected/0e57ee11-d566-4fcc-bd5e-aa8e47b1322f-kube-api-access-k8x7k\") pod \"redhat-marketplace-vnfwr\" (UID: \"0e57ee11-d566-4fcc-bd5e-aa8e47b1322f\") " pod="openshift-marketplace/redhat-marketplace-vnfwr" Oct 10 17:11:40 crc kubenswrapper[4799]: I1010 17:11:40.004112 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k8x7k\" (UniqueName: \"kubernetes.io/projected/0e57ee11-d566-4fcc-bd5e-aa8e47b1322f-kube-api-access-k8x7k\") pod \"redhat-marketplace-vnfwr\" (UID: \"0e57ee11-d566-4fcc-bd5e-aa8e47b1322f\") " pod="openshift-marketplace/redhat-marketplace-vnfwr" Oct 10 17:11:40 crc kubenswrapper[4799]: I1010 17:11:40.004464 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e57ee11-d566-4fcc-bd5e-aa8e47b1322f-utilities\") pod \"redhat-marketplace-vnfwr\" (UID: \"0e57ee11-d566-4fcc-bd5e-aa8e47b1322f\") " pod="openshift-marketplace/redhat-marketplace-vnfwr" Oct 10 17:11:40 crc kubenswrapper[4799]: I1010 17:11:40.004613 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e57ee11-d566-4fcc-bd5e-aa8e47b1322f-catalog-content\") pod \"redhat-marketplace-vnfwr\" (UID: \"0e57ee11-d566-4fcc-bd5e-aa8e47b1322f\") " pod="openshift-marketplace/redhat-marketplace-vnfwr" Oct 10 17:11:40 crc kubenswrapper[4799]: I1010 17:11:40.005339 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/0e57ee11-d566-4fcc-bd5e-aa8e47b1322f-catalog-content\") pod \"redhat-marketplace-vnfwr\" (UID: \"0e57ee11-d566-4fcc-bd5e-aa8e47b1322f\") " pod="openshift-marketplace/redhat-marketplace-vnfwr" Oct 10 17:11:40 crc kubenswrapper[4799]: I1010 17:11:40.005333 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e57ee11-d566-4fcc-bd5e-aa8e47b1322f-utilities\") pod \"redhat-marketplace-vnfwr\" (UID: \"0e57ee11-d566-4fcc-bd5e-aa8e47b1322f\") " pod="openshift-marketplace/redhat-marketplace-vnfwr" Oct 10 17:11:40 crc kubenswrapper[4799]: I1010 17:11:40.038631 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k8x7k\" (UniqueName: \"kubernetes.io/projected/0e57ee11-d566-4fcc-bd5e-aa8e47b1322f-kube-api-access-k8x7k\") pod \"redhat-marketplace-vnfwr\" (UID: \"0e57ee11-d566-4fcc-bd5e-aa8e47b1322f\") " pod="openshift-marketplace/redhat-marketplace-vnfwr" Oct 10 17:11:40 crc kubenswrapper[4799]: I1010 17:11:40.132644 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vnfwr" Oct 10 17:11:40 crc kubenswrapper[4799]: I1010 17:11:40.408812 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vnfwr"] Oct 10 17:11:40 crc kubenswrapper[4799]: I1010 17:11:40.936816 4799 generic.go:334] "Generic (PLEG): container finished" podID="0e57ee11-d566-4fcc-bd5e-aa8e47b1322f" containerID="29aa827f7c87872b4434aa3690a73514aab6797f1598578cf011bb7550694f37" exitCode=0 Oct 10 17:11:40 crc kubenswrapper[4799]: I1010 17:11:40.936920 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vnfwr" event={"ID":"0e57ee11-d566-4fcc-bd5e-aa8e47b1322f","Type":"ContainerDied","Data":"29aa827f7c87872b4434aa3690a73514aab6797f1598578cf011bb7550694f37"} Oct 10 17:11:40 crc kubenswrapper[4799]: I1010 17:11:40.938981 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vnfwr" event={"ID":"0e57ee11-d566-4fcc-bd5e-aa8e47b1322f","Type":"ContainerStarted","Data":"60b17d6c74f8575389887f00f9fd926c9515c2450b56a87f81a8e7e002cf0a59"} Oct 10 17:11:40 crc kubenswrapper[4799]: I1010 17:11:40.939959 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 10 17:11:41 crc kubenswrapper[4799]: I1010 17:11:41.401859 4799 scope.go:117] "RemoveContainer" containerID="3ee83a672ba2f7803e7c4f7cecdddbaee1838a288065f4f6358434432017c356" Oct 10 17:11:41 crc kubenswrapper[4799]: E1010 17:11:41.402160 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:11:41 crc kubenswrapper[4799]: I1010 17:11:41.950667 4799 generic.go:334] "Generic (PLEG): container finished" podID="0e57ee11-d566-4fcc-bd5e-aa8e47b1322f" containerID="f8f70f7b474a3c825eb5c12975ec0ecb8e6a6aa661893adacc5564d4dc02c4fa" exitCode=0 Oct 10 17:11:41 crc kubenswrapper[4799]: I1010 17:11:41.950730 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vnfwr" 
event={"ID":"0e57ee11-d566-4fcc-bd5e-aa8e47b1322f","Type":"ContainerDied","Data":"f8f70f7b474a3c825eb5c12975ec0ecb8e6a6aa661893adacc5564d4dc02c4fa"} Oct 10 17:11:42 crc kubenswrapper[4799]: I1010 17:11:42.960123 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vnfwr" event={"ID":"0e57ee11-d566-4fcc-bd5e-aa8e47b1322f","Type":"ContainerStarted","Data":"34cda3fd76deccfdcb565c6eed32efb4c579fb1006333c82629cd3d056f1736c"} Oct 10 17:11:42 crc kubenswrapper[4799]: I1010 17:11:42.987729 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vnfwr" podStartSLOduration=2.256531892 podStartE2EDuration="3.987709325s" podCreationTimestamp="2025-10-10 17:11:39 +0000 UTC" firstStartedPulling="2025-10-10 17:11:40.939455303 +0000 UTC m=+2394.447779458" lastFinishedPulling="2025-10-10 17:11:42.670632746 +0000 UTC m=+2396.178956891" observedRunningTime="2025-10-10 17:11:42.986070126 +0000 UTC m=+2396.494394251" watchObservedRunningTime="2025-10-10 17:11:42.987709325 +0000 UTC m=+2396.496033450" Oct 10 17:11:50 crc kubenswrapper[4799]: I1010 17:11:50.133226 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vnfwr" Oct 10 17:11:50 crc kubenswrapper[4799]: I1010 17:11:50.134048 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vnfwr" Oct 10 17:11:50 crc kubenswrapper[4799]: I1010 17:11:50.210984 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vnfwr" Oct 10 17:11:51 crc kubenswrapper[4799]: I1010 17:11:51.113316 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vnfwr" Oct 10 17:11:51 crc kubenswrapper[4799]: I1010 17:11:51.190150 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vnfwr"] Oct 10 17:11:52 crc kubenswrapper[4799]: I1010 17:11:52.403245 4799 scope.go:117] "RemoveContainer" containerID="3ee83a672ba2f7803e7c4f7cecdddbaee1838a288065f4f6358434432017c356" Oct 10 17:11:52 crc kubenswrapper[4799]: E1010 17:11:52.403624 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:11:53 crc kubenswrapper[4799]: I1010 17:11:53.057117 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vnfwr" podUID="0e57ee11-d566-4fcc-bd5e-aa8e47b1322f" containerName="registry-server" containerID="cri-o://34cda3fd76deccfdcb565c6eed32efb4c579fb1006333c82629cd3d056f1736c" gracePeriod=2 Oct 10 17:11:53 crc kubenswrapper[4799]: I1010 17:11:53.461823 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vnfwr" Oct 10 17:11:53 crc kubenswrapper[4799]: I1010 17:11:53.635092 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e57ee11-d566-4fcc-bd5e-aa8e47b1322f-utilities\") pod \"0e57ee11-d566-4fcc-bd5e-aa8e47b1322f\" (UID: \"0e57ee11-d566-4fcc-bd5e-aa8e47b1322f\") " Oct 10 17:11:53 crc kubenswrapper[4799]: I1010 17:11:53.635212 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k8x7k\" (UniqueName: \"kubernetes.io/projected/0e57ee11-d566-4fcc-bd5e-aa8e47b1322f-kube-api-access-k8x7k\") pod \"0e57ee11-d566-4fcc-bd5e-aa8e47b1322f\" (UID: \"0e57ee11-d566-4fcc-bd5e-aa8e47b1322f\") " Oct 10 17:11:53 crc kubenswrapper[4799]: I1010 17:11:53.635265 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e57ee11-d566-4fcc-bd5e-aa8e47b1322f-catalog-content\") pod \"0e57ee11-d566-4fcc-bd5e-aa8e47b1322f\" (UID: \"0e57ee11-d566-4fcc-bd5e-aa8e47b1322f\") " Oct 10 17:11:53 crc kubenswrapper[4799]: I1010 17:11:53.636729 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e57ee11-d566-4fcc-bd5e-aa8e47b1322f-utilities" (OuterVolumeSpecName: "utilities") pod "0e57ee11-d566-4fcc-bd5e-aa8e47b1322f" (UID: "0e57ee11-d566-4fcc-bd5e-aa8e47b1322f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:11:53 crc kubenswrapper[4799]: I1010 17:11:53.648842 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e57ee11-d566-4fcc-bd5e-aa8e47b1322f-kube-api-access-k8x7k" (OuterVolumeSpecName: "kube-api-access-k8x7k") pod "0e57ee11-d566-4fcc-bd5e-aa8e47b1322f" (UID: "0e57ee11-d566-4fcc-bd5e-aa8e47b1322f"). InnerVolumeSpecName "kube-api-access-k8x7k". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:11:53 crc kubenswrapper[4799]: I1010 17:11:53.656847 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e57ee11-d566-4fcc-bd5e-aa8e47b1322f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0e57ee11-d566-4fcc-bd5e-aa8e47b1322f" (UID: "0e57ee11-d566-4fcc-bd5e-aa8e47b1322f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:11:53 crc kubenswrapper[4799]: I1010 17:11:53.737412 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k8x7k\" (UniqueName: \"kubernetes.io/projected/0e57ee11-d566-4fcc-bd5e-aa8e47b1322f-kube-api-access-k8x7k\") on node \"crc\" DevicePath \"\"" Oct 10 17:11:53 crc kubenswrapper[4799]: I1010 17:11:53.737491 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e57ee11-d566-4fcc-bd5e-aa8e47b1322f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 17:11:53 crc kubenswrapper[4799]: I1010 17:11:53.737514 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e57ee11-d566-4fcc-bd5e-aa8e47b1322f-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 17:11:54 crc kubenswrapper[4799]: I1010 17:11:54.071428 4799 generic.go:334] "Generic (PLEG): container finished" podID="0e57ee11-d566-4fcc-bd5e-aa8e47b1322f" containerID="34cda3fd76deccfdcb565c6eed32efb4c579fb1006333c82629cd3d056f1736c" exitCode=0 Oct 10 17:11:54 crc kubenswrapper[4799]: I1010 17:11:54.071494 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vnfwr" event={"ID":"0e57ee11-d566-4fcc-bd5e-aa8e47b1322f","Type":"ContainerDied","Data":"34cda3fd76deccfdcb565c6eed32efb4c579fb1006333c82629cd3d056f1736c"} Oct 10 17:11:54 crc kubenswrapper[4799]: I1010 17:11:54.071848 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vnfwr" event={"ID":"0e57ee11-d566-4fcc-bd5e-aa8e47b1322f","Type":"ContainerDied","Data":"60b17d6c74f8575389887f00f9fd926c9515c2450b56a87f81a8e7e002cf0a59"} Oct 10 17:11:54 crc kubenswrapper[4799]: I1010 17:11:54.071896 4799 scope.go:117] "RemoveContainer" containerID="34cda3fd76deccfdcb565c6eed32efb4c579fb1006333c82629cd3d056f1736c" Oct 10 17:11:54 crc kubenswrapper[4799]: I1010 17:11:54.071541 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vnfwr" Oct 10 17:11:54 crc kubenswrapper[4799]: I1010 17:11:54.118408 4799 scope.go:117] "RemoveContainer" containerID="f8f70f7b474a3c825eb5c12975ec0ecb8e6a6aa661893adacc5564d4dc02c4fa" Oct 10 17:11:54 crc kubenswrapper[4799]: I1010 17:11:54.144912 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vnfwr"] Oct 10 17:11:54 crc kubenswrapper[4799]: I1010 17:11:54.157167 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vnfwr"] Oct 10 17:11:54 crc kubenswrapper[4799]: I1010 17:11:54.159545 4799 scope.go:117] "RemoveContainer" containerID="29aa827f7c87872b4434aa3690a73514aab6797f1598578cf011bb7550694f37" Oct 10 17:11:54 crc kubenswrapper[4799]: I1010 17:11:54.214915 4799 scope.go:117] "RemoveContainer" containerID="34cda3fd76deccfdcb565c6eed32efb4c579fb1006333c82629cd3d056f1736c" Oct 10 17:11:54 crc kubenswrapper[4799]: E1010 17:11:54.215540 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34cda3fd76deccfdcb565c6eed32efb4c579fb1006333c82629cd3d056f1736c\": container with ID starting with 34cda3fd76deccfdcb565c6eed32efb4c579fb1006333c82629cd3d056f1736c not found: ID does not exist" containerID="34cda3fd76deccfdcb565c6eed32efb4c579fb1006333c82629cd3d056f1736c" Oct 10 17:11:54 crc kubenswrapper[4799]: I1010 17:11:54.215597 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34cda3fd76deccfdcb565c6eed32efb4c579fb1006333c82629cd3d056f1736c"} err="failed to get container status \"34cda3fd76deccfdcb565c6eed32efb4c579fb1006333c82629cd3d056f1736c\": rpc error: code = NotFound desc = could not find container \"34cda3fd76deccfdcb565c6eed32efb4c579fb1006333c82629cd3d056f1736c\": container with ID starting with 34cda3fd76deccfdcb565c6eed32efb4c579fb1006333c82629cd3d056f1736c not found: ID does not exist" Oct 10 17:11:54 crc kubenswrapper[4799]: I1010 17:11:54.215629 4799 scope.go:117] "RemoveContainer" containerID="f8f70f7b474a3c825eb5c12975ec0ecb8e6a6aa661893adacc5564d4dc02c4fa" Oct 10 17:11:54 crc kubenswrapper[4799]: E1010 17:11:54.216021 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8f70f7b474a3c825eb5c12975ec0ecb8e6a6aa661893adacc5564d4dc02c4fa\": container with ID starting with f8f70f7b474a3c825eb5c12975ec0ecb8e6a6aa661893adacc5564d4dc02c4fa not found: ID does not exist" containerID="f8f70f7b474a3c825eb5c12975ec0ecb8e6a6aa661893adacc5564d4dc02c4fa" Oct 10 17:11:54 crc kubenswrapper[4799]: I1010 17:11:54.216153 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8f70f7b474a3c825eb5c12975ec0ecb8e6a6aa661893adacc5564d4dc02c4fa"} err="failed to get container status \"f8f70f7b474a3c825eb5c12975ec0ecb8e6a6aa661893adacc5564d4dc02c4fa\": rpc error: code = NotFound desc = could not find container \"f8f70f7b474a3c825eb5c12975ec0ecb8e6a6aa661893adacc5564d4dc02c4fa\": container with ID starting with f8f70f7b474a3c825eb5c12975ec0ecb8e6a6aa661893adacc5564d4dc02c4fa not found: ID does not exist" Oct 10 17:11:54 crc kubenswrapper[4799]: I1010 17:11:54.216332 4799 scope.go:117] "RemoveContainer" containerID="29aa827f7c87872b4434aa3690a73514aab6797f1598578cf011bb7550694f37" Oct 10 17:11:54 crc kubenswrapper[4799]: E1010 17:11:54.217089 4799 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"29aa827f7c87872b4434aa3690a73514aab6797f1598578cf011bb7550694f37\": container with ID starting with 29aa827f7c87872b4434aa3690a73514aab6797f1598578cf011bb7550694f37 not found: ID does not exist" containerID="29aa827f7c87872b4434aa3690a73514aab6797f1598578cf011bb7550694f37" Oct 10 17:11:54 crc kubenswrapper[4799]: I1010 17:11:54.217212 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29aa827f7c87872b4434aa3690a73514aab6797f1598578cf011bb7550694f37"} err="failed to get container status \"29aa827f7c87872b4434aa3690a73514aab6797f1598578cf011bb7550694f37\": rpc error: code = NotFound desc = could not find container \"29aa827f7c87872b4434aa3690a73514aab6797f1598578cf011bb7550694f37\": container with ID starting with 29aa827f7c87872b4434aa3690a73514aab6797f1598578cf011bb7550694f37 not found: ID does not exist" Oct 10 17:11:55 crc kubenswrapper[4799]: I1010 17:11:55.417627 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e57ee11-d566-4fcc-bd5e-aa8e47b1322f" path="/var/lib/kubelet/pods/0e57ee11-d566-4fcc-bd5e-aa8e47b1322f/volumes" Oct 10 17:12:06 crc kubenswrapper[4799]: I1010 17:12:06.403054 4799 scope.go:117] "RemoveContainer" containerID="3ee83a672ba2f7803e7c4f7cecdddbaee1838a288065f4f6358434432017c356" Oct 10 17:12:06 crc kubenswrapper[4799]: E1010 17:12:06.403828 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:12:17 crc kubenswrapper[4799]: I1010 17:12:17.407213 4799 scope.go:117] "RemoveContainer" containerID="3ee83a672ba2f7803e7c4f7cecdddbaee1838a288065f4f6358434432017c356" Oct 10 17:12:17 crc kubenswrapper[4799]: E1010 17:12:17.408357 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:12:31 crc kubenswrapper[4799]: I1010 17:12:31.402421 4799 scope.go:117] "RemoveContainer" containerID="3ee83a672ba2f7803e7c4f7cecdddbaee1838a288065f4f6358434432017c356" Oct 10 17:12:31 crc kubenswrapper[4799]: E1010 17:12:31.403846 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:12:44 crc kubenswrapper[4799]: I1010 17:12:44.403050 4799 scope.go:117] "RemoveContainer" containerID="3ee83a672ba2f7803e7c4f7cecdddbaee1838a288065f4f6358434432017c356" Oct 10 17:12:44 crc kubenswrapper[4799]: E1010 17:12:44.404001 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:12:56 crc kubenswrapper[4799]: I1010 17:12:56.403199 4799 scope.go:117] "RemoveContainer" containerID="3ee83a672ba2f7803e7c4f7cecdddbaee1838a288065f4f6358434432017c356" Oct 10 17:12:56 crc kubenswrapper[4799]: E1010 17:12:56.404325 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:13:07 crc kubenswrapper[4799]: I1010 17:13:07.402302 4799 scope.go:117] "RemoveContainer" containerID="3ee83a672ba2f7803e7c4f7cecdddbaee1838a288065f4f6358434432017c356" Oct 10 17:13:07 crc kubenswrapper[4799]: E1010 17:13:07.405322 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:13:22 crc kubenswrapper[4799]: I1010 17:13:22.403226 4799 scope.go:117] "RemoveContainer" containerID="3ee83a672ba2f7803e7c4f7cecdddbaee1838a288065f4f6358434432017c356" Oct 10 17:13:22 crc kubenswrapper[4799]: I1010 17:13:22.919160 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"42045cbeab3b7f6682f926dc6bfd21910965b5211bc6575c74a92e96cc7dc346"} Oct 10 17:15:00 crc kubenswrapper[4799]: I1010 17:15:00.149583 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335275-rm27g"] Oct 10 17:15:00 crc kubenswrapper[4799]: E1010 17:15:00.150499 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e57ee11-d566-4fcc-bd5e-aa8e47b1322f" containerName="extract-content" Oct 10 17:15:00 crc kubenswrapper[4799]: I1010 17:15:00.150515 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e57ee11-d566-4fcc-bd5e-aa8e47b1322f" containerName="extract-content" Oct 10 17:15:00 crc kubenswrapper[4799]: E1010 17:15:00.150553 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e57ee11-d566-4fcc-bd5e-aa8e47b1322f" containerName="registry-server" Oct 10 17:15:00 crc kubenswrapper[4799]: I1010 17:15:00.150562 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e57ee11-d566-4fcc-bd5e-aa8e47b1322f" containerName="registry-server" Oct 10 17:15:00 crc kubenswrapper[4799]: E1010 17:15:00.150573 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e57ee11-d566-4fcc-bd5e-aa8e47b1322f" containerName="extract-utilities" Oct 10 17:15:00 crc kubenswrapper[4799]: I1010 17:15:00.150581 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e57ee11-d566-4fcc-bd5e-aa8e47b1322f" 
containerName="extract-utilities" Oct 10 17:15:00 crc kubenswrapper[4799]: I1010 17:15:00.150741 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e57ee11-d566-4fcc-bd5e-aa8e47b1322f" containerName="registry-server" Oct 10 17:15:00 crc kubenswrapper[4799]: I1010 17:15:00.151348 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335275-rm27g" Oct 10 17:15:00 crc kubenswrapper[4799]: I1010 17:15:00.154927 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 10 17:15:00 crc kubenswrapper[4799]: I1010 17:15:00.164334 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 10 17:15:00 crc kubenswrapper[4799]: I1010 17:15:00.165008 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335275-rm27g"] Oct 10 17:15:00 crc kubenswrapper[4799]: I1010 17:15:00.312301 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a14b809d-d163-4d78-9062-534b9025aa9b-config-volume\") pod \"collect-profiles-29335275-rm27g\" (UID: \"a14b809d-d163-4d78-9062-534b9025aa9b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335275-rm27g" Oct 10 17:15:00 crc kubenswrapper[4799]: I1010 17:15:00.312473 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a14b809d-d163-4d78-9062-534b9025aa9b-secret-volume\") pod \"collect-profiles-29335275-rm27g\" (UID: \"a14b809d-d163-4d78-9062-534b9025aa9b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335275-rm27g" Oct 10 17:15:00 crc kubenswrapper[4799]: I1010 17:15:00.312729 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqj4c\" (UniqueName: \"kubernetes.io/projected/a14b809d-d163-4d78-9062-534b9025aa9b-kube-api-access-bqj4c\") pod \"collect-profiles-29335275-rm27g\" (UID: \"a14b809d-d163-4d78-9062-534b9025aa9b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335275-rm27g" Oct 10 17:15:00 crc kubenswrapper[4799]: I1010 17:15:00.413858 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqj4c\" (UniqueName: \"kubernetes.io/projected/a14b809d-d163-4d78-9062-534b9025aa9b-kube-api-access-bqj4c\") pod \"collect-profiles-29335275-rm27g\" (UID: \"a14b809d-d163-4d78-9062-534b9025aa9b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335275-rm27g" Oct 10 17:15:00 crc kubenswrapper[4799]: I1010 17:15:00.413961 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a14b809d-d163-4d78-9062-534b9025aa9b-config-volume\") pod \"collect-profiles-29335275-rm27g\" (UID: \"a14b809d-d163-4d78-9062-534b9025aa9b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335275-rm27g" Oct 10 17:15:00 crc kubenswrapper[4799]: I1010 17:15:00.413991 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a14b809d-d163-4d78-9062-534b9025aa9b-secret-volume\") pod \"collect-profiles-29335275-rm27g\" (UID: 
\"a14b809d-d163-4d78-9062-534b9025aa9b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335275-rm27g" Oct 10 17:15:00 crc kubenswrapper[4799]: I1010 17:15:00.415549 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a14b809d-d163-4d78-9062-534b9025aa9b-config-volume\") pod \"collect-profiles-29335275-rm27g\" (UID: \"a14b809d-d163-4d78-9062-534b9025aa9b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335275-rm27g" Oct 10 17:15:00 crc kubenswrapper[4799]: I1010 17:15:00.428500 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a14b809d-d163-4d78-9062-534b9025aa9b-secret-volume\") pod \"collect-profiles-29335275-rm27g\" (UID: \"a14b809d-d163-4d78-9062-534b9025aa9b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335275-rm27g" Oct 10 17:15:00 crc kubenswrapper[4799]: I1010 17:15:00.436476 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqj4c\" (UniqueName: \"kubernetes.io/projected/a14b809d-d163-4d78-9062-534b9025aa9b-kube-api-access-bqj4c\") pod \"collect-profiles-29335275-rm27g\" (UID: \"a14b809d-d163-4d78-9062-534b9025aa9b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335275-rm27g" Oct 10 17:15:00 crc kubenswrapper[4799]: I1010 17:15:00.486573 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335275-rm27g" Oct 10 17:15:00 crc kubenswrapper[4799]: I1010 17:15:00.928295 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335275-rm27g"] Oct 10 17:15:01 crc kubenswrapper[4799]: I1010 17:15:01.838820 4799 generic.go:334] "Generic (PLEG): container finished" podID="a14b809d-d163-4d78-9062-534b9025aa9b" containerID="daa4de57417be4d272cf2abbb626de4933c43b4ced1fcd17caffb6f8bdc941a6" exitCode=0 Oct 10 17:15:01 crc kubenswrapper[4799]: I1010 17:15:01.838916 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335275-rm27g" event={"ID":"a14b809d-d163-4d78-9062-534b9025aa9b","Type":"ContainerDied","Data":"daa4de57417be4d272cf2abbb626de4933c43b4ced1fcd17caffb6f8bdc941a6"} Oct 10 17:15:01 crc kubenswrapper[4799]: I1010 17:15:01.838978 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335275-rm27g" event={"ID":"a14b809d-d163-4d78-9062-534b9025aa9b","Type":"ContainerStarted","Data":"bf5bd454d584ab36ed176b4b8374a13afe4a5bd326f3572fda753ca20bf99bfb"} Oct 10 17:15:03 crc kubenswrapper[4799]: I1010 17:15:03.232480 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335275-rm27g" Oct 10 17:15:03 crc kubenswrapper[4799]: I1010 17:15:03.362998 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a14b809d-d163-4d78-9062-534b9025aa9b-secret-volume\") pod \"a14b809d-d163-4d78-9062-534b9025aa9b\" (UID: \"a14b809d-d163-4d78-9062-534b9025aa9b\") " Oct 10 17:15:03 crc kubenswrapper[4799]: I1010 17:15:03.363040 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bqj4c\" (UniqueName: \"kubernetes.io/projected/a14b809d-d163-4d78-9062-534b9025aa9b-kube-api-access-bqj4c\") pod \"a14b809d-d163-4d78-9062-534b9025aa9b\" (UID: \"a14b809d-d163-4d78-9062-534b9025aa9b\") " Oct 10 17:15:03 crc kubenswrapper[4799]: I1010 17:15:03.363216 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a14b809d-d163-4d78-9062-534b9025aa9b-config-volume\") pod \"a14b809d-d163-4d78-9062-534b9025aa9b\" (UID: \"a14b809d-d163-4d78-9062-534b9025aa9b\") " Oct 10 17:15:03 crc kubenswrapper[4799]: I1010 17:15:03.363746 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a14b809d-d163-4d78-9062-534b9025aa9b-config-volume" (OuterVolumeSpecName: "config-volume") pod "a14b809d-d163-4d78-9062-534b9025aa9b" (UID: "a14b809d-d163-4d78-9062-534b9025aa9b"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 17:15:03 crc kubenswrapper[4799]: I1010 17:15:03.364125 4799 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a14b809d-d163-4d78-9062-534b9025aa9b-config-volume\") on node \"crc\" DevicePath \"\"" Oct 10 17:15:03 crc kubenswrapper[4799]: I1010 17:15:03.368373 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a14b809d-d163-4d78-9062-534b9025aa9b-kube-api-access-bqj4c" (OuterVolumeSpecName: "kube-api-access-bqj4c") pod "a14b809d-d163-4d78-9062-534b9025aa9b" (UID: "a14b809d-d163-4d78-9062-534b9025aa9b"). InnerVolumeSpecName "kube-api-access-bqj4c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:15:03 crc kubenswrapper[4799]: I1010 17:15:03.369059 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a14b809d-d163-4d78-9062-534b9025aa9b-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "a14b809d-d163-4d78-9062-534b9025aa9b" (UID: "a14b809d-d163-4d78-9062-534b9025aa9b"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 17:15:03 crc kubenswrapper[4799]: I1010 17:15:03.465250 4799 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a14b809d-d163-4d78-9062-534b9025aa9b-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 10 17:15:03 crc kubenswrapper[4799]: I1010 17:15:03.465276 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bqj4c\" (UniqueName: \"kubernetes.io/projected/a14b809d-d163-4d78-9062-534b9025aa9b-kube-api-access-bqj4c\") on node \"crc\" DevicePath \"\"" Oct 10 17:15:03 crc kubenswrapper[4799]: I1010 17:15:03.857287 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335275-rm27g" event={"ID":"a14b809d-d163-4d78-9062-534b9025aa9b","Type":"ContainerDied","Data":"bf5bd454d584ab36ed176b4b8374a13afe4a5bd326f3572fda753ca20bf99bfb"} Oct 10 17:15:03 crc kubenswrapper[4799]: I1010 17:15:03.857339 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bf5bd454d584ab36ed176b4b8374a13afe4a5bd326f3572fda753ca20bf99bfb" Oct 10 17:15:03 crc kubenswrapper[4799]: I1010 17:15:03.857454 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335275-rm27g" Oct 10 17:15:04 crc kubenswrapper[4799]: I1010 17:15:04.328665 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335230-q6qww"] Oct 10 17:15:04 crc kubenswrapper[4799]: I1010 17:15:04.334335 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335230-q6qww"] Oct 10 17:15:05 crc kubenswrapper[4799]: I1010 17:15:05.420115 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25cd298f-ccde-4805-801d-2d486c7e45da" path="/var/lib/kubelet/pods/25cd298f-ccde-4805-801d-2d486c7e45da/volumes" Oct 10 17:15:45 crc kubenswrapper[4799]: I1010 17:15:45.248975 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 17:15:45 crc kubenswrapper[4799]: I1010 17:15:45.249483 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 17:16:05 crc kubenswrapper[4799]: I1010 17:16:05.201039 4799 scope.go:117] "RemoveContainer" containerID="8d9753523c04dd86b3bd12b6206d393c9a192c0cfb49fb8be7252c33137623c7" Oct 10 17:16:15 crc kubenswrapper[4799]: I1010 17:16:15.248634 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 17:16:15 crc kubenswrapper[4799]: I1010 17:16:15.249512 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 17:16:27 crc kubenswrapper[4799]: I1010 17:16:27.784600 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-fdnkj"] Oct 10 17:16:27 crc kubenswrapper[4799]: E1010 17:16:27.785993 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a14b809d-d163-4d78-9062-534b9025aa9b" containerName="collect-profiles" Oct 10 17:16:27 crc kubenswrapper[4799]: I1010 17:16:27.786019 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a14b809d-d163-4d78-9062-534b9025aa9b" containerName="collect-profiles" Oct 10 17:16:27 crc kubenswrapper[4799]: I1010 17:16:27.786282 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="a14b809d-d163-4d78-9062-534b9025aa9b" containerName="collect-profiles" Oct 10 17:16:27 crc kubenswrapper[4799]: I1010 17:16:27.788734 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fdnkj" Oct 10 17:16:27 crc kubenswrapper[4799]: I1010 17:16:27.791646 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fdnkj"] Oct 10 17:16:27 crc kubenswrapper[4799]: I1010 17:16:27.824021 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69cca437-b936-4407-9b61-335d742d795a-catalog-content\") pod \"community-operators-fdnkj\" (UID: \"69cca437-b936-4407-9b61-335d742d795a\") " pod="openshift-marketplace/community-operators-fdnkj" Oct 10 17:16:27 crc kubenswrapper[4799]: I1010 17:16:27.824096 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69cca437-b936-4407-9b61-335d742d795a-utilities\") pod \"community-operators-fdnkj\" (UID: \"69cca437-b936-4407-9b61-335d742d795a\") " pod="openshift-marketplace/community-operators-fdnkj" Oct 10 17:16:27 crc kubenswrapper[4799]: I1010 17:16:27.824164 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fw6xq\" (UniqueName: \"kubernetes.io/projected/69cca437-b936-4407-9b61-335d742d795a-kube-api-access-fw6xq\") pod \"community-operators-fdnkj\" (UID: \"69cca437-b936-4407-9b61-335d742d795a\") " pod="openshift-marketplace/community-operators-fdnkj" Oct 10 17:16:27 crc kubenswrapper[4799]: I1010 17:16:27.925682 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69cca437-b936-4407-9b61-335d742d795a-catalog-content\") pod \"community-operators-fdnkj\" (UID: \"69cca437-b936-4407-9b61-335d742d795a\") " pod="openshift-marketplace/community-operators-fdnkj" Oct 10 17:16:27 crc kubenswrapper[4799]: I1010 17:16:27.925921 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69cca437-b936-4407-9b61-335d742d795a-utilities\") pod \"community-operators-fdnkj\" (UID: \"69cca437-b936-4407-9b61-335d742d795a\") " pod="openshift-marketplace/community-operators-fdnkj" Oct 10 17:16:27 crc kubenswrapper[4799]: I1010 17:16:27.926008 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fw6xq\" (UniqueName: 
\"kubernetes.io/projected/69cca437-b936-4407-9b61-335d742d795a-kube-api-access-fw6xq\") pod \"community-operators-fdnkj\" (UID: \"69cca437-b936-4407-9b61-335d742d795a\") " pod="openshift-marketplace/community-operators-fdnkj" Oct 10 17:16:27 crc kubenswrapper[4799]: I1010 17:16:27.926471 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69cca437-b936-4407-9b61-335d742d795a-catalog-content\") pod \"community-operators-fdnkj\" (UID: \"69cca437-b936-4407-9b61-335d742d795a\") " pod="openshift-marketplace/community-operators-fdnkj" Oct 10 17:16:27 crc kubenswrapper[4799]: I1010 17:16:27.926647 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69cca437-b936-4407-9b61-335d742d795a-utilities\") pod \"community-operators-fdnkj\" (UID: \"69cca437-b936-4407-9b61-335d742d795a\") " pod="openshift-marketplace/community-operators-fdnkj" Oct 10 17:16:27 crc kubenswrapper[4799]: I1010 17:16:27.960862 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fw6xq\" (UniqueName: \"kubernetes.io/projected/69cca437-b936-4407-9b61-335d742d795a-kube-api-access-fw6xq\") pod \"community-operators-fdnkj\" (UID: \"69cca437-b936-4407-9b61-335d742d795a\") " pod="openshift-marketplace/community-operators-fdnkj" Oct 10 17:16:28 crc kubenswrapper[4799]: I1010 17:16:28.168941 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fdnkj" Oct 10 17:16:28 crc kubenswrapper[4799]: I1010 17:16:28.445451 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fdnkj"] Oct 10 17:16:28 crc kubenswrapper[4799]: I1010 17:16:28.640873 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fdnkj" event={"ID":"69cca437-b936-4407-9b61-335d742d795a","Type":"ContainerStarted","Data":"d560a85f4a6d7cd1d9201ff6ebf3a1916086be98a6cce1cc60ab51362e79044c"} Oct 10 17:16:28 crc kubenswrapper[4799]: I1010 17:16:28.641178 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fdnkj" event={"ID":"69cca437-b936-4407-9b61-335d742d795a","Type":"ContainerStarted","Data":"389af1a064a146b5c7899bfec6f46311893df1caa26bbd9ced842a9c39454b06"} Oct 10 17:16:29 crc kubenswrapper[4799]: I1010 17:16:29.652235 4799 generic.go:334] "Generic (PLEG): container finished" podID="69cca437-b936-4407-9b61-335d742d795a" containerID="d560a85f4a6d7cd1d9201ff6ebf3a1916086be98a6cce1cc60ab51362e79044c" exitCode=0 Oct 10 17:16:29 crc kubenswrapper[4799]: I1010 17:16:29.652314 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fdnkj" event={"ID":"69cca437-b936-4407-9b61-335d742d795a","Type":"ContainerDied","Data":"d560a85f4a6d7cd1d9201ff6ebf3a1916086be98a6cce1cc60ab51362e79044c"} Oct 10 17:16:35 crc kubenswrapper[4799]: I1010 17:16:35.702308 4799 generic.go:334] "Generic (PLEG): container finished" podID="69cca437-b936-4407-9b61-335d742d795a" containerID="799b11dded64e5db8cd88af738e59333d01965de4cdc4d392aad5b788e12ed5e" exitCode=0 Oct 10 17:16:35 crc kubenswrapper[4799]: I1010 17:16:35.702481 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fdnkj" 
event={"ID":"69cca437-b936-4407-9b61-335d742d795a","Type":"ContainerDied","Data":"799b11dded64e5db8cd88af738e59333d01965de4cdc4d392aad5b788e12ed5e"} Oct 10 17:16:36 crc kubenswrapper[4799]: I1010 17:16:36.754960 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fdnkj" event={"ID":"69cca437-b936-4407-9b61-335d742d795a","Type":"ContainerStarted","Data":"2773415e001144934c5fa16430b3da864daf9711a93fb67866fb936cd0ebb971"} Oct 10 17:16:36 crc kubenswrapper[4799]: I1010 17:16:36.789033 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-fdnkj" podStartSLOduration=3.114257527 podStartE2EDuration="9.789006727s" podCreationTimestamp="2025-10-10 17:16:27 +0000 UTC" firstStartedPulling="2025-10-10 17:16:29.656077985 +0000 UTC m=+2683.164402130" lastFinishedPulling="2025-10-10 17:16:36.330827215 +0000 UTC m=+2689.839151330" observedRunningTime="2025-10-10 17:16:36.781494605 +0000 UTC m=+2690.289818750" watchObservedRunningTime="2025-10-10 17:16:36.789006727 +0000 UTC m=+2690.297330852" Oct 10 17:16:38 crc kubenswrapper[4799]: I1010 17:16:38.170045 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-fdnkj" Oct 10 17:16:38 crc kubenswrapper[4799]: I1010 17:16:38.170128 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-fdnkj" Oct 10 17:16:39 crc kubenswrapper[4799]: I1010 17:16:39.242089 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-fdnkj" podUID="69cca437-b936-4407-9b61-335d742d795a" containerName="registry-server" probeResult="failure" output=< Oct 10 17:16:39 crc kubenswrapper[4799]: timeout: failed to connect service ":50051" within 1s Oct 10 17:16:39 crc kubenswrapper[4799]: > Oct 10 17:16:39 crc kubenswrapper[4799]: I1010 17:16:39.250411 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-wt2dl"] Oct 10 17:16:39 crc kubenswrapper[4799]: I1010 17:16:39.252341 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wt2dl" Oct 10 17:16:39 crc kubenswrapper[4799]: I1010 17:16:39.268518 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wt2dl"] Oct 10 17:16:39 crc kubenswrapper[4799]: I1010 17:16:39.392145 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3054e8b-83d3-4b6b-b315-df4dc26164f5-catalog-content\") pod \"redhat-operators-wt2dl\" (UID: \"b3054e8b-83d3-4b6b-b315-df4dc26164f5\") " pod="openshift-marketplace/redhat-operators-wt2dl" Oct 10 17:16:39 crc kubenswrapper[4799]: I1010 17:16:39.392346 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j8nb6\" (UniqueName: \"kubernetes.io/projected/b3054e8b-83d3-4b6b-b315-df4dc26164f5-kube-api-access-j8nb6\") pod \"redhat-operators-wt2dl\" (UID: \"b3054e8b-83d3-4b6b-b315-df4dc26164f5\") " pod="openshift-marketplace/redhat-operators-wt2dl" Oct 10 17:16:39 crc kubenswrapper[4799]: I1010 17:16:39.392665 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3054e8b-83d3-4b6b-b315-df4dc26164f5-utilities\") pod \"redhat-operators-wt2dl\" (UID: \"b3054e8b-83d3-4b6b-b315-df4dc26164f5\") " pod="openshift-marketplace/redhat-operators-wt2dl" Oct 10 17:16:39 crc kubenswrapper[4799]: I1010 17:16:39.494185 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3054e8b-83d3-4b6b-b315-df4dc26164f5-utilities\") pod \"redhat-operators-wt2dl\" (UID: \"b3054e8b-83d3-4b6b-b315-df4dc26164f5\") " pod="openshift-marketplace/redhat-operators-wt2dl" Oct 10 17:16:39 crc kubenswrapper[4799]: I1010 17:16:39.494249 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3054e8b-83d3-4b6b-b315-df4dc26164f5-catalog-content\") pod \"redhat-operators-wt2dl\" (UID: \"b3054e8b-83d3-4b6b-b315-df4dc26164f5\") " pod="openshift-marketplace/redhat-operators-wt2dl" Oct 10 17:16:39 crc kubenswrapper[4799]: I1010 17:16:39.494282 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j8nb6\" (UniqueName: \"kubernetes.io/projected/b3054e8b-83d3-4b6b-b315-df4dc26164f5-kube-api-access-j8nb6\") pod \"redhat-operators-wt2dl\" (UID: \"b3054e8b-83d3-4b6b-b315-df4dc26164f5\") " pod="openshift-marketplace/redhat-operators-wt2dl" Oct 10 17:16:39 crc kubenswrapper[4799]: I1010 17:16:39.494622 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3054e8b-83d3-4b6b-b315-df4dc26164f5-catalog-content\") pod \"redhat-operators-wt2dl\" (UID: \"b3054e8b-83d3-4b6b-b315-df4dc26164f5\") " pod="openshift-marketplace/redhat-operators-wt2dl" Oct 10 17:16:39 crc kubenswrapper[4799]: I1010 17:16:39.494698 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3054e8b-83d3-4b6b-b315-df4dc26164f5-utilities\") pod \"redhat-operators-wt2dl\" (UID: \"b3054e8b-83d3-4b6b-b315-df4dc26164f5\") " pod="openshift-marketplace/redhat-operators-wt2dl" Oct 10 17:16:39 crc kubenswrapper[4799]: I1010 17:16:39.520429 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-j8nb6\" (UniqueName: \"kubernetes.io/projected/b3054e8b-83d3-4b6b-b315-df4dc26164f5-kube-api-access-j8nb6\") pod \"redhat-operators-wt2dl\" (UID: \"b3054e8b-83d3-4b6b-b315-df4dc26164f5\") " pod="openshift-marketplace/redhat-operators-wt2dl" Oct 10 17:16:39 crc kubenswrapper[4799]: I1010 17:16:39.587491 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wt2dl" Oct 10 17:16:40 crc kubenswrapper[4799]: I1010 17:16:40.003907 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wt2dl"] Oct 10 17:16:40 crc kubenswrapper[4799]: W1010 17:16:40.016155 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb3054e8b_83d3_4b6b_b315_df4dc26164f5.slice/crio-d0391c50dd578d56bedc4681f997a8134d13613aa131d5af8168a1c8d5c0c779 WatchSource:0}: Error finding container d0391c50dd578d56bedc4681f997a8134d13613aa131d5af8168a1c8d5c0c779: Status 404 returned error can't find the container with id d0391c50dd578d56bedc4681f997a8134d13613aa131d5af8168a1c8d5c0c779 Oct 10 17:16:40 crc kubenswrapper[4799]: I1010 17:16:40.792045 4799 generic.go:334] "Generic (PLEG): container finished" podID="b3054e8b-83d3-4b6b-b315-df4dc26164f5" containerID="950be2733ae296889c81454bb4e30746526ac91a722afd831a4cf5a2f8f18626" exitCode=0 Oct 10 17:16:40 crc kubenswrapper[4799]: I1010 17:16:40.792095 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wt2dl" event={"ID":"b3054e8b-83d3-4b6b-b315-df4dc26164f5","Type":"ContainerDied","Data":"950be2733ae296889c81454bb4e30746526ac91a722afd831a4cf5a2f8f18626"} Oct 10 17:16:40 crc kubenswrapper[4799]: I1010 17:16:40.792131 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wt2dl" event={"ID":"b3054e8b-83d3-4b6b-b315-df4dc26164f5","Type":"ContainerStarted","Data":"d0391c50dd578d56bedc4681f997a8134d13613aa131d5af8168a1c8d5c0c779"} Oct 10 17:16:41 crc kubenswrapper[4799]: I1010 17:16:41.803893 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wt2dl" event={"ID":"b3054e8b-83d3-4b6b-b315-df4dc26164f5","Type":"ContainerStarted","Data":"13b9f98106155e4e8356aa04f804661fc8e8106fc18b8d4788c710542c011537"} Oct 10 17:16:42 crc kubenswrapper[4799]: I1010 17:16:42.814435 4799 generic.go:334] "Generic (PLEG): container finished" podID="b3054e8b-83d3-4b6b-b315-df4dc26164f5" containerID="13b9f98106155e4e8356aa04f804661fc8e8106fc18b8d4788c710542c011537" exitCode=0 Oct 10 17:16:42 crc kubenswrapper[4799]: I1010 17:16:42.814505 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wt2dl" event={"ID":"b3054e8b-83d3-4b6b-b315-df4dc26164f5","Type":"ContainerDied","Data":"13b9f98106155e4e8356aa04f804661fc8e8106fc18b8d4788c710542c011537"} Oct 10 17:16:42 crc kubenswrapper[4799]: I1010 17:16:42.817806 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 10 17:16:44 crc kubenswrapper[4799]: I1010 17:16:44.834538 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wt2dl" event={"ID":"b3054e8b-83d3-4b6b-b315-df4dc26164f5","Type":"ContainerStarted","Data":"caee72b733863890004f1f4e75b7a8a9fa891db2961e2d35c55027695927337b"} Oct 10 17:16:44 crc kubenswrapper[4799]: I1010 17:16:44.861279 4799 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openshift-marketplace/redhat-operators-wt2dl" podStartSLOduration=3.039038556 podStartE2EDuration="5.861262768s" podCreationTimestamp="2025-10-10 17:16:39 +0000 UTC" firstStartedPulling="2025-10-10 17:16:40.79431344 +0000 UTC m=+2694.302637565" lastFinishedPulling="2025-10-10 17:16:43.616537652 +0000 UTC m=+2697.124861777" observedRunningTime="2025-10-10 17:16:44.859577517 +0000 UTC m=+2698.367901642" watchObservedRunningTime="2025-10-10 17:16:44.861262768 +0000 UTC m=+2698.369586883" Oct 10 17:16:45 crc kubenswrapper[4799]: I1010 17:16:45.248831 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 17:16:45 crc kubenswrapper[4799]: I1010 17:16:45.248906 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 17:16:45 crc kubenswrapper[4799]: I1010 17:16:45.248954 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 17:16:45 crc kubenswrapper[4799]: I1010 17:16:45.249612 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"42045cbeab3b7f6682f926dc6bfd21910965b5211bc6575c74a92e96cc7dc346"} pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 10 17:16:45 crc kubenswrapper[4799]: I1010 17:16:45.249683 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" containerID="cri-o://42045cbeab3b7f6682f926dc6bfd21910965b5211bc6575c74a92e96cc7dc346" gracePeriod=600 Oct 10 17:16:45 crc kubenswrapper[4799]: I1010 17:16:45.852414 4799 generic.go:334] "Generic (PLEG): container finished" podID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerID="42045cbeab3b7f6682f926dc6bfd21910965b5211bc6575c74a92e96cc7dc346" exitCode=0 Oct 10 17:16:45 crc kubenswrapper[4799]: I1010 17:16:45.852504 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerDied","Data":"42045cbeab3b7f6682f926dc6bfd21910965b5211bc6575c74a92e96cc7dc346"} Oct 10 17:16:45 crc kubenswrapper[4799]: I1010 17:16:45.852581 4799 scope.go:117] "RemoveContainer" containerID="3ee83a672ba2f7803e7c4f7cecdddbaee1838a288065f4f6358434432017c356" Oct 10 17:16:46 crc kubenswrapper[4799]: I1010 17:16:46.863380 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"bf4eb3e56ead59e9fb87d5f16a0e791ef1612b88c8f649a4f091b1c1c12d7b71"} Oct 10 17:16:48 crc kubenswrapper[4799]: I1010 17:16:48.238947 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/community-operators-fdnkj" Oct 10 17:16:48 crc kubenswrapper[4799]: I1010 17:16:48.294862 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-fdnkj" Oct 10 17:16:48 crc kubenswrapper[4799]: I1010 17:16:48.396480 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fdnkj"] Oct 10 17:16:48 crc kubenswrapper[4799]: I1010 17:16:48.485807 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vdv4v"] Oct 10 17:16:48 crc kubenswrapper[4799]: I1010 17:16:48.486327 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-vdv4v" podUID="49348cb6-a0ca-44e9-81c9-370942d4e5b7" containerName="registry-server" containerID="cri-o://017307e03e9b84aa53e1d09494898a5393495181f93263582950c5638c4e01b1" gracePeriod=2 Oct 10 17:16:48 crc kubenswrapper[4799]: I1010 17:16:48.878894 4799 generic.go:334] "Generic (PLEG): container finished" podID="49348cb6-a0ca-44e9-81c9-370942d4e5b7" containerID="017307e03e9b84aa53e1d09494898a5393495181f93263582950c5638c4e01b1" exitCode=0 Oct 10 17:16:48 crc kubenswrapper[4799]: I1010 17:16:48.879067 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vdv4v" event={"ID":"49348cb6-a0ca-44e9-81c9-370942d4e5b7","Type":"ContainerDied","Data":"017307e03e9b84aa53e1d09494898a5393495181f93263582950c5638c4e01b1"} Oct 10 17:16:48 crc kubenswrapper[4799]: I1010 17:16:48.879277 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vdv4v" event={"ID":"49348cb6-a0ca-44e9-81c9-370942d4e5b7","Type":"ContainerDied","Data":"7eb9061e5ad9f3a61be157ff6dd1e8c4421127e0f3c104dbfbea0dd37aa5919a"} Oct 10 17:16:48 crc kubenswrapper[4799]: I1010 17:16:48.879319 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7eb9061e5ad9f3a61be157ff6dd1e8c4421127e0f3c104dbfbea0dd37aa5919a" Oct 10 17:16:48 crc kubenswrapper[4799]: I1010 17:16:48.886965 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vdv4v" Oct 10 17:16:49 crc kubenswrapper[4799]: I1010 17:16:49.046081 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49348cb6-a0ca-44e9-81c9-370942d4e5b7-catalog-content\") pod \"49348cb6-a0ca-44e9-81c9-370942d4e5b7\" (UID: \"49348cb6-a0ca-44e9-81c9-370942d4e5b7\") " Oct 10 17:16:49 crc kubenswrapper[4799]: I1010 17:16:49.046173 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mrbrx\" (UniqueName: \"kubernetes.io/projected/49348cb6-a0ca-44e9-81c9-370942d4e5b7-kube-api-access-mrbrx\") pod \"49348cb6-a0ca-44e9-81c9-370942d4e5b7\" (UID: \"49348cb6-a0ca-44e9-81c9-370942d4e5b7\") " Oct 10 17:16:49 crc kubenswrapper[4799]: I1010 17:16:49.046245 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49348cb6-a0ca-44e9-81c9-370942d4e5b7-utilities\") pod \"49348cb6-a0ca-44e9-81c9-370942d4e5b7\" (UID: \"49348cb6-a0ca-44e9-81c9-370942d4e5b7\") " Oct 10 17:16:49 crc kubenswrapper[4799]: I1010 17:16:49.046953 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/49348cb6-a0ca-44e9-81c9-370942d4e5b7-utilities" (OuterVolumeSpecName: "utilities") pod "49348cb6-a0ca-44e9-81c9-370942d4e5b7" (UID: "49348cb6-a0ca-44e9-81c9-370942d4e5b7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:16:49 crc kubenswrapper[4799]: I1010 17:16:49.051554 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49348cb6-a0ca-44e9-81c9-370942d4e5b7-kube-api-access-mrbrx" (OuterVolumeSpecName: "kube-api-access-mrbrx") pod "49348cb6-a0ca-44e9-81c9-370942d4e5b7" (UID: "49348cb6-a0ca-44e9-81c9-370942d4e5b7"). InnerVolumeSpecName "kube-api-access-mrbrx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:16:49 crc kubenswrapper[4799]: I1010 17:16:49.091971 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/49348cb6-a0ca-44e9-81c9-370942d4e5b7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "49348cb6-a0ca-44e9-81c9-370942d4e5b7" (UID: "49348cb6-a0ca-44e9-81c9-370942d4e5b7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:16:49 crc kubenswrapper[4799]: I1010 17:16:49.147658 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49348cb6-a0ca-44e9-81c9-370942d4e5b7-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 17:16:49 crc kubenswrapper[4799]: I1010 17:16:49.147694 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49348cb6-a0ca-44e9-81c9-370942d4e5b7-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 17:16:49 crc kubenswrapper[4799]: I1010 17:16:49.147706 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mrbrx\" (UniqueName: \"kubernetes.io/projected/49348cb6-a0ca-44e9-81c9-370942d4e5b7-kube-api-access-mrbrx\") on node \"crc\" DevicePath \"\"" Oct 10 17:16:49 crc kubenswrapper[4799]: I1010 17:16:49.588784 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-wt2dl" Oct 10 17:16:49 crc kubenswrapper[4799]: I1010 17:16:49.589209 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-wt2dl" Oct 10 17:16:49 crc kubenswrapper[4799]: I1010 17:16:49.657684 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-wt2dl" Oct 10 17:16:49 crc kubenswrapper[4799]: I1010 17:16:49.885699 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vdv4v" Oct 10 17:16:49 crc kubenswrapper[4799]: I1010 17:16:49.911594 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vdv4v"] Oct 10 17:16:49 crc kubenswrapper[4799]: I1010 17:16:49.917566 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-vdv4v"] Oct 10 17:16:49 crc kubenswrapper[4799]: I1010 17:16:49.949018 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-wt2dl" Oct 10 17:16:51 crc kubenswrapper[4799]: I1010 17:16:51.420178 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49348cb6-a0ca-44e9-81c9-370942d4e5b7" path="/var/lib/kubelet/pods/49348cb6-a0ca-44e9-81c9-370942d4e5b7/volumes" Oct 10 17:16:52 crc kubenswrapper[4799]: I1010 17:16:52.094731 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wt2dl"] Oct 10 17:16:52 crc kubenswrapper[4799]: I1010 17:16:52.095153 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-wt2dl" podUID="b3054e8b-83d3-4b6b-b315-df4dc26164f5" containerName="registry-server" containerID="cri-o://caee72b733863890004f1f4e75b7a8a9fa891db2961e2d35c55027695927337b" gracePeriod=2 Oct 10 17:16:52 crc kubenswrapper[4799]: I1010 17:16:52.500838 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wt2dl" Oct 10 17:16:52 crc kubenswrapper[4799]: I1010 17:16:52.700679 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3054e8b-83d3-4b6b-b315-df4dc26164f5-catalog-content\") pod \"b3054e8b-83d3-4b6b-b315-df4dc26164f5\" (UID: \"b3054e8b-83d3-4b6b-b315-df4dc26164f5\") " Oct 10 17:16:52 crc kubenswrapper[4799]: I1010 17:16:52.700807 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j8nb6\" (UniqueName: \"kubernetes.io/projected/b3054e8b-83d3-4b6b-b315-df4dc26164f5-kube-api-access-j8nb6\") pod \"b3054e8b-83d3-4b6b-b315-df4dc26164f5\" (UID: \"b3054e8b-83d3-4b6b-b315-df4dc26164f5\") " Oct 10 17:16:52 crc kubenswrapper[4799]: I1010 17:16:52.700851 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3054e8b-83d3-4b6b-b315-df4dc26164f5-utilities\") pod \"b3054e8b-83d3-4b6b-b315-df4dc26164f5\" (UID: \"b3054e8b-83d3-4b6b-b315-df4dc26164f5\") " Oct 10 17:16:52 crc kubenswrapper[4799]: I1010 17:16:52.701782 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3054e8b-83d3-4b6b-b315-df4dc26164f5-utilities" (OuterVolumeSpecName: "utilities") pod "b3054e8b-83d3-4b6b-b315-df4dc26164f5" (UID: "b3054e8b-83d3-4b6b-b315-df4dc26164f5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:16:52 crc kubenswrapper[4799]: I1010 17:16:52.712348 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3054e8b-83d3-4b6b-b315-df4dc26164f5-kube-api-access-j8nb6" (OuterVolumeSpecName: "kube-api-access-j8nb6") pod "b3054e8b-83d3-4b6b-b315-df4dc26164f5" (UID: "b3054e8b-83d3-4b6b-b315-df4dc26164f5"). InnerVolumeSpecName "kube-api-access-j8nb6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:16:52 crc kubenswrapper[4799]: I1010 17:16:52.803245 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j8nb6\" (UniqueName: \"kubernetes.io/projected/b3054e8b-83d3-4b6b-b315-df4dc26164f5-kube-api-access-j8nb6\") on node \"crc\" DevicePath \"\"" Oct 10 17:16:52 crc kubenswrapper[4799]: I1010 17:16:52.803471 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3054e8b-83d3-4b6b-b315-df4dc26164f5-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 17:16:52 crc kubenswrapper[4799]: I1010 17:16:52.915618 4799 generic.go:334] "Generic (PLEG): container finished" podID="b3054e8b-83d3-4b6b-b315-df4dc26164f5" containerID="caee72b733863890004f1f4e75b7a8a9fa891db2961e2d35c55027695927337b" exitCode=0 Oct 10 17:16:52 crc kubenswrapper[4799]: I1010 17:16:52.915666 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wt2dl" event={"ID":"b3054e8b-83d3-4b6b-b315-df4dc26164f5","Type":"ContainerDied","Data":"caee72b733863890004f1f4e75b7a8a9fa891db2961e2d35c55027695927337b"} Oct 10 17:16:52 crc kubenswrapper[4799]: I1010 17:16:52.915713 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wt2dl" event={"ID":"b3054e8b-83d3-4b6b-b315-df4dc26164f5","Type":"ContainerDied","Data":"d0391c50dd578d56bedc4681f997a8134d13613aa131d5af8168a1c8d5c0c779"} Oct 10 17:16:52 crc kubenswrapper[4799]: I1010 17:16:52.915730 4799 scope.go:117] "RemoveContainer" containerID="caee72b733863890004f1f4e75b7a8a9fa891db2961e2d35c55027695927337b" Oct 10 17:16:52 crc kubenswrapper[4799]: I1010 17:16:52.915741 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wt2dl" Oct 10 17:16:52 crc kubenswrapper[4799]: I1010 17:16:52.937204 4799 scope.go:117] "RemoveContainer" containerID="13b9f98106155e4e8356aa04f804661fc8e8106fc18b8d4788c710542c011537" Oct 10 17:16:52 crc kubenswrapper[4799]: I1010 17:16:52.957159 4799 scope.go:117] "RemoveContainer" containerID="950be2733ae296889c81454bb4e30746526ac91a722afd831a4cf5a2f8f18626" Oct 10 17:16:52 crc kubenswrapper[4799]: I1010 17:16:52.976945 4799 scope.go:117] "RemoveContainer" containerID="caee72b733863890004f1f4e75b7a8a9fa891db2961e2d35c55027695927337b" Oct 10 17:16:52 crc kubenswrapper[4799]: E1010 17:16:52.977387 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"caee72b733863890004f1f4e75b7a8a9fa891db2961e2d35c55027695927337b\": container with ID starting with caee72b733863890004f1f4e75b7a8a9fa891db2961e2d35c55027695927337b not found: ID does not exist" containerID="caee72b733863890004f1f4e75b7a8a9fa891db2961e2d35c55027695927337b" Oct 10 17:16:52 crc kubenswrapper[4799]: I1010 17:16:52.977424 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"caee72b733863890004f1f4e75b7a8a9fa891db2961e2d35c55027695927337b"} err="failed to get container status \"caee72b733863890004f1f4e75b7a8a9fa891db2961e2d35c55027695927337b\": rpc error: code = NotFound desc = could not find container \"caee72b733863890004f1f4e75b7a8a9fa891db2961e2d35c55027695927337b\": container with ID starting with caee72b733863890004f1f4e75b7a8a9fa891db2961e2d35c55027695927337b not found: ID does not exist" Oct 10 17:16:52 crc kubenswrapper[4799]: I1010 17:16:52.977465 4799 scope.go:117] "RemoveContainer" containerID="13b9f98106155e4e8356aa04f804661fc8e8106fc18b8d4788c710542c011537" Oct 10 17:16:52 crc kubenswrapper[4799]: E1010 17:16:52.977677 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"13b9f98106155e4e8356aa04f804661fc8e8106fc18b8d4788c710542c011537\": container with ID starting with 13b9f98106155e4e8356aa04f804661fc8e8106fc18b8d4788c710542c011537 not found: ID does not exist" containerID="13b9f98106155e4e8356aa04f804661fc8e8106fc18b8d4788c710542c011537" Oct 10 17:16:52 crc kubenswrapper[4799]: I1010 17:16:52.977718 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"13b9f98106155e4e8356aa04f804661fc8e8106fc18b8d4788c710542c011537"} err="failed to get container status \"13b9f98106155e4e8356aa04f804661fc8e8106fc18b8d4788c710542c011537\": rpc error: code = NotFound desc = could not find container \"13b9f98106155e4e8356aa04f804661fc8e8106fc18b8d4788c710542c011537\": container with ID starting with 13b9f98106155e4e8356aa04f804661fc8e8106fc18b8d4788c710542c011537 not found: ID does not exist" Oct 10 17:16:52 crc kubenswrapper[4799]: I1010 17:16:52.977732 4799 scope.go:117] "RemoveContainer" containerID="950be2733ae296889c81454bb4e30746526ac91a722afd831a4cf5a2f8f18626" Oct 10 17:16:52 crc kubenswrapper[4799]: E1010 17:16:52.978019 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"950be2733ae296889c81454bb4e30746526ac91a722afd831a4cf5a2f8f18626\": container with ID starting with 950be2733ae296889c81454bb4e30746526ac91a722afd831a4cf5a2f8f18626 not found: ID does not exist" containerID="950be2733ae296889c81454bb4e30746526ac91a722afd831a4cf5a2f8f18626" 
Oct 10 17:16:52 crc kubenswrapper[4799]: I1010 17:16:52.978040 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"950be2733ae296889c81454bb4e30746526ac91a722afd831a4cf5a2f8f18626"} err="failed to get container status \"950be2733ae296889c81454bb4e30746526ac91a722afd831a4cf5a2f8f18626\": rpc error: code = NotFound desc = could not find container \"950be2733ae296889c81454bb4e30746526ac91a722afd831a4cf5a2f8f18626\": container with ID starting with 950be2733ae296889c81454bb4e30746526ac91a722afd831a4cf5a2f8f18626 not found: ID does not exist"
Oct 10 17:16:53 crc kubenswrapper[4799]: I1010 17:16:53.275005 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3054e8b-83d3-4b6b-b315-df4dc26164f5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b3054e8b-83d3-4b6b-b315-df4dc26164f5" (UID: "b3054e8b-83d3-4b6b-b315-df4dc26164f5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 17:16:53 crc kubenswrapper[4799]: I1010 17:16:53.311630 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3054e8b-83d3-4b6b-b315-df4dc26164f5-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 10 17:16:53 crc kubenswrapper[4799]: I1010 17:16:53.555366 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wt2dl"]
Oct 10 17:16:53 crc kubenswrapper[4799]: I1010 17:16:53.560904 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-wt2dl"]
Oct 10 17:16:55 crc kubenswrapper[4799]: I1010 17:16:55.422609 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3054e8b-83d3-4b6b-b315-df4dc26164f5" path="/var/lib/kubelet/pods/b3054e8b-83d3-4b6b-b315-df4dc26164f5/volumes"
Oct 10 17:16:59 crc kubenswrapper[4799]: I1010 17:16:59.245088 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-q7krn"]
Oct 10 17:16:59 crc kubenswrapper[4799]: E1010 17:16:59.245618 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3054e8b-83d3-4b6b-b315-df4dc26164f5" containerName="extract-utilities"
Oct 10 17:16:59 crc kubenswrapper[4799]: I1010 17:16:59.245631 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3054e8b-83d3-4b6b-b315-df4dc26164f5" containerName="extract-utilities"
Oct 10 17:16:59 crc kubenswrapper[4799]: E1010 17:16:59.245645 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49348cb6-a0ca-44e9-81c9-370942d4e5b7" containerName="extract-content"
Oct 10 17:16:59 crc kubenswrapper[4799]: I1010 17:16:59.245651 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="49348cb6-a0ca-44e9-81c9-370942d4e5b7" containerName="extract-content"
Oct 10 17:16:59 crc kubenswrapper[4799]: E1010 17:16:59.245662 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3054e8b-83d3-4b6b-b315-df4dc26164f5" containerName="extract-content"
Oct 10 17:16:59 crc kubenswrapper[4799]: I1010 17:16:59.245669 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3054e8b-83d3-4b6b-b315-df4dc26164f5" containerName="extract-content"
Oct 10 17:16:59 crc kubenswrapper[4799]: E1010 17:16:59.245683 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3054e8b-83d3-4b6b-b315-df4dc26164f5" containerName="registry-server"
Oct 10 17:16:59 crc kubenswrapper[4799]: I1010 17:16:59.245692 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3054e8b-83d3-4b6b-b315-df4dc26164f5" containerName="registry-server"
Oct 10 17:16:59 crc kubenswrapper[4799]: E1010 17:16:59.245710 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49348cb6-a0ca-44e9-81c9-370942d4e5b7" containerName="registry-server"
Oct 10 17:16:59 crc kubenswrapper[4799]: I1010 17:16:59.245716 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="49348cb6-a0ca-44e9-81c9-370942d4e5b7" containerName="registry-server"
Oct 10 17:16:59 crc kubenswrapper[4799]: E1010 17:16:59.245728 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49348cb6-a0ca-44e9-81c9-370942d4e5b7" containerName="extract-utilities"
Oct 10 17:16:59 crc kubenswrapper[4799]: I1010 17:16:59.245734 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="49348cb6-a0ca-44e9-81c9-370942d4e5b7" containerName="extract-utilities"
Oct 10 17:16:59 crc kubenswrapper[4799]: I1010 17:16:59.245918 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="49348cb6-a0ca-44e9-81c9-370942d4e5b7" containerName="registry-server"
Oct 10 17:16:59 crc kubenswrapper[4799]: I1010 17:16:59.245936 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3054e8b-83d3-4b6b-b315-df4dc26164f5" containerName="registry-server"
Oct 10 17:16:59 crc kubenswrapper[4799]: I1010 17:16:59.246895 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-q7krn"
Oct 10 17:16:59 crc kubenswrapper[4799]: I1010 17:16:59.278571 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-q7krn"]
Oct 10 17:16:59 crc kubenswrapper[4799]: I1010 17:16:59.410547 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08bdef95-be4e-4fe8-adb2-6a7f075b2a8d-utilities\") pod \"certified-operators-q7krn\" (UID: \"08bdef95-be4e-4fe8-adb2-6a7f075b2a8d\") " pod="openshift-marketplace/certified-operators-q7krn"
Oct 10 17:16:59 crc kubenswrapper[4799]: I1010 17:16:59.410664 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bcvl6\" (UniqueName: \"kubernetes.io/projected/08bdef95-be4e-4fe8-adb2-6a7f075b2a8d-kube-api-access-bcvl6\") pod \"certified-operators-q7krn\" (UID: \"08bdef95-be4e-4fe8-adb2-6a7f075b2a8d\") " pod="openshift-marketplace/certified-operators-q7krn"
Oct 10 17:16:59 crc kubenswrapper[4799]: I1010 17:16:59.410731 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08bdef95-be4e-4fe8-adb2-6a7f075b2a8d-catalog-content\") pod \"certified-operators-q7krn\" (UID: \"08bdef95-be4e-4fe8-adb2-6a7f075b2a8d\") " pod="openshift-marketplace/certified-operators-q7krn"
Oct 10 17:16:59 crc kubenswrapper[4799]: I1010 17:16:59.511989 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bcvl6\" (UniqueName: \"kubernetes.io/projected/08bdef95-be4e-4fe8-adb2-6a7f075b2a8d-kube-api-access-bcvl6\") pod \"certified-operators-q7krn\" (UID: \"08bdef95-be4e-4fe8-adb2-6a7f075b2a8d\") " pod="openshift-marketplace/certified-operators-q7krn"
Oct 10 17:16:59 crc kubenswrapper[4799]: I1010 17:16:59.512095 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08bdef95-be4e-4fe8-adb2-6a7f075b2a8d-catalog-content\") pod \"certified-operators-q7krn\" (UID: \"08bdef95-be4e-4fe8-adb2-6a7f075b2a8d\") " pod="openshift-marketplace/certified-operators-q7krn"
Oct 10 17:16:59 crc kubenswrapper[4799]: I1010 17:16:59.512126 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08bdef95-be4e-4fe8-adb2-6a7f075b2a8d-utilities\") pod \"certified-operators-q7krn\" (UID: \"08bdef95-be4e-4fe8-adb2-6a7f075b2a8d\") " pod="openshift-marketplace/certified-operators-q7krn"
Oct 10 17:16:59 crc kubenswrapper[4799]: I1010 17:16:59.512688 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08bdef95-be4e-4fe8-adb2-6a7f075b2a8d-utilities\") pod \"certified-operators-q7krn\" (UID: \"08bdef95-be4e-4fe8-adb2-6a7f075b2a8d\") " pod="openshift-marketplace/certified-operators-q7krn"
Oct 10 17:16:59 crc kubenswrapper[4799]: I1010 17:16:59.512843 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08bdef95-be4e-4fe8-adb2-6a7f075b2a8d-catalog-content\") pod \"certified-operators-q7krn\" (UID: \"08bdef95-be4e-4fe8-adb2-6a7f075b2a8d\") " pod="openshift-marketplace/certified-operators-q7krn"
Oct 10 17:16:59 crc kubenswrapper[4799]: I1010 17:16:59.532749 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bcvl6\" (UniqueName: \"kubernetes.io/projected/08bdef95-be4e-4fe8-adb2-6a7f075b2a8d-kube-api-access-bcvl6\") pod \"certified-operators-q7krn\" (UID: \"08bdef95-be4e-4fe8-adb2-6a7f075b2a8d\") " pod="openshift-marketplace/certified-operators-q7krn"
Oct 10 17:16:59 crc kubenswrapper[4799]: I1010 17:16:59.573355 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-q7krn"
Oct 10 17:17:00 crc kubenswrapper[4799]: I1010 17:17:00.094496 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-q7krn"]
Oct 10 17:17:00 crc kubenswrapper[4799]: W1010 17:17:00.095435 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod08bdef95_be4e_4fe8_adb2_6a7f075b2a8d.slice/crio-ef4ba221df9a2004e5cb58dcddeb7db09a8ed1cdc836c14850ff51361e49e9a1 WatchSource:0}: Error finding container ef4ba221df9a2004e5cb58dcddeb7db09a8ed1cdc836c14850ff51361e49e9a1: Status 404 returned error can't find the container with id ef4ba221df9a2004e5cb58dcddeb7db09a8ed1cdc836c14850ff51361e49e9a1
Oct 10 17:17:00 crc kubenswrapper[4799]: I1010 17:17:00.986215 4799 generic.go:334] "Generic (PLEG): container finished" podID="08bdef95-be4e-4fe8-adb2-6a7f075b2a8d" containerID="7a1f9fb453c6daa0f60214480ce9bdfcf5a5706d1af07bc7e94ffaef77c1111e" exitCode=0
Oct 10 17:17:00 crc kubenswrapper[4799]: I1010 17:17:00.986272 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q7krn" event={"ID":"08bdef95-be4e-4fe8-adb2-6a7f075b2a8d","Type":"ContainerDied","Data":"7a1f9fb453c6daa0f60214480ce9bdfcf5a5706d1af07bc7e94ffaef77c1111e"}
Oct 10 17:17:00 crc kubenswrapper[4799]: I1010 17:17:00.986338 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q7krn" event={"ID":"08bdef95-be4e-4fe8-adb2-6a7f075b2a8d","Type":"ContainerStarted","Data":"ef4ba221df9a2004e5cb58dcddeb7db09a8ed1cdc836c14850ff51361e49e9a1"}
Oct 10 17:17:03 crc kubenswrapper[4799]: I1010 17:17:03.007902 4799 generic.go:334] "Generic (PLEG): container finished" podID="08bdef95-be4e-4fe8-adb2-6a7f075b2a8d" containerID="8203ed1d894f9471fce1e6c7ad5864b833281713b23e96ad3f0fbd9a1da312fa" exitCode=0
Oct 10 17:17:03 crc kubenswrapper[4799]: I1010 17:17:03.008011 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q7krn" event={"ID":"08bdef95-be4e-4fe8-adb2-6a7f075b2a8d","Type":"ContainerDied","Data":"8203ed1d894f9471fce1e6c7ad5864b833281713b23e96ad3f0fbd9a1da312fa"}
Oct 10 17:17:04 crc kubenswrapper[4799]: I1010 17:17:04.023239 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q7krn" event={"ID":"08bdef95-be4e-4fe8-adb2-6a7f075b2a8d","Type":"ContainerStarted","Data":"4142cae95884ff8c965913e7c88bee0c9f45b39ad08ec18835ade77734959498"}
Oct 10 17:17:04 crc kubenswrapper[4799]: I1010 17:17:04.069860 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-q7krn" podStartSLOduration=2.66475784 podStartE2EDuration="5.069840357s" podCreationTimestamp="2025-10-10 17:16:59 +0000 UTC" firstStartedPulling="2025-10-10 17:17:00.98973033 +0000 UTC m=+2714.498054475" lastFinishedPulling="2025-10-10 17:17:03.394812877 +0000 UTC m=+2716.903136992" observedRunningTime="2025-10-10 17:17:04.066310472 +0000 UTC m=+2717.574634597" watchObservedRunningTime="2025-10-10 17:17:04.069840357 +0000 UTC m=+2717.578164502"
Oct 10 17:17:05 crc kubenswrapper[4799]: I1010 17:17:05.270641 4799 scope.go:117] "RemoveContainer" containerID="017307e03e9b84aa53e1d09494898a5393495181f93263582950c5638c4e01b1"
Oct 10 17:17:05 crc kubenswrapper[4799]: I1010 17:17:05.303488 4799 scope.go:117] "RemoveContainer" containerID="2ac0dbd976177d28e98d2b18e48eb6c3dd52ad9146d394f017ada5f690bf7d97"
Oct 10 17:17:05 crc kubenswrapper[4799]: I1010 17:17:05.329602 4799 scope.go:117] "RemoveContainer" containerID="8c72cabef0bccd663e62c36c128830868838eadcc39c7f851b73f6d0e1f637ad"
Oct 10 17:17:09 crc kubenswrapper[4799]: I1010 17:17:09.574690 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-q7krn"
Oct 10 17:17:09 crc kubenswrapper[4799]: I1010 17:17:09.574988 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-q7krn"
Oct 10 17:17:09 crc kubenswrapper[4799]: I1010 17:17:09.638719 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-q7krn"
Oct 10 17:17:10 crc kubenswrapper[4799]: I1010 17:17:10.148718 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-q7krn"
Oct 10 17:17:10 crc kubenswrapper[4799]: I1010 17:17:10.195392 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-q7krn"]
Oct 10 17:17:12 crc kubenswrapper[4799]: I1010 17:17:12.112570 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-q7krn" podUID="08bdef95-be4e-4fe8-adb2-6a7f075b2a8d" containerName="registry-server" containerID="cri-o://4142cae95884ff8c965913e7c88bee0c9f45b39ad08ec18835ade77734959498" gracePeriod=2
Oct 10 17:17:12 crc kubenswrapper[4799]: I1010 17:17:12.669490 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-q7krn"
Oct 10 17:17:12 crc kubenswrapper[4799]: I1010 17:17:12.732246 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bcvl6\" (UniqueName: \"kubernetes.io/projected/08bdef95-be4e-4fe8-adb2-6a7f075b2a8d-kube-api-access-bcvl6\") pod \"08bdef95-be4e-4fe8-adb2-6a7f075b2a8d\" (UID: \"08bdef95-be4e-4fe8-adb2-6a7f075b2a8d\") "
Oct 10 17:17:12 crc kubenswrapper[4799]: I1010 17:17:12.732419 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08bdef95-be4e-4fe8-adb2-6a7f075b2a8d-utilities\") pod \"08bdef95-be4e-4fe8-adb2-6a7f075b2a8d\" (UID: \"08bdef95-be4e-4fe8-adb2-6a7f075b2a8d\") "
Oct 10 17:17:12 crc kubenswrapper[4799]: I1010 17:17:12.732477 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08bdef95-be4e-4fe8-adb2-6a7f075b2a8d-catalog-content\") pod \"08bdef95-be4e-4fe8-adb2-6a7f075b2a8d\" (UID: \"08bdef95-be4e-4fe8-adb2-6a7f075b2a8d\") "
Oct 10 17:17:12 crc kubenswrapper[4799]: I1010 17:17:12.733293 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08bdef95-be4e-4fe8-adb2-6a7f075b2a8d-utilities" (OuterVolumeSpecName: "utilities") pod "08bdef95-be4e-4fe8-adb2-6a7f075b2a8d" (UID: "08bdef95-be4e-4fe8-adb2-6a7f075b2a8d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 17:17:12 crc kubenswrapper[4799]: I1010 17:17:12.743001 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08bdef95-be4e-4fe8-adb2-6a7f075b2a8d-kube-api-access-bcvl6" (OuterVolumeSpecName: "kube-api-access-bcvl6") pod "08bdef95-be4e-4fe8-adb2-6a7f075b2a8d" (UID: "08bdef95-be4e-4fe8-adb2-6a7f075b2a8d"). InnerVolumeSpecName "kube-api-access-bcvl6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 17:17:12 crc kubenswrapper[4799]: I1010 17:17:12.835054 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bcvl6\" (UniqueName: \"kubernetes.io/projected/08bdef95-be4e-4fe8-adb2-6a7f075b2a8d-kube-api-access-bcvl6\") on node \"crc\" DevicePath \"\""
Oct 10 17:17:12 crc kubenswrapper[4799]: I1010 17:17:12.835097 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08bdef95-be4e-4fe8-adb2-6a7f075b2a8d-utilities\") on node \"crc\" DevicePath \"\""
Oct 10 17:17:12 crc kubenswrapper[4799]: I1010 17:17:12.899794 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08bdef95-be4e-4fe8-adb2-6a7f075b2a8d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "08bdef95-be4e-4fe8-adb2-6a7f075b2a8d" (UID: "08bdef95-be4e-4fe8-adb2-6a7f075b2a8d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 17:17:12 crc kubenswrapper[4799]: I1010 17:17:12.936843 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08bdef95-be4e-4fe8-adb2-6a7f075b2a8d-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 10 17:17:13 crc kubenswrapper[4799]: I1010 17:17:13.107733 4799 generic.go:334] "Generic (PLEG): container finished" podID="08bdef95-be4e-4fe8-adb2-6a7f075b2a8d" containerID="4142cae95884ff8c965913e7c88bee0c9f45b39ad08ec18835ade77734959498" exitCode=0
Oct 10 17:17:13 crc kubenswrapper[4799]: I1010 17:17:13.107821 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q7krn" event={"ID":"08bdef95-be4e-4fe8-adb2-6a7f075b2a8d","Type":"ContainerDied","Data":"4142cae95884ff8c965913e7c88bee0c9f45b39ad08ec18835ade77734959498"}
Oct 10 17:17:13 crc kubenswrapper[4799]: I1010 17:17:13.107858 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q7krn" event={"ID":"08bdef95-be4e-4fe8-adb2-6a7f075b2a8d","Type":"ContainerDied","Data":"ef4ba221df9a2004e5cb58dcddeb7db09a8ed1cdc836c14850ff51361e49e9a1"}
Oct 10 17:17:13 crc kubenswrapper[4799]: I1010 17:17:13.107879 4799 scope.go:117] "RemoveContainer" containerID="4142cae95884ff8c965913e7c88bee0c9f45b39ad08ec18835ade77734959498"
Oct 10 17:17:13 crc kubenswrapper[4799]: I1010 17:17:13.107895 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-q7krn"
Oct 10 17:17:13 crc kubenswrapper[4799]: I1010 17:17:13.151365 4799 scope.go:117] "RemoveContainer" containerID="8203ed1d894f9471fce1e6c7ad5864b833281713b23e96ad3f0fbd9a1da312fa"
Oct 10 17:17:13 crc kubenswrapper[4799]: I1010 17:17:13.155333 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-q7krn"]
Oct 10 17:17:13 crc kubenswrapper[4799]: I1010 17:17:13.214685 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-q7krn"]
Oct 10 17:17:13 crc kubenswrapper[4799]: I1010 17:17:13.229708 4799 scope.go:117] "RemoveContainer" containerID="7a1f9fb453c6daa0f60214480ce9bdfcf5a5706d1af07bc7e94ffaef77c1111e"
Oct 10 17:17:13 crc kubenswrapper[4799]: I1010 17:17:13.247207 4799 scope.go:117] "RemoveContainer" containerID="4142cae95884ff8c965913e7c88bee0c9f45b39ad08ec18835ade77734959498"
Oct 10 17:17:13 crc kubenswrapper[4799]: E1010 17:17:13.247677 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4142cae95884ff8c965913e7c88bee0c9f45b39ad08ec18835ade77734959498\": container with ID starting with 4142cae95884ff8c965913e7c88bee0c9f45b39ad08ec18835ade77734959498 not found: ID does not exist" containerID="4142cae95884ff8c965913e7c88bee0c9f45b39ad08ec18835ade77734959498"
Oct 10 17:17:13 crc kubenswrapper[4799]: I1010 17:17:13.247746 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4142cae95884ff8c965913e7c88bee0c9f45b39ad08ec18835ade77734959498"} err="failed to get container status \"4142cae95884ff8c965913e7c88bee0c9f45b39ad08ec18835ade77734959498\": rpc error: code = NotFound desc = could not find container \"4142cae95884ff8c965913e7c88bee0c9f45b39ad08ec18835ade77734959498\": container with ID starting with 4142cae95884ff8c965913e7c88bee0c9f45b39ad08ec18835ade77734959498 not found: ID does not exist"
Oct 10 17:17:13 crc kubenswrapper[4799]: I1010 17:17:13.247842 4799 scope.go:117] "RemoveContainer" containerID="8203ed1d894f9471fce1e6c7ad5864b833281713b23e96ad3f0fbd9a1da312fa"
Oct 10 17:17:13 crc kubenswrapper[4799]: E1010 17:17:13.248168 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8203ed1d894f9471fce1e6c7ad5864b833281713b23e96ad3f0fbd9a1da312fa\": container with ID starting with 8203ed1d894f9471fce1e6c7ad5864b833281713b23e96ad3f0fbd9a1da312fa not found: ID does not exist" containerID="8203ed1d894f9471fce1e6c7ad5864b833281713b23e96ad3f0fbd9a1da312fa"
Oct 10 17:17:13 crc kubenswrapper[4799]: I1010 17:17:13.248219 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8203ed1d894f9471fce1e6c7ad5864b833281713b23e96ad3f0fbd9a1da312fa"} err="failed to get container status \"8203ed1d894f9471fce1e6c7ad5864b833281713b23e96ad3f0fbd9a1da312fa\": rpc error: code = NotFound desc = could not find container \"8203ed1d894f9471fce1e6c7ad5864b833281713b23e96ad3f0fbd9a1da312fa\": container with ID starting with 8203ed1d894f9471fce1e6c7ad5864b833281713b23e96ad3f0fbd9a1da312fa not found: ID does not exist"
Oct 10 17:17:13 crc kubenswrapper[4799]: I1010 17:17:13.248237 4799 scope.go:117] "RemoveContainer" containerID="7a1f9fb453c6daa0f60214480ce9bdfcf5a5706d1af07bc7e94ffaef77c1111e"
Oct 10 17:17:13 crc kubenswrapper[4799]: E1010 17:17:13.248530 4799 log.go:32] "ContainerStatus from runtime service
failed" err="rpc error: code = NotFound desc = could not find container \"7a1f9fb453c6daa0f60214480ce9bdfcf5a5706d1af07bc7e94ffaef77c1111e\": container with ID starting with 7a1f9fb453c6daa0f60214480ce9bdfcf5a5706d1af07bc7e94ffaef77c1111e not found: ID does not exist" containerID="7a1f9fb453c6daa0f60214480ce9bdfcf5a5706d1af07bc7e94ffaef77c1111e" Oct 10 17:17:13 crc kubenswrapper[4799]: I1010 17:17:13.248577 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a1f9fb453c6daa0f60214480ce9bdfcf5a5706d1af07bc7e94ffaef77c1111e"} err="failed to get container status \"7a1f9fb453c6daa0f60214480ce9bdfcf5a5706d1af07bc7e94ffaef77c1111e\": rpc error: code = NotFound desc = could not find container \"7a1f9fb453c6daa0f60214480ce9bdfcf5a5706d1af07bc7e94ffaef77c1111e\": container with ID starting with 7a1f9fb453c6daa0f60214480ce9bdfcf5a5706d1af07bc7e94ffaef77c1111e not found: ID does not exist" Oct 10 17:17:13 crc kubenswrapper[4799]: I1010 17:17:13.418635 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08bdef95-be4e-4fe8-adb2-6a7f075b2a8d" path="/var/lib/kubelet/pods/08bdef95-be4e-4fe8-adb2-6a7f075b2a8d/volumes" Oct 10 17:19:15 crc kubenswrapper[4799]: I1010 17:19:15.248956 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 17:19:15 crc kubenswrapper[4799]: I1010 17:19:15.249544 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 17:19:45 crc kubenswrapper[4799]: I1010 17:19:45.248744 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 17:19:45 crc kubenswrapper[4799]: I1010 17:19:45.249380 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 17:20:15 crc kubenswrapper[4799]: I1010 17:20:15.248936 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 17:20:15 crc kubenswrapper[4799]: I1010 17:20:15.249406 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 17:20:15 crc kubenswrapper[4799]: I1010 17:20:15.249448 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 17:20:15 crc kubenswrapper[4799]: I1010 17:20:15.249947 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bf4eb3e56ead59e9fb87d5f16a0e791ef1612b88c8f649a4f091b1c1c12d7b71"} pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 10 17:20:15 crc kubenswrapper[4799]: I1010 17:20:15.249999 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" containerID="cri-o://bf4eb3e56ead59e9fb87d5f16a0e791ef1612b88c8f649a4f091b1c1c12d7b71" gracePeriod=600 Oct 10 17:20:15 crc kubenswrapper[4799]: E1010 17:20:15.403502 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:20:15 crc kubenswrapper[4799]: I1010 17:20:15.892549 4799 generic.go:334] "Generic (PLEG): container finished" podID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerID="bf4eb3e56ead59e9fb87d5f16a0e791ef1612b88c8f649a4f091b1c1c12d7b71" exitCode=0 Oct 10 17:20:15 crc kubenswrapper[4799]: I1010 17:20:15.892645 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerDied","Data":"bf4eb3e56ead59e9fb87d5f16a0e791ef1612b88c8f649a4f091b1c1c12d7b71"} Oct 10 17:20:15 crc kubenswrapper[4799]: I1010 17:20:15.892736 4799 scope.go:117] "RemoveContainer" containerID="42045cbeab3b7f6682f926dc6bfd21910965b5211bc6575c74a92e96cc7dc346" Oct 10 17:20:15 crc kubenswrapper[4799]: I1010 17:20:15.893780 4799 scope.go:117] "RemoveContainer" containerID="bf4eb3e56ead59e9fb87d5f16a0e791ef1612b88c8f649a4f091b1c1c12d7b71" Oct 10 17:20:15 crc kubenswrapper[4799]: E1010 17:20:15.894158 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:20:31 crc kubenswrapper[4799]: I1010 17:20:31.402466 4799 scope.go:117] "RemoveContainer" containerID="bf4eb3e56ead59e9fb87d5f16a0e791ef1612b88c8f649a4f091b1c1c12d7b71" Oct 10 17:20:31 crc kubenswrapper[4799]: E1010 17:20:31.403378 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:20:44 crc 
kubenswrapper[4799]: I1010 17:20:44.402952 4799 scope.go:117] "RemoveContainer" containerID="bf4eb3e56ead59e9fb87d5f16a0e791ef1612b88c8f649a4f091b1c1c12d7b71" Oct 10 17:20:44 crc kubenswrapper[4799]: E1010 17:20:44.405898 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:20:55 crc kubenswrapper[4799]: I1010 17:20:55.403510 4799 scope.go:117] "RemoveContainer" containerID="bf4eb3e56ead59e9fb87d5f16a0e791ef1612b88c8f649a4f091b1c1c12d7b71" Oct 10 17:20:55 crc kubenswrapper[4799]: E1010 17:20:55.404574 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:21:09 crc kubenswrapper[4799]: I1010 17:21:09.403079 4799 scope.go:117] "RemoveContainer" containerID="bf4eb3e56ead59e9fb87d5f16a0e791ef1612b88c8f649a4f091b1c1c12d7b71" Oct 10 17:21:09 crc kubenswrapper[4799]: E1010 17:21:09.403829 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:21:21 crc kubenswrapper[4799]: I1010 17:21:21.402712 4799 scope.go:117] "RemoveContainer" containerID="bf4eb3e56ead59e9fb87d5f16a0e791ef1612b88c8f649a4f091b1c1c12d7b71" Oct 10 17:21:21 crc kubenswrapper[4799]: E1010 17:21:21.403637 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:21:32 crc kubenswrapper[4799]: I1010 17:21:32.402749 4799 scope.go:117] "RemoveContainer" containerID="bf4eb3e56ead59e9fb87d5f16a0e791ef1612b88c8f649a4f091b1c1c12d7b71" Oct 10 17:21:32 crc kubenswrapper[4799]: E1010 17:21:32.403631 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:21:43 crc kubenswrapper[4799]: I1010 17:21:43.402460 4799 scope.go:117] "RemoveContainer" containerID="bf4eb3e56ead59e9fb87d5f16a0e791ef1612b88c8f649a4f091b1c1c12d7b71" Oct 10 17:21:43 crc 
kubenswrapper[4799]: E1010 17:21:43.403158 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:21:55 crc kubenswrapper[4799]: I1010 17:21:55.404577 4799 scope.go:117] "RemoveContainer" containerID="bf4eb3e56ead59e9fb87d5f16a0e791ef1612b88c8f649a4f091b1c1c12d7b71" Oct 10 17:21:55 crc kubenswrapper[4799]: E1010 17:21:55.405746 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:22:09 crc kubenswrapper[4799]: I1010 17:22:09.402342 4799 scope.go:117] "RemoveContainer" containerID="bf4eb3e56ead59e9fb87d5f16a0e791ef1612b88c8f649a4f091b1c1c12d7b71" Oct 10 17:22:09 crc kubenswrapper[4799]: E1010 17:22:09.403094 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:22:24 crc kubenswrapper[4799]: I1010 17:22:24.401901 4799 scope.go:117] "RemoveContainer" containerID="bf4eb3e56ead59e9fb87d5f16a0e791ef1612b88c8f649a4f091b1c1c12d7b71" Oct 10 17:22:24 crc kubenswrapper[4799]: E1010 17:22:24.402740 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:22:33 crc kubenswrapper[4799]: I1010 17:22:33.471639 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7xzth"] Oct 10 17:22:33 crc kubenswrapper[4799]: E1010 17:22:33.474205 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08bdef95-be4e-4fe8-adb2-6a7f075b2a8d" containerName="extract-utilities" Oct 10 17:22:33 crc kubenswrapper[4799]: I1010 17:22:33.474368 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="08bdef95-be4e-4fe8-adb2-6a7f075b2a8d" containerName="extract-utilities" Oct 10 17:22:33 crc kubenswrapper[4799]: E1010 17:22:33.474524 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08bdef95-be4e-4fe8-adb2-6a7f075b2a8d" containerName="registry-server" Oct 10 17:22:33 crc kubenswrapper[4799]: I1010 17:22:33.474647 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="08bdef95-be4e-4fe8-adb2-6a7f075b2a8d" containerName="registry-server" Oct 10 17:22:33 crc kubenswrapper[4799]: E1010 17:22:33.474816 4799 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="08bdef95-be4e-4fe8-adb2-6a7f075b2a8d" containerName="extract-content" Oct 10 17:22:33 crc kubenswrapper[4799]: I1010 17:22:33.474949 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="08bdef95-be4e-4fe8-adb2-6a7f075b2a8d" containerName="extract-content" Oct 10 17:22:33 crc kubenswrapper[4799]: I1010 17:22:33.475312 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="08bdef95-be4e-4fe8-adb2-6a7f075b2a8d" containerName="registry-server" Oct 10 17:22:33 crc kubenswrapper[4799]: I1010 17:22:33.477498 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7xzth" Oct 10 17:22:33 crc kubenswrapper[4799]: I1010 17:22:33.489188 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7xzth"] Oct 10 17:22:33 crc kubenswrapper[4799]: I1010 17:22:33.552842 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28c8230f-3388-4ada-b699-e9b408db1e7a-utilities\") pod \"redhat-marketplace-7xzth\" (UID: \"28c8230f-3388-4ada-b699-e9b408db1e7a\") " pod="openshift-marketplace/redhat-marketplace-7xzth" Oct 10 17:22:33 crc kubenswrapper[4799]: I1010 17:22:33.553164 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qpcvg\" (UniqueName: \"kubernetes.io/projected/28c8230f-3388-4ada-b699-e9b408db1e7a-kube-api-access-qpcvg\") pod \"redhat-marketplace-7xzth\" (UID: \"28c8230f-3388-4ada-b699-e9b408db1e7a\") " pod="openshift-marketplace/redhat-marketplace-7xzth" Oct 10 17:22:33 crc kubenswrapper[4799]: I1010 17:22:33.553301 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28c8230f-3388-4ada-b699-e9b408db1e7a-catalog-content\") pod \"redhat-marketplace-7xzth\" (UID: \"28c8230f-3388-4ada-b699-e9b408db1e7a\") " pod="openshift-marketplace/redhat-marketplace-7xzth" Oct 10 17:22:33 crc kubenswrapper[4799]: I1010 17:22:33.654772 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28c8230f-3388-4ada-b699-e9b408db1e7a-utilities\") pod \"redhat-marketplace-7xzth\" (UID: \"28c8230f-3388-4ada-b699-e9b408db1e7a\") " pod="openshift-marketplace/redhat-marketplace-7xzth" Oct 10 17:22:33 crc kubenswrapper[4799]: I1010 17:22:33.654857 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qpcvg\" (UniqueName: \"kubernetes.io/projected/28c8230f-3388-4ada-b699-e9b408db1e7a-kube-api-access-qpcvg\") pod \"redhat-marketplace-7xzth\" (UID: \"28c8230f-3388-4ada-b699-e9b408db1e7a\") " pod="openshift-marketplace/redhat-marketplace-7xzth" Oct 10 17:22:33 crc kubenswrapper[4799]: I1010 17:22:33.654886 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28c8230f-3388-4ada-b699-e9b408db1e7a-catalog-content\") pod \"redhat-marketplace-7xzth\" (UID: \"28c8230f-3388-4ada-b699-e9b408db1e7a\") " pod="openshift-marketplace/redhat-marketplace-7xzth" Oct 10 17:22:33 crc kubenswrapper[4799]: I1010 17:22:33.655426 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28c8230f-3388-4ada-b699-e9b408db1e7a-utilities\") pod 
\"redhat-marketplace-7xzth\" (UID: \"28c8230f-3388-4ada-b699-e9b408db1e7a\") " pod="openshift-marketplace/redhat-marketplace-7xzth" Oct 10 17:22:33 crc kubenswrapper[4799]: I1010 17:22:33.655463 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28c8230f-3388-4ada-b699-e9b408db1e7a-catalog-content\") pod \"redhat-marketplace-7xzth\" (UID: \"28c8230f-3388-4ada-b699-e9b408db1e7a\") " pod="openshift-marketplace/redhat-marketplace-7xzth" Oct 10 17:22:33 crc kubenswrapper[4799]: I1010 17:22:33.678071 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qpcvg\" (UniqueName: \"kubernetes.io/projected/28c8230f-3388-4ada-b699-e9b408db1e7a-kube-api-access-qpcvg\") pod \"redhat-marketplace-7xzth\" (UID: \"28c8230f-3388-4ada-b699-e9b408db1e7a\") " pod="openshift-marketplace/redhat-marketplace-7xzth" Oct 10 17:22:33 crc kubenswrapper[4799]: I1010 17:22:33.822633 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7xzth" Oct 10 17:22:34 crc kubenswrapper[4799]: I1010 17:22:34.304984 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7xzth"] Oct 10 17:22:34 crc kubenswrapper[4799]: W1010 17:22:34.322829 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod28c8230f_3388_4ada_b699_e9b408db1e7a.slice/crio-a8f0abc50a0a9ce34b7d8adb6dba9fb6bbef8018b90082d7f8c03b55a1d0ad28 WatchSource:0}: Error finding container a8f0abc50a0a9ce34b7d8adb6dba9fb6bbef8018b90082d7f8c03b55a1d0ad28: Status 404 returned error can't find the container with id a8f0abc50a0a9ce34b7d8adb6dba9fb6bbef8018b90082d7f8c03b55a1d0ad28 Oct 10 17:22:35 crc kubenswrapper[4799]: I1010 17:22:35.252716 4799 generic.go:334] "Generic (PLEG): container finished" podID="28c8230f-3388-4ada-b699-e9b408db1e7a" containerID="54af260c4fca05005b1ce31898adfd63a23199d922f959fbbba4eda9ea7620a9" exitCode=0 Oct 10 17:22:35 crc kubenswrapper[4799]: I1010 17:22:35.252791 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7xzth" event={"ID":"28c8230f-3388-4ada-b699-e9b408db1e7a","Type":"ContainerDied","Data":"54af260c4fca05005b1ce31898adfd63a23199d922f959fbbba4eda9ea7620a9"} Oct 10 17:22:35 crc kubenswrapper[4799]: I1010 17:22:35.253148 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7xzth" event={"ID":"28c8230f-3388-4ada-b699-e9b408db1e7a","Type":"ContainerStarted","Data":"a8f0abc50a0a9ce34b7d8adb6dba9fb6bbef8018b90082d7f8c03b55a1d0ad28"} Oct 10 17:22:35 crc kubenswrapper[4799]: I1010 17:22:35.254684 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 10 17:22:35 crc kubenswrapper[4799]: I1010 17:22:35.403129 4799 scope.go:117] "RemoveContainer" containerID="bf4eb3e56ead59e9fb87d5f16a0e791ef1612b88c8f649a4f091b1c1c12d7b71" Oct 10 17:22:35 crc kubenswrapper[4799]: E1010 17:22:35.403591 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" 
podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:22:36 crc kubenswrapper[4799]: I1010 17:22:36.262541 4799 generic.go:334] "Generic (PLEG): container finished" podID="28c8230f-3388-4ada-b699-e9b408db1e7a" containerID="f207004143c24c361dae839c7afcf04210f042f2546497dbf41192f967fda177" exitCode=0 Oct 10 17:22:36 crc kubenswrapper[4799]: I1010 17:22:36.262687 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7xzth" event={"ID":"28c8230f-3388-4ada-b699-e9b408db1e7a","Type":"ContainerDied","Data":"f207004143c24c361dae839c7afcf04210f042f2546497dbf41192f967fda177"} Oct 10 17:22:37 crc kubenswrapper[4799]: I1010 17:22:37.270640 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7xzth" event={"ID":"28c8230f-3388-4ada-b699-e9b408db1e7a","Type":"ContainerStarted","Data":"ecadc0f6edc3ae2f249c211911885e000776deb705d03df9d07befe3cb6b503c"} Oct 10 17:22:37 crc kubenswrapper[4799]: I1010 17:22:37.290349 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7xzth" podStartSLOduration=2.875548775 podStartE2EDuration="4.290330319s" podCreationTimestamp="2025-10-10 17:22:33 +0000 UTC" firstStartedPulling="2025-10-10 17:22:35.2543759 +0000 UTC m=+3048.762700015" lastFinishedPulling="2025-10-10 17:22:36.669157444 +0000 UTC m=+3050.177481559" observedRunningTime="2025-10-10 17:22:37.289305584 +0000 UTC m=+3050.797629709" watchObservedRunningTime="2025-10-10 17:22:37.290330319 +0000 UTC m=+3050.798654454" Oct 10 17:22:43 crc kubenswrapper[4799]: I1010 17:22:43.823112 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7xzth" Oct 10 17:22:43 crc kubenswrapper[4799]: I1010 17:22:43.823931 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7xzth" Oct 10 17:22:43 crc kubenswrapper[4799]: I1010 17:22:43.905836 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7xzth" Oct 10 17:22:44 crc kubenswrapper[4799]: I1010 17:22:44.432850 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7xzth" Oct 10 17:22:44 crc kubenswrapper[4799]: I1010 17:22:44.493374 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7xzth"] Oct 10 17:22:46 crc kubenswrapper[4799]: I1010 17:22:46.393628 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7xzth" podUID="28c8230f-3388-4ada-b699-e9b408db1e7a" containerName="registry-server" containerID="cri-o://ecadc0f6edc3ae2f249c211911885e000776deb705d03df9d07befe3cb6b503c" gracePeriod=2 Oct 10 17:22:46 crc kubenswrapper[4799]: I1010 17:22:46.403210 4799 scope.go:117] "RemoveContainer" containerID="bf4eb3e56ead59e9fb87d5f16a0e791ef1612b88c8f649a4f091b1c1c12d7b71" Oct 10 17:22:46 crc kubenswrapper[4799]: E1010 17:22:46.405250 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 
17:22:46 crc kubenswrapper[4799]: I1010 17:22:46.878899 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7xzth" Oct 10 17:22:46 crc kubenswrapper[4799]: I1010 17:22:46.966717 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28c8230f-3388-4ada-b699-e9b408db1e7a-utilities\") pod \"28c8230f-3388-4ada-b699-e9b408db1e7a\" (UID: \"28c8230f-3388-4ada-b699-e9b408db1e7a\") " Oct 10 17:22:46 crc kubenswrapper[4799]: I1010 17:22:46.966948 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qpcvg\" (UniqueName: \"kubernetes.io/projected/28c8230f-3388-4ada-b699-e9b408db1e7a-kube-api-access-qpcvg\") pod \"28c8230f-3388-4ada-b699-e9b408db1e7a\" (UID: \"28c8230f-3388-4ada-b699-e9b408db1e7a\") " Oct 10 17:22:46 crc kubenswrapper[4799]: I1010 17:22:46.966993 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28c8230f-3388-4ada-b699-e9b408db1e7a-catalog-content\") pod \"28c8230f-3388-4ada-b699-e9b408db1e7a\" (UID: \"28c8230f-3388-4ada-b699-e9b408db1e7a\") " Oct 10 17:22:46 crc kubenswrapper[4799]: I1010 17:22:46.967512 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28c8230f-3388-4ada-b699-e9b408db1e7a-utilities" (OuterVolumeSpecName: "utilities") pod "28c8230f-3388-4ada-b699-e9b408db1e7a" (UID: "28c8230f-3388-4ada-b699-e9b408db1e7a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:22:46 crc kubenswrapper[4799]: I1010 17:22:46.973035 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28c8230f-3388-4ada-b699-e9b408db1e7a-kube-api-access-qpcvg" (OuterVolumeSpecName: "kube-api-access-qpcvg") pod "28c8230f-3388-4ada-b699-e9b408db1e7a" (UID: "28c8230f-3388-4ada-b699-e9b408db1e7a"). InnerVolumeSpecName "kube-api-access-qpcvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:22:46 crc kubenswrapper[4799]: I1010 17:22:46.981199 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28c8230f-3388-4ada-b699-e9b408db1e7a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "28c8230f-3388-4ada-b699-e9b408db1e7a" (UID: "28c8230f-3388-4ada-b699-e9b408db1e7a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:22:47 crc kubenswrapper[4799]: I1010 17:22:47.068358 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qpcvg\" (UniqueName: \"kubernetes.io/projected/28c8230f-3388-4ada-b699-e9b408db1e7a-kube-api-access-qpcvg\") on node \"crc\" DevicePath \"\"" Oct 10 17:22:47 crc kubenswrapper[4799]: I1010 17:22:47.068391 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28c8230f-3388-4ada-b699-e9b408db1e7a-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 17:22:47 crc kubenswrapper[4799]: I1010 17:22:47.068400 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28c8230f-3388-4ada-b699-e9b408db1e7a-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 17:22:47 crc kubenswrapper[4799]: I1010 17:22:47.404494 4799 generic.go:334] "Generic (PLEG): container finished" podID="28c8230f-3388-4ada-b699-e9b408db1e7a" containerID="ecadc0f6edc3ae2f249c211911885e000776deb705d03df9d07befe3cb6b503c" exitCode=0 Oct 10 17:22:47 crc kubenswrapper[4799]: I1010 17:22:47.407143 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7xzth" Oct 10 17:22:47 crc kubenswrapper[4799]: I1010 17:22:47.417440 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7xzth" event={"ID":"28c8230f-3388-4ada-b699-e9b408db1e7a","Type":"ContainerDied","Data":"ecadc0f6edc3ae2f249c211911885e000776deb705d03df9d07befe3cb6b503c"} Oct 10 17:22:47 crc kubenswrapper[4799]: I1010 17:22:47.417497 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7xzth" event={"ID":"28c8230f-3388-4ada-b699-e9b408db1e7a","Type":"ContainerDied","Data":"a8f0abc50a0a9ce34b7d8adb6dba9fb6bbef8018b90082d7f8c03b55a1d0ad28"} Oct 10 17:22:47 crc kubenswrapper[4799]: I1010 17:22:47.417528 4799 scope.go:117] "RemoveContainer" containerID="ecadc0f6edc3ae2f249c211911885e000776deb705d03df9d07befe3cb6b503c" Oct 10 17:22:47 crc kubenswrapper[4799]: I1010 17:22:47.457804 4799 scope.go:117] "RemoveContainer" containerID="f207004143c24c361dae839c7afcf04210f042f2546497dbf41192f967fda177" Oct 10 17:22:47 crc kubenswrapper[4799]: I1010 17:22:47.466537 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7xzth"] Oct 10 17:22:47 crc kubenswrapper[4799]: I1010 17:22:47.472268 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7xzth"] Oct 10 17:22:47 crc kubenswrapper[4799]: I1010 17:22:47.481680 4799 scope.go:117] "RemoveContainer" containerID="54af260c4fca05005b1ce31898adfd63a23199d922f959fbbba4eda9ea7620a9" Oct 10 17:22:47 crc kubenswrapper[4799]: I1010 17:22:47.510278 4799 scope.go:117] "RemoveContainer" containerID="ecadc0f6edc3ae2f249c211911885e000776deb705d03df9d07befe3cb6b503c" Oct 10 17:22:47 crc kubenswrapper[4799]: E1010 17:22:47.510956 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ecadc0f6edc3ae2f249c211911885e000776deb705d03df9d07befe3cb6b503c\": container with ID starting with ecadc0f6edc3ae2f249c211911885e000776deb705d03df9d07befe3cb6b503c not found: ID does not exist" containerID="ecadc0f6edc3ae2f249c211911885e000776deb705d03df9d07befe3cb6b503c" Oct 10 17:22:47 crc kubenswrapper[4799]: I1010 17:22:47.511010 4799 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ecadc0f6edc3ae2f249c211911885e000776deb705d03df9d07befe3cb6b503c"} err="failed to get container status \"ecadc0f6edc3ae2f249c211911885e000776deb705d03df9d07befe3cb6b503c\": rpc error: code = NotFound desc = could not find container \"ecadc0f6edc3ae2f249c211911885e000776deb705d03df9d07befe3cb6b503c\": container with ID starting with ecadc0f6edc3ae2f249c211911885e000776deb705d03df9d07befe3cb6b503c not found: ID does not exist" Oct 10 17:22:47 crc kubenswrapper[4799]: I1010 17:22:47.511046 4799 scope.go:117] "RemoveContainer" containerID="f207004143c24c361dae839c7afcf04210f042f2546497dbf41192f967fda177" Oct 10 17:22:47 crc kubenswrapper[4799]: E1010 17:22:47.511593 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f207004143c24c361dae839c7afcf04210f042f2546497dbf41192f967fda177\": container with ID starting with f207004143c24c361dae839c7afcf04210f042f2546497dbf41192f967fda177 not found: ID does not exist" containerID="f207004143c24c361dae839c7afcf04210f042f2546497dbf41192f967fda177" Oct 10 17:22:47 crc kubenswrapper[4799]: I1010 17:22:47.511622 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f207004143c24c361dae839c7afcf04210f042f2546497dbf41192f967fda177"} err="failed to get container status \"f207004143c24c361dae839c7afcf04210f042f2546497dbf41192f967fda177\": rpc error: code = NotFound desc = could not find container \"f207004143c24c361dae839c7afcf04210f042f2546497dbf41192f967fda177\": container with ID starting with f207004143c24c361dae839c7afcf04210f042f2546497dbf41192f967fda177 not found: ID does not exist" Oct 10 17:22:47 crc kubenswrapper[4799]: I1010 17:22:47.511640 4799 scope.go:117] "RemoveContainer" containerID="54af260c4fca05005b1ce31898adfd63a23199d922f959fbbba4eda9ea7620a9" Oct 10 17:22:47 crc kubenswrapper[4799]: E1010 17:22:47.512109 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54af260c4fca05005b1ce31898adfd63a23199d922f959fbbba4eda9ea7620a9\": container with ID starting with 54af260c4fca05005b1ce31898adfd63a23199d922f959fbbba4eda9ea7620a9 not found: ID does not exist" containerID="54af260c4fca05005b1ce31898adfd63a23199d922f959fbbba4eda9ea7620a9" Oct 10 17:22:47 crc kubenswrapper[4799]: I1010 17:22:47.512136 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54af260c4fca05005b1ce31898adfd63a23199d922f959fbbba4eda9ea7620a9"} err="failed to get container status \"54af260c4fca05005b1ce31898adfd63a23199d922f959fbbba4eda9ea7620a9\": rpc error: code = NotFound desc = could not find container \"54af260c4fca05005b1ce31898adfd63a23199d922f959fbbba4eda9ea7620a9\": container with ID starting with 54af260c4fca05005b1ce31898adfd63a23199d922f959fbbba4eda9ea7620a9 not found: ID does not exist" Oct 10 17:22:49 crc kubenswrapper[4799]: I1010 17:22:49.422178 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28c8230f-3388-4ada-b699-e9b408db1e7a" path="/var/lib/kubelet/pods/28c8230f-3388-4ada-b699-e9b408db1e7a/volumes" Oct 10 17:22:57 crc kubenswrapper[4799]: E1010 17:22:57.442153 4799 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod28c8230f_3388_4ada_b699_e9b408db1e7a.slice/crio-a8f0abc50a0a9ce34b7d8adb6dba9fb6bbef8018b90082d7f8c03b55a1d0ad28\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod28c8230f_3388_4ada_b699_e9b408db1e7a.slice\": RecentStats: unable to find data in memory cache]" Oct 10 17:22:58 crc kubenswrapper[4799]: I1010 17:22:58.402596 4799 scope.go:117] "RemoveContainer" containerID="bf4eb3e56ead59e9fb87d5f16a0e791ef1612b88c8f649a4f091b1c1c12d7b71" Oct 10 17:22:58 crc kubenswrapper[4799]: E1010 17:22:58.403412 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:23:07 crc kubenswrapper[4799]: E1010 17:23:07.641273 4799 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod28c8230f_3388_4ada_b699_e9b408db1e7a.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod28c8230f_3388_4ada_b699_e9b408db1e7a.slice/crio-a8f0abc50a0a9ce34b7d8adb6dba9fb6bbef8018b90082d7f8c03b55a1d0ad28\": RecentStats: unable to find data in memory cache]" Oct 10 17:23:09 crc kubenswrapper[4799]: I1010 17:23:09.403429 4799 scope.go:117] "RemoveContainer" containerID="bf4eb3e56ead59e9fb87d5f16a0e791ef1612b88c8f649a4f091b1c1c12d7b71" Oct 10 17:23:09 crc kubenswrapper[4799]: E1010 17:23:09.404268 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:23:17 crc kubenswrapper[4799]: E1010 17:23:17.825714 4799 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod28c8230f_3388_4ada_b699_e9b408db1e7a.slice/crio-a8f0abc50a0a9ce34b7d8adb6dba9fb6bbef8018b90082d7f8c03b55a1d0ad28\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod28c8230f_3388_4ada_b699_e9b408db1e7a.slice\": RecentStats: unable to find data in memory cache]" Oct 10 17:23:20 crc kubenswrapper[4799]: I1010 17:23:20.402889 4799 scope.go:117] "RemoveContainer" containerID="bf4eb3e56ead59e9fb87d5f16a0e791ef1612b88c8f649a4f091b1c1c12d7b71" Oct 10 17:23:20 crc kubenswrapper[4799]: E1010 17:23:20.403632 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" 
Oct 10 17:23:28 crc kubenswrapper[4799]: E1010 17:23:28.036496 4799 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod28c8230f_3388_4ada_b699_e9b408db1e7a.slice/crio-a8f0abc50a0a9ce34b7d8adb6dba9fb6bbef8018b90082d7f8c03b55a1d0ad28\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod28c8230f_3388_4ada_b699_e9b408db1e7a.slice\": RecentStats: unable to find data in memory cache]" Oct 10 17:23:31 crc kubenswrapper[4799]: I1010 17:23:31.404343 4799 scope.go:117] "RemoveContainer" containerID="bf4eb3e56ead59e9fb87d5f16a0e791ef1612b88c8f649a4f091b1c1c12d7b71" Oct 10 17:23:31 crc kubenswrapper[4799]: E1010 17:23:31.405174 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:23:38 crc kubenswrapper[4799]: E1010 17:23:38.205876 4799 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod28c8230f_3388_4ada_b699_e9b408db1e7a.slice/crio-a8f0abc50a0a9ce34b7d8adb6dba9fb6bbef8018b90082d7f8c03b55a1d0ad28\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod28c8230f_3388_4ada_b699_e9b408db1e7a.slice\": RecentStats: unable to find data in memory cache]" Oct 10 17:23:45 crc kubenswrapper[4799]: I1010 17:23:45.403069 4799 scope.go:117] "RemoveContainer" containerID="bf4eb3e56ead59e9fb87d5f16a0e791ef1612b88c8f649a4f091b1c1c12d7b71" Oct 10 17:23:45 crc kubenswrapper[4799]: E1010 17:23:45.404017 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:23:58 crc kubenswrapper[4799]: I1010 17:23:58.403174 4799 scope.go:117] "RemoveContainer" containerID="bf4eb3e56ead59e9fb87d5f16a0e791ef1612b88c8f649a4f091b1c1c12d7b71" Oct 10 17:23:58 crc kubenswrapper[4799]: E1010 17:23:58.404115 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:24:09 crc kubenswrapper[4799]: I1010 17:24:09.402149 4799 scope.go:117] "RemoveContainer" containerID="bf4eb3e56ead59e9fb87d5f16a0e791ef1612b88c8f649a4f091b1c1c12d7b71" Oct 10 17:24:09 crc kubenswrapper[4799]: E1010 17:24:09.402841 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 
5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:24:24 crc kubenswrapper[4799]: I1010 17:24:24.402640 4799 scope.go:117] "RemoveContainer" containerID="bf4eb3e56ead59e9fb87d5f16a0e791ef1612b88c8f649a4f091b1c1c12d7b71" Oct 10 17:24:24 crc kubenswrapper[4799]: E1010 17:24:24.403696 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:24:37 crc kubenswrapper[4799]: I1010 17:24:37.408955 4799 scope.go:117] "RemoveContainer" containerID="bf4eb3e56ead59e9fb87d5f16a0e791ef1612b88c8f649a4f091b1c1c12d7b71" Oct 10 17:24:37 crc kubenswrapper[4799]: E1010 17:24:37.410042 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:24:52 crc kubenswrapper[4799]: I1010 17:24:52.402802 4799 scope.go:117] "RemoveContainer" containerID="bf4eb3e56ead59e9fb87d5f16a0e791ef1612b88c8f649a4f091b1c1c12d7b71" Oct 10 17:24:52 crc kubenswrapper[4799]: E1010 17:24:52.403659 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:25:05 crc kubenswrapper[4799]: I1010 17:25:05.403817 4799 scope.go:117] "RemoveContainer" containerID="bf4eb3e56ead59e9fb87d5f16a0e791ef1612b88c8f649a4f091b1c1c12d7b71" Oct 10 17:25:05 crc kubenswrapper[4799]: E1010 17:25:05.404585 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:25:17 crc kubenswrapper[4799]: I1010 17:25:17.412869 4799 scope.go:117] "RemoveContainer" containerID="bf4eb3e56ead59e9fb87d5f16a0e791ef1612b88c8f649a4f091b1c1c12d7b71" Oct 10 17:25:17 crc kubenswrapper[4799]: I1010 17:25:17.780918 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"1805e3a5f297fe261be5d0ffadb3f386d2dd65f9b6e9f819312403974b1cfafc"} Oct 10 17:26:49 crc kubenswrapper[4799]: I1010 17:26:49.293415 4799 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-268ws"]
Oct 10 17:26:49 crc kubenswrapper[4799]: E1010 17:26:49.295004 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28c8230f-3388-4ada-b699-e9b408db1e7a" containerName="extract-content"
Oct 10 17:26:49 crc kubenswrapper[4799]: I1010 17:26:49.295033 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="28c8230f-3388-4ada-b699-e9b408db1e7a" containerName="extract-content"
Oct 10 17:26:49 crc kubenswrapper[4799]: E1010 17:26:49.295077 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28c8230f-3388-4ada-b699-e9b408db1e7a" containerName="extract-utilities"
Oct 10 17:26:49 crc kubenswrapper[4799]: I1010 17:26:49.295090 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="28c8230f-3388-4ada-b699-e9b408db1e7a" containerName="extract-utilities"
Oct 10 17:26:49 crc kubenswrapper[4799]: E1010 17:26:49.295121 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28c8230f-3388-4ada-b699-e9b408db1e7a" containerName="registry-server"
Oct 10 17:26:49 crc kubenswrapper[4799]: I1010 17:26:49.295135 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="28c8230f-3388-4ada-b699-e9b408db1e7a" containerName="registry-server"
Oct 10 17:26:49 crc kubenswrapper[4799]: I1010 17:26:49.295414 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="28c8230f-3388-4ada-b699-e9b408db1e7a" containerName="registry-server"
Oct 10 17:26:49 crc kubenswrapper[4799]: I1010 17:26:49.302157 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-268ws"
Oct 10 17:26:49 crc kubenswrapper[4799]: I1010 17:26:49.313296 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-268ws"]
Oct 10 17:26:49 crc kubenswrapper[4799]: I1010 17:26:49.402688 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/249b9042-b235-4e8b-b221-0dbe35cda565-utilities\") pod \"community-operators-268ws\" (UID: \"249b9042-b235-4e8b-b221-0dbe35cda565\") " pod="openshift-marketplace/community-operators-268ws"
Oct 10 17:26:49 crc kubenswrapper[4799]: I1010 17:26:49.402826 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/249b9042-b235-4e8b-b221-0dbe35cda565-catalog-content\") pod \"community-operators-268ws\" (UID: \"249b9042-b235-4e8b-b221-0dbe35cda565\") " pod="openshift-marketplace/community-operators-268ws"
Oct 10 17:26:49 crc kubenswrapper[4799]: I1010 17:26:49.403499 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2c8w\" (UniqueName: \"kubernetes.io/projected/249b9042-b235-4e8b-b221-0dbe35cda565-kube-api-access-k2c8w\") pod \"community-operators-268ws\" (UID: \"249b9042-b235-4e8b-b221-0dbe35cda565\") " pod="openshift-marketplace/community-operators-268ws"
Oct 10 17:26:49 crc kubenswrapper[4799]: I1010 17:26:49.504891 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/249b9042-b235-4e8b-b221-0dbe35cda565-utilities\") pod \"community-operators-268ws\" (UID: \"249b9042-b235-4e8b-b221-0dbe35cda565\") " pod="openshift-marketplace/community-operators-268ws"
Oct 10 17:26:49 crc kubenswrapper[4799]: I1010 17:26:49.504992 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/249b9042-b235-4e8b-b221-0dbe35cda565-catalog-content\") pod \"community-operators-268ws\" (UID: \"249b9042-b235-4e8b-b221-0dbe35cda565\") " pod="openshift-marketplace/community-operators-268ws"
Oct 10 17:26:49 crc kubenswrapper[4799]: I1010 17:26:49.505036 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2c8w\" (UniqueName: \"kubernetes.io/projected/249b9042-b235-4e8b-b221-0dbe35cda565-kube-api-access-k2c8w\") pod \"community-operators-268ws\" (UID: \"249b9042-b235-4e8b-b221-0dbe35cda565\") " pod="openshift-marketplace/community-operators-268ws"
Oct 10 17:26:49 crc kubenswrapper[4799]: I1010 17:26:49.505962 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/249b9042-b235-4e8b-b221-0dbe35cda565-catalog-content\") pod \"community-operators-268ws\" (UID: \"249b9042-b235-4e8b-b221-0dbe35cda565\") " pod="openshift-marketplace/community-operators-268ws"
Oct 10 17:26:49 crc kubenswrapper[4799]: I1010 17:26:49.506048 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/249b9042-b235-4e8b-b221-0dbe35cda565-utilities\") pod \"community-operators-268ws\" (UID: \"249b9042-b235-4e8b-b221-0dbe35cda565\") " pod="openshift-marketplace/community-operators-268ws"
Oct 10 17:26:49 crc kubenswrapper[4799]: I1010 17:26:49.529782 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2c8w\" (UniqueName: \"kubernetes.io/projected/249b9042-b235-4e8b-b221-0dbe35cda565-kube-api-access-k2c8w\") pod \"community-operators-268ws\" (UID: \"249b9042-b235-4e8b-b221-0dbe35cda565\") " pod="openshift-marketplace/community-operators-268ws"
Oct 10 17:26:49 crc kubenswrapper[4799]: I1010 17:26:49.635157 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-268ws"
Oct 10 17:26:50 crc kubenswrapper[4799]: I1010 17:26:50.169885 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-268ws"]
Oct 10 17:26:50 crc kubenswrapper[4799]: I1010 17:26:50.630114 4799 generic.go:334] "Generic (PLEG): container finished" podID="249b9042-b235-4e8b-b221-0dbe35cda565" containerID="a25b9e9d12c46a2d6d4659be4416feabe396eabcbbc471ba179978c1608b41c1" exitCode=0
Oct 10 17:26:50 crc kubenswrapper[4799]: I1010 17:26:50.630169 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-268ws" event={"ID":"249b9042-b235-4e8b-b221-0dbe35cda565","Type":"ContainerDied","Data":"a25b9e9d12c46a2d6d4659be4416feabe396eabcbbc471ba179978c1608b41c1"}
Oct 10 17:26:50 crc kubenswrapper[4799]: I1010 17:26:50.630234 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-268ws" event={"ID":"249b9042-b235-4e8b-b221-0dbe35cda565","Type":"ContainerStarted","Data":"beae6c17f487b740728034c3a0b6dbc2d47a4765691b94acce198654153adef1"}
Oct 10 17:26:52 crc kubenswrapper[4799]: I1010 17:26:52.653347 4799 generic.go:334] "Generic (PLEG): container finished" podID="249b9042-b235-4e8b-b221-0dbe35cda565" containerID="bb9903e252cc90c4982db3919e5e09f913e93f1fa97bfb3b3ad4a53dbe338f0c" exitCode=0
Oct 10 17:26:52 crc kubenswrapper[4799]: I1010 17:26:52.653492 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-268ws" event={"ID":"249b9042-b235-4e8b-b221-0dbe35cda565","Type":"ContainerDied","Data":"bb9903e252cc90c4982db3919e5e09f913e93f1fa97bfb3b3ad4a53dbe338f0c"}
Oct 10 17:26:53 crc kubenswrapper[4799]: I1010 17:26:53.664293 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-268ws" event={"ID":"249b9042-b235-4e8b-b221-0dbe35cda565","Type":"ContainerStarted","Data":"78c33ebe7227cc8973ecb49161ee586bed3c0bb925cace7d572351cf5b44d10f"}
Oct 10 17:26:53 crc kubenswrapper[4799]: I1010 17:26:53.696385 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-268ws" podStartSLOduration=2.2709345020000002 podStartE2EDuration="4.69636027s" podCreationTimestamp="2025-10-10 17:26:49 +0000 UTC" firstStartedPulling="2025-10-10 17:26:50.63395411 +0000 UTC m=+3304.142278235" lastFinishedPulling="2025-10-10 17:26:53.059379878 +0000 UTC m=+3306.567704003" observedRunningTime="2025-10-10 17:26:53.686309324 +0000 UTC m=+3307.194633449" watchObservedRunningTime="2025-10-10 17:26:53.69636027 +0000 UTC m=+3307.204684395"
Oct 10 17:26:54 crc kubenswrapper[4799]: I1010 17:26:54.092064 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-r6229"]
Oct 10 17:26:54 crc kubenswrapper[4799]: I1010 17:26:54.095376 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-r6229"
Oct 10 17:26:54 crc kubenswrapper[4799]: I1010 17:26:54.104219 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-r6229"]
Oct 10 17:26:54 crc kubenswrapper[4799]: I1010 17:26:54.280301 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqtt2\" (UniqueName: \"kubernetes.io/projected/eac4a817-52d8-459e-9daf-143b0a89119c-kube-api-access-tqtt2\") pod \"redhat-operators-r6229\" (UID: \"eac4a817-52d8-459e-9daf-143b0a89119c\") " pod="openshift-marketplace/redhat-operators-r6229"
Oct 10 17:26:54 crc kubenswrapper[4799]: I1010 17:26:54.280519 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eac4a817-52d8-459e-9daf-143b0a89119c-catalog-content\") pod \"redhat-operators-r6229\" (UID: \"eac4a817-52d8-459e-9daf-143b0a89119c\") " pod="openshift-marketplace/redhat-operators-r6229"
Oct 10 17:26:54 crc kubenswrapper[4799]: I1010 17:26:54.280585 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eac4a817-52d8-459e-9daf-143b0a89119c-utilities\") pod \"redhat-operators-r6229\" (UID: \"eac4a817-52d8-459e-9daf-143b0a89119c\") " pod="openshift-marketplace/redhat-operators-r6229"
Oct 10 17:26:54 crc kubenswrapper[4799]: I1010 17:26:54.382603 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqtt2\" (UniqueName: \"kubernetes.io/projected/eac4a817-52d8-459e-9daf-143b0a89119c-kube-api-access-tqtt2\") pod \"redhat-operators-r6229\" (UID: \"eac4a817-52d8-459e-9daf-143b0a89119c\") " pod="openshift-marketplace/redhat-operators-r6229"
Oct 10 17:26:54 crc kubenswrapper[4799]: I1010 17:26:54.382803 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eac4a817-52d8-459e-9daf-143b0a89119c-catalog-content\") pod \"redhat-operators-r6229\" (UID: \"eac4a817-52d8-459e-9daf-143b0a89119c\") " pod="openshift-marketplace/redhat-operators-r6229"
Oct 10 17:26:54 crc kubenswrapper[4799]: I1010 17:26:54.382842 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eac4a817-52d8-459e-9daf-143b0a89119c-utilities\") pod \"redhat-operators-r6229\" (UID: \"eac4a817-52d8-459e-9daf-143b0a89119c\") " pod="openshift-marketplace/redhat-operators-r6229"
Oct 10 17:26:54 crc kubenswrapper[4799]: I1010 17:26:54.383535 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eac4a817-52d8-459e-9daf-143b0a89119c-utilities\") pod \"redhat-operators-r6229\" (UID: \"eac4a817-52d8-459e-9daf-143b0a89119c\") " pod="openshift-marketplace/redhat-operators-r6229"
Oct 10 17:26:54 crc kubenswrapper[4799]: I1010 17:26:54.383591 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eac4a817-52d8-459e-9daf-143b0a89119c-catalog-content\") pod \"redhat-operators-r6229\" (UID: \"eac4a817-52d8-459e-9daf-143b0a89119c\") " pod="openshift-marketplace/redhat-operators-r6229"
Oct 10 17:26:54 crc kubenswrapper[4799]: I1010 17:26:54.406407 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqtt2\" (UniqueName: \"kubernetes.io/projected/eac4a817-52d8-459e-9daf-143b0a89119c-kube-api-access-tqtt2\") pod \"redhat-operators-r6229\" (UID: \"eac4a817-52d8-459e-9daf-143b0a89119c\") " pod="openshift-marketplace/redhat-operators-r6229"
Oct 10 17:26:54 crc kubenswrapper[4799]: I1010 17:26:54.463027 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-r6229"
Oct 10 17:26:54 crc kubenswrapper[4799]: W1010 17:26:54.916337 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeac4a817_52d8_459e_9daf_143b0a89119c.slice/crio-5e9159ab9ad67892cf0273986434b82ef0b4a155f51f27b1cdee00bd4e2d3ceb WatchSource:0}: Error finding container 5e9159ab9ad67892cf0273986434b82ef0b4a155f51f27b1cdee00bd4e2d3ceb: Status 404 returned error can't find the container with id 5e9159ab9ad67892cf0273986434b82ef0b4a155f51f27b1cdee00bd4e2d3ceb
Oct 10 17:26:54 crc kubenswrapper[4799]: I1010 17:26:54.919469 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-r6229"]
Oct 10 17:26:55 crc kubenswrapper[4799]: I1010 17:26:55.683235 4799 generic.go:334] "Generic (PLEG): container finished" podID="eac4a817-52d8-459e-9daf-143b0a89119c" containerID="a53eafb9b4d752726ef7a56e2bac7aa0fb06bceafe754546f56f8fa584ab58d0" exitCode=0
Oct 10 17:26:55 crc kubenswrapper[4799]: I1010 17:26:55.683324 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r6229" event={"ID":"eac4a817-52d8-459e-9daf-143b0a89119c","Type":"ContainerDied","Data":"a53eafb9b4d752726ef7a56e2bac7aa0fb06bceafe754546f56f8fa584ab58d0"}
Oct 10 17:26:55 crc kubenswrapper[4799]: I1010 17:26:55.684266 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r6229" event={"ID":"eac4a817-52d8-459e-9daf-143b0a89119c","Type":"ContainerStarted","Data":"5e9159ab9ad67892cf0273986434b82ef0b4a155f51f27b1cdee00bd4e2d3ceb"}
Oct 10 17:26:57 crc kubenswrapper[4799]: I1010 17:26:57.709235 4799 generic.go:334] "Generic (PLEG): container finished" podID="eac4a817-52d8-459e-9daf-143b0a89119c" containerID="3989ad99b104cadbf8e83833f1e82bf0219b6a71d6b300f51987720f9b818fee" exitCode=0
Oct 10 17:26:57 crc kubenswrapper[4799]: I1010 17:26:57.709301 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r6229" event={"ID":"eac4a817-52d8-459e-9daf-143b0a89119c","Type":"ContainerDied","Data":"3989ad99b104cadbf8e83833f1e82bf0219b6a71d6b300f51987720f9b818fee"}
Oct 10 17:26:58 crc kubenswrapper[4799]: I1010 17:26:58.721872 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r6229" event={"ID":"eac4a817-52d8-459e-9daf-143b0a89119c","Type":"ContainerStarted","Data":"3821847fe3c4dc5819f415f84efa9684889d82622dc6db426442c064e895d0f4"}
Oct 10 17:26:58 crc kubenswrapper[4799]: I1010 17:26:58.748453 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-r6229" podStartSLOduration=2.301114709 podStartE2EDuration="4.748420243s" podCreationTimestamp="2025-10-10 17:26:54 +0000 UTC" firstStartedPulling="2025-10-10 17:26:55.686203248 +0000 UTC m=+3309.194527403" lastFinishedPulling="2025-10-10 17:26:58.133508812 +0000 UTC m=+3311.641832937" observedRunningTime="2025-10-10 17:26:58.747644324 +0000 UTC m=+3312.255968479" watchObservedRunningTime="2025-10-10 17:26:58.748420243 +0000 UTC m=+3312.256744398"
Oct 10 17:26:59 crc kubenswrapper[4799]: I1010 17:26:59.635818 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-268ws"
Oct 10 17:26:59 crc kubenswrapper[4799]: I1010 17:26:59.636186 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-268ws"
Oct 10 17:26:59 crc kubenswrapper[4799]: I1010 17:26:59.702261 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-268ws"
Oct 10 17:26:59 crc kubenswrapper[4799]: I1010 17:26:59.791828 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-268ws"
Oct 10 17:27:02 crc kubenswrapper[4799]: I1010 17:27:02.083170 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-268ws"]
Oct 10 17:27:02 crc kubenswrapper[4799]: I1010 17:27:02.083627 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-268ws" podUID="249b9042-b235-4e8b-b221-0dbe35cda565" containerName="registry-server" containerID="cri-o://78c33ebe7227cc8973ecb49161ee586bed3c0bb925cace7d572351cf5b44d10f" gracePeriod=2
Oct 10 17:27:04 crc kubenswrapper[4799]: I1010 17:27:04.463936 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-r6229"
Oct 10 17:27:04 crc kubenswrapper[4799]: I1010 17:27:04.464563 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-r6229"
Oct 10 17:27:04 crc kubenswrapper[4799]: I1010 17:27:04.573867 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-268ws"
Oct 10 17:27:04 crc kubenswrapper[4799]: I1010 17:27:04.749405 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k2c8w\" (UniqueName: \"kubernetes.io/projected/249b9042-b235-4e8b-b221-0dbe35cda565-kube-api-access-k2c8w\") pod \"249b9042-b235-4e8b-b221-0dbe35cda565\" (UID: \"249b9042-b235-4e8b-b221-0dbe35cda565\") "
Oct 10 17:27:04 crc kubenswrapper[4799]: I1010 17:27:04.749481 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/249b9042-b235-4e8b-b221-0dbe35cda565-utilities\") pod \"249b9042-b235-4e8b-b221-0dbe35cda565\" (UID: \"249b9042-b235-4e8b-b221-0dbe35cda565\") "
Oct 10 17:27:04 crc kubenswrapper[4799]: I1010 17:27:04.749592 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/249b9042-b235-4e8b-b221-0dbe35cda565-catalog-content\") pod \"249b9042-b235-4e8b-b221-0dbe35cda565\" (UID: \"249b9042-b235-4e8b-b221-0dbe35cda565\") "
Oct 10 17:27:04 crc kubenswrapper[4799]: I1010 17:27:04.750252 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/249b9042-b235-4e8b-b221-0dbe35cda565-utilities" (OuterVolumeSpecName: "utilities") pod "249b9042-b235-4e8b-b221-0dbe35cda565" (UID: "249b9042-b235-4e8b-b221-0dbe35cda565"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 17:27:04 crc kubenswrapper[4799]: I1010 17:27:04.766045 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/249b9042-b235-4e8b-b221-0dbe35cda565-kube-api-access-k2c8w" (OuterVolumeSpecName: "kube-api-access-k2c8w") pod "249b9042-b235-4e8b-b221-0dbe35cda565" (UID: "249b9042-b235-4e8b-b221-0dbe35cda565"). InnerVolumeSpecName "kube-api-access-k2c8w". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 17:27:04 crc kubenswrapper[4799]: I1010 17:27:04.784851 4799 generic.go:334] "Generic (PLEG): container finished" podID="249b9042-b235-4e8b-b221-0dbe35cda565" containerID="78c33ebe7227cc8973ecb49161ee586bed3c0bb925cace7d572351cf5b44d10f" exitCode=0
Oct 10 17:27:04 crc kubenswrapper[4799]: I1010 17:27:04.784892 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-268ws" event={"ID":"249b9042-b235-4e8b-b221-0dbe35cda565","Type":"ContainerDied","Data":"78c33ebe7227cc8973ecb49161ee586bed3c0bb925cace7d572351cf5b44d10f"}
Oct 10 17:27:04 crc kubenswrapper[4799]: I1010 17:27:04.784952 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-268ws" event={"ID":"249b9042-b235-4e8b-b221-0dbe35cda565","Type":"ContainerDied","Data":"beae6c17f487b740728034c3a0b6dbc2d47a4765691b94acce198654153adef1"}
Oct 10 17:27:04 crc kubenswrapper[4799]: I1010 17:27:04.784955 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-268ws"
Oct 10 17:27:04 crc kubenswrapper[4799]: I1010 17:27:04.784993 4799 scope.go:117] "RemoveContainer" containerID="78c33ebe7227cc8973ecb49161ee586bed3c0bb925cace7d572351cf5b44d10f"
Oct 10 17:27:04 crc kubenswrapper[4799]: I1010 17:27:04.817163 4799 scope.go:117] "RemoveContainer" containerID="bb9903e252cc90c4982db3919e5e09f913e93f1fa97bfb3b3ad4a53dbe338f0c"
Oct 10 17:27:04 crc kubenswrapper[4799]: I1010 17:27:04.835596 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/249b9042-b235-4e8b-b221-0dbe35cda565-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "249b9042-b235-4e8b-b221-0dbe35cda565" (UID: "249b9042-b235-4e8b-b221-0dbe35cda565"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 17:27:04 crc kubenswrapper[4799]: I1010 17:27:04.845869 4799 scope.go:117] "RemoveContainer" containerID="a25b9e9d12c46a2d6d4659be4416feabe396eabcbbc471ba179978c1608b41c1"
Oct 10 17:27:04 crc kubenswrapper[4799]: I1010 17:27:04.851883 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k2c8w\" (UniqueName: \"kubernetes.io/projected/249b9042-b235-4e8b-b221-0dbe35cda565-kube-api-access-k2c8w\") on node \"crc\" DevicePath \"\""
Oct 10 17:27:04 crc kubenswrapper[4799]: I1010 17:27:04.851909 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/249b9042-b235-4e8b-b221-0dbe35cda565-utilities\") on node \"crc\" DevicePath \"\""
Oct 10 17:27:04 crc kubenswrapper[4799]: I1010 17:27:04.851919 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/249b9042-b235-4e8b-b221-0dbe35cda565-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 10 17:27:04 crc kubenswrapper[4799]: I1010 17:27:04.873158 4799 scope.go:117] "RemoveContainer" containerID="78c33ebe7227cc8973ecb49161ee586bed3c0bb925cace7d572351cf5b44d10f"
Oct 10 17:27:04 crc kubenswrapper[4799]: E1010 17:27:04.873701 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"78c33ebe7227cc8973ecb49161ee586bed3c0bb925cace7d572351cf5b44d10f\": container with ID starting with 78c33ebe7227cc8973ecb49161ee586bed3c0bb925cace7d572351cf5b44d10f not found: ID does not exist" containerID="78c33ebe7227cc8973ecb49161ee586bed3c0bb925cace7d572351cf5b44d10f"
Oct 10 17:27:04 crc kubenswrapper[4799]: I1010 17:27:04.873809 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78c33ebe7227cc8973ecb49161ee586bed3c0bb925cace7d572351cf5b44d10f"} err="failed to get container status \"78c33ebe7227cc8973ecb49161ee586bed3c0bb925cace7d572351cf5b44d10f\": rpc error: code = NotFound desc = could not find container \"78c33ebe7227cc8973ecb49161ee586bed3c0bb925cace7d572351cf5b44d10f\": container with ID starting with 78c33ebe7227cc8973ecb49161ee586bed3c0bb925cace7d572351cf5b44d10f not found: ID does not exist"
Oct 10 17:27:04 crc kubenswrapper[4799]: I1010 17:27:04.873862 4799 scope.go:117] "RemoveContainer" containerID="bb9903e252cc90c4982db3919e5e09f913e93f1fa97bfb3b3ad4a53dbe338f0c"
Oct 10 17:27:04 crc kubenswrapper[4799]: E1010 17:27:04.874686 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb9903e252cc90c4982db3919e5e09f913e93f1fa97bfb3b3ad4a53dbe338f0c\": container with ID starting with bb9903e252cc90c4982db3919e5e09f913e93f1fa97bfb3b3ad4a53dbe338f0c not found: ID does not exist" containerID="bb9903e252cc90c4982db3919e5e09f913e93f1fa97bfb3b3ad4a53dbe338f0c"
Oct 10 17:27:04 crc kubenswrapper[4799]: I1010 17:27:04.874748 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb9903e252cc90c4982db3919e5e09f913e93f1fa97bfb3b3ad4a53dbe338f0c"} err="failed to get container status \"bb9903e252cc90c4982db3919e5e09f913e93f1fa97bfb3b3ad4a53dbe338f0c\": rpc error: code = NotFound desc = could not find container \"bb9903e252cc90c4982db3919e5e09f913e93f1fa97bfb3b3ad4a53dbe338f0c\": container with ID starting with bb9903e252cc90c4982db3919e5e09f913e93f1fa97bfb3b3ad4a53dbe338f0c not found: ID does not exist"
Oct 10 17:27:04 crc kubenswrapper[4799]: I1010 17:27:04.874815 4799 scope.go:117] "RemoveContainer" containerID="a25b9e9d12c46a2d6d4659be4416feabe396eabcbbc471ba179978c1608b41c1"
Oct 10 17:27:04 crc kubenswrapper[4799]: E1010 17:27:04.875191 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a25b9e9d12c46a2d6d4659be4416feabe396eabcbbc471ba179978c1608b41c1\": container with ID starting with a25b9e9d12c46a2d6d4659be4416feabe396eabcbbc471ba179978c1608b41c1 not found: ID does not exist" containerID="a25b9e9d12c46a2d6d4659be4416feabe396eabcbbc471ba179978c1608b41c1"
Oct 10 17:27:04 crc kubenswrapper[4799]: I1010 17:27:04.875239 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a25b9e9d12c46a2d6d4659be4416feabe396eabcbbc471ba179978c1608b41c1"} err="failed to get container status \"a25b9e9d12c46a2d6d4659be4416feabe396eabcbbc471ba179978c1608b41c1\": rpc error: code = NotFound desc = could not find container \"a25b9e9d12c46a2d6d4659be4416feabe396eabcbbc471ba179978c1608b41c1\": container with ID starting with a25b9e9d12c46a2d6d4659be4416feabe396eabcbbc471ba179978c1608b41c1 not found: ID does not exist"
Oct 10 17:27:05 crc kubenswrapper[4799]: I1010 17:27:05.119545 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-268ws"]
Oct 10 17:27:05 crc kubenswrapper[4799]: I1010 17:27:05.126327 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-268ws"]
Oct 10 17:27:05 crc kubenswrapper[4799]: I1010 17:27:05.426110 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="249b9042-b235-4e8b-b221-0dbe35cda565" path="/var/lib/kubelet/pods/249b9042-b235-4e8b-b221-0dbe35cda565/volumes"
Oct 10 17:27:05 crc kubenswrapper[4799]: I1010 17:27:05.509642 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-r6229" podUID="eac4a817-52d8-459e-9daf-143b0a89119c" containerName="registry-server" probeResult="failure" output=<
Oct 10 17:27:05 crc kubenswrapper[4799]: timeout: failed to connect service ":50051" within 1s
Oct 10 17:27:05 crc kubenswrapper[4799]: >
Oct 10 17:27:14 crc kubenswrapper[4799]: I1010 17:27:14.574798 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-r6229"
Oct 10 17:27:14 crc kubenswrapper[4799]: I1010 17:27:14.640980 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-r6229"
Oct 10 17:27:14 crc kubenswrapper[4799]: I1010 17:27:14.823362 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-r6229"]
Oct 10 17:27:15 crc kubenswrapper[4799]: I1010 17:27:15.899032 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-r6229" podUID="eac4a817-52d8-459e-9daf-143b0a89119c" containerName="registry-server" containerID="cri-o://3821847fe3c4dc5819f415f84efa9684889d82622dc6db426442c064e895d0f4" gracePeriod=2
Oct 10 17:27:16 crc kubenswrapper[4799]: I1010 17:27:16.377170 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-r6229"
Oct 10 17:27:16 crc kubenswrapper[4799]: I1010 17:27:16.564374 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tqtt2\" (UniqueName: \"kubernetes.io/projected/eac4a817-52d8-459e-9daf-143b0a89119c-kube-api-access-tqtt2\") pod \"eac4a817-52d8-459e-9daf-143b0a89119c\" (UID: \"eac4a817-52d8-459e-9daf-143b0a89119c\") "
Oct 10 17:27:16 crc kubenswrapper[4799]: I1010 17:27:16.564617 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eac4a817-52d8-459e-9daf-143b0a89119c-utilities\") pod \"eac4a817-52d8-459e-9daf-143b0a89119c\" (UID: \"eac4a817-52d8-459e-9daf-143b0a89119c\") "
Oct 10 17:27:16 crc kubenswrapper[4799]: I1010 17:27:16.564677 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eac4a817-52d8-459e-9daf-143b0a89119c-catalog-content\") pod \"eac4a817-52d8-459e-9daf-143b0a89119c\" (UID: \"eac4a817-52d8-459e-9daf-143b0a89119c\") "
Oct 10 17:27:16 crc kubenswrapper[4799]: I1010 17:27:16.565521 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eac4a817-52d8-459e-9daf-143b0a89119c-utilities" (OuterVolumeSpecName: "utilities") pod "eac4a817-52d8-459e-9daf-143b0a89119c" (UID: "eac4a817-52d8-459e-9daf-143b0a89119c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 17:27:16 crc kubenswrapper[4799]: I1010 17:27:16.572201 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eac4a817-52d8-459e-9daf-143b0a89119c-kube-api-access-tqtt2" (OuterVolumeSpecName: "kube-api-access-tqtt2") pod "eac4a817-52d8-459e-9daf-143b0a89119c" (UID: "eac4a817-52d8-459e-9daf-143b0a89119c"). InnerVolumeSpecName "kube-api-access-tqtt2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 17:27:16 crc kubenswrapper[4799]: I1010 17:27:16.650299 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eac4a817-52d8-459e-9daf-143b0a89119c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eac4a817-52d8-459e-9daf-143b0a89119c" (UID: "eac4a817-52d8-459e-9daf-143b0a89119c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 17:27:16 crc kubenswrapper[4799]: I1010 17:27:16.666827 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eac4a817-52d8-459e-9daf-143b0a89119c-utilities\") on node \"crc\" DevicePath \"\""
Oct 10 17:27:16 crc kubenswrapper[4799]: I1010 17:27:16.666858 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eac4a817-52d8-459e-9daf-143b0a89119c-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 10 17:27:16 crc kubenswrapper[4799]: I1010 17:27:16.666871 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tqtt2\" (UniqueName: \"kubernetes.io/projected/eac4a817-52d8-459e-9daf-143b0a89119c-kube-api-access-tqtt2\") on node \"crc\" DevicePath \"\""
Oct 10 17:27:16 crc kubenswrapper[4799]: I1010 17:27:16.914791 4799 generic.go:334] "Generic (PLEG): container finished" podID="eac4a817-52d8-459e-9daf-143b0a89119c" containerID="3821847fe3c4dc5819f415f84efa9684889d82622dc6db426442c064e895d0f4" exitCode=0
Oct 10 17:27:16 crc kubenswrapper[4799]: I1010 17:27:16.914855 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r6229" event={"ID":"eac4a817-52d8-459e-9daf-143b0a89119c","Type":"ContainerDied","Data":"3821847fe3c4dc5819f415f84efa9684889d82622dc6db426442c064e895d0f4"}
Oct 10 17:27:16 crc kubenswrapper[4799]: I1010 17:27:16.914895 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r6229" event={"ID":"eac4a817-52d8-459e-9daf-143b0a89119c","Type":"ContainerDied","Data":"5e9159ab9ad67892cf0273986434b82ef0b4a155f51f27b1cdee00bd4e2d3ceb"}
Oct 10 17:27:16 crc kubenswrapper[4799]: I1010 17:27:16.914925 4799 scope.go:117] "RemoveContainer" containerID="3821847fe3c4dc5819f415f84efa9684889d82622dc6db426442c064e895d0f4"
Oct 10 17:27:16 crc kubenswrapper[4799]: I1010 17:27:16.915149 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-r6229"
Oct 10 17:27:16 crc kubenswrapper[4799]: I1010 17:27:16.956456 4799 scope.go:117] "RemoveContainer" containerID="3989ad99b104cadbf8e83833f1e82bf0219b6a71d6b300f51987720f9b818fee"
Oct 10 17:27:16 crc kubenswrapper[4799]: I1010 17:27:16.982551 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-r6229"]
Oct 10 17:27:16 crc kubenswrapper[4799]: I1010 17:27:16.984825 4799 scope.go:117] "RemoveContainer" containerID="a53eafb9b4d752726ef7a56e2bac7aa0fb06bceafe754546f56f8fa584ab58d0"
Oct 10 17:27:16 crc kubenswrapper[4799]: I1010 17:27:16.990296 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-r6229"]
Oct 10 17:27:17 crc kubenswrapper[4799]: I1010 17:27:17.016667 4799 scope.go:117] "RemoveContainer" containerID="3821847fe3c4dc5819f415f84efa9684889d82622dc6db426442c064e895d0f4"
Oct 10 17:27:17 crc kubenswrapper[4799]: E1010 17:27:17.017227 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3821847fe3c4dc5819f415f84efa9684889d82622dc6db426442c064e895d0f4\": container with ID starting with 3821847fe3c4dc5819f415f84efa9684889d82622dc6db426442c064e895d0f4 not found: ID does not exist" containerID="3821847fe3c4dc5819f415f84efa9684889d82622dc6db426442c064e895d0f4"
Oct 10 17:27:17 crc kubenswrapper[4799]: I1010 17:27:17.017292 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3821847fe3c4dc5819f415f84efa9684889d82622dc6db426442c064e895d0f4"} err="failed to get container status \"3821847fe3c4dc5819f415f84efa9684889d82622dc6db426442c064e895d0f4\": rpc error: code = NotFound desc = could not find container \"3821847fe3c4dc5819f415f84efa9684889d82622dc6db426442c064e895d0f4\": container with ID starting with 3821847fe3c4dc5819f415f84efa9684889d82622dc6db426442c064e895d0f4 not found: ID does not exist"
Oct 10 17:27:17 crc kubenswrapper[4799]: I1010 17:27:17.017333 4799 scope.go:117] "RemoveContainer" containerID="3989ad99b104cadbf8e83833f1e82bf0219b6a71d6b300f51987720f9b818fee"
Oct 10 17:27:17 crc kubenswrapper[4799]: E1010 17:27:17.017797 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3989ad99b104cadbf8e83833f1e82bf0219b6a71d6b300f51987720f9b818fee\": container with ID starting with 3989ad99b104cadbf8e83833f1e82bf0219b6a71d6b300f51987720f9b818fee not found: ID does not exist" containerID="3989ad99b104cadbf8e83833f1e82bf0219b6a71d6b300f51987720f9b818fee"
Oct 10 17:27:17 crc kubenswrapper[4799]: I1010 17:27:17.017855 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3989ad99b104cadbf8e83833f1e82bf0219b6a71d6b300f51987720f9b818fee"} err="failed to get container status \"3989ad99b104cadbf8e83833f1e82bf0219b6a71d6b300f51987720f9b818fee\": rpc error: code = NotFound desc = could not find container \"3989ad99b104cadbf8e83833f1e82bf0219b6a71d6b300f51987720f9b818fee\": container with ID starting with 3989ad99b104cadbf8e83833f1e82bf0219b6a71d6b300f51987720f9b818fee not found: ID does not exist"
Oct 10 17:27:17 crc kubenswrapper[4799]: I1010 17:27:17.017887 4799 scope.go:117] "RemoveContainer" containerID="a53eafb9b4d752726ef7a56e2bac7aa0fb06bceafe754546f56f8fa584ab58d0"
Oct 10 17:27:17 crc kubenswrapper[4799]: E1010 17:27:17.018293 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a53eafb9b4d752726ef7a56e2bac7aa0fb06bceafe754546f56f8fa584ab58d0\": container with ID starting with a53eafb9b4d752726ef7a56e2bac7aa0fb06bceafe754546f56f8fa584ab58d0 not found: ID does not exist" containerID="a53eafb9b4d752726ef7a56e2bac7aa0fb06bceafe754546f56f8fa584ab58d0"
Oct 10 17:27:17 crc kubenswrapper[4799]: I1010 17:27:17.018344 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a53eafb9b4d752726ef7a56e2bac7aa0fb06bceafe754546f56f8fa584ab58d0"} err="failed to get container status \"a53eafb9b4d752726ef7a56e2bac7aa0fb06bceafe754546f56f8fa584ab58d0\": rpc error: code = NotFound desc = could not find container \"a53eafb9b4d752726ef7a56e2bac7aa0fb06bceafe754546f56f8fa584ab58d0\": container with ID starting with a53eafb9b4d752726ef7a56e2bac7aa0fb06bceafe754546f56f8fa584ab58d0 not found: ID does not exist"
Oct 10 17:27:17 crc kubenswrapper[4799]: I1010 17:27:17.419658 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eac4a817-52d8-459e-9daf-143b0a89119c" path="/var/lib/kubelet/pods/eac4a817-52d8-459e-9daf-143b0a89119c/volumes"
Oct 10 17:27:45 crc kubenswrapper[4799]: I1010 17:27:45.248411 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 10 17:27:45 crc kubenswrapper[4799]: I1010 17:27:45.249124 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 10 17:28:14 crc kubenswrapper[4799]: I1010 17:28:14.433899 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-m4vhg"]
Oct 10 17:28:14 crc kubenswrapper[4799]: E1010 17:28:14.434887 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="249b9042-b235-4e8b-b221-0dbe35cda565" containerName="extract-utilities"
Oct 10 17:28:14 crc kubenswrapper[4799]: I1010 17:28:14.434907 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="249b9042-b235-4e8b-b221-0dbe35cda565" containerName="extract-utilities"
Oct 10 17:28:14 crc kubenswrapper[4799]: E1010 17:28:14.434934 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eac4a817-52d8-459e-9daf-143b0a89119c" containerName="registry-server"
Oct 10 17:28:14 crc kubenswrapper[4799]: I1010 17:28:14.434943 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="eac4a817-52d8-459e-9daf-143b0a89119c" containerName="registry-server"
Oct 10 17:28:14 crc kubenswrapper[4799]: E1010 17:28:14.434958 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="249b9042-b235-4e8b-b221-0dbe35cda565" containerName="extract-content"
Oct 10 17:28:14 crc kubenswrapper[4799]: I1010 17:28:14.434968 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="249b9042-b235-4e8b-b221-0dbe35cda565" containerName="extract-content"
Oct 10 17:28:14 crc kubenswrapper[4799]: E1010 17:28:14.434983 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="249b9042-b235-4e8b-b221-0dbe35cda565" containerName="registry-server"
Oct 10 17:28:14 crc kubenswrapper[4799]: I1010 17:28:14.434991 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="249b9042-b235-4e8b-b221-0dbe35cda565" containerName="registry-server"
Oct 10 17:28:14 crc kubenswrapper[4799]: E1010 17:28:14.435011 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eac4a817-52d8-459e-9daf-143b0a89119c" containerName="extract-content"
Oct 10 17:28:14 crc kubenswrapper[4799]: I1010 17:28:14.435019 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="eac4a817-52d8-459e-9daf-143b0a89119c" containerName="extract-content"
Oct 10 17:28:14 crc kubenswrapper[4799]: E1010 17:28:14.435033 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eac4a817-52d8-459e-9daf-143b0a89119c" containerName="extract-utilities"
Oct 10 17:28:14 crc kubenswrapper[4799]: I1010 17:28:14.435041 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="eac4a817-52d8-459e-9daf-143b0a89119c" containerName="extract-utilities"
Oct 10 17:28:14 crc kubenswrapper[4799]: I1010 17:28:14.435226 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="249b9042-b235-4e8b-b221-0dbe35cda565" containerName="registry-server"
Oct 10 17:28:14 crc kubenswrapper[4799]: I1010 17:28:14.435257 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="eac4a817-52d8-459e-9daf-143b0a89119c" containerName="registry-server"
Oct 10 17:28:14 crc kubenswrapper[4799]: I1010 17:28:14.436509 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m4vhg"
Oct 10 17:28:14 crc kubenswrapper[4799]: I1010 17:28:14.451594 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m4vhg"]
Oct 10 17:28:14 crc kubenswrapper[4799]: I1010 17:28:14.541174 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c259c46-07fe-4b96-8746-549b6a3a9998-catalog-content\") pod \"certified-operators-m4vhg\" (UID: \"7c259c46-07fe-4b96-8746-549b6a3a9998\") " pod="openshift-marketplace/certified-operators-m4vhg"
Oct 10 17:28:14 crc kubenswrapper[4799]: I1010 17:28:14.541323 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9pndg\" (UniqueName: \"kubernetes.io/projected/7c259c46-07fe-4b96-8746-549b6a3a9998-kube-api-access-9pndg\") pod \"certified-operators-m4vhg\" (UID: \"7c259c46-07fe-4b96-8746-549b6a3a9998\") " pod="openshift-marketplace/certified-operators-m4vhg"
Oct 10 17:28:14 crc kubenswrapper[4799]: I1010 17:28:14.541425 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c259c46-07fe-4b96-8746-549b6a3a9998-utilities\") pod \"certified-operators-m4vhg\" (UID: \"7c259c46-07fe-4b96-8746-549b6a3a9998\") " pod="openshift-marketplace/certified-operators-m4vhg"
Oct 10 17:28:14 crc kubenswrapper[4799]: I1010 17:28:14.643172 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c259c46-07fe-4b96-8746-549b6a3a9998-catalog-content\") pod \"certified-operators-m4vhg\" (UID: \"7c259c46-07fe-4b96-8746-549b6a3a9998\") " pod="openshift-marketplace/certified-operators-m4vhg"
Oct 10 17:28:14 crc kubenswrapper[4799]: I1010 17:28:14.643231 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9pndg\" (UniqueName: \"kubernetes.io/projected/7c259c46-07fe-4b96-8746-549b6a3a9998-kube-api-access-9pndg\") pod \"certified-operators-m4vhg\" (UID: \"7c259c46-07fe-4b96-8746-549b6a3a9998\") " pod="openshift-marketplace/certified-operators-m4vhg"
Oct 10 17:28:14 crc kubenswrapper[4799]: I1010 17:28:14.643286 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c259c46-07fe-4b96-8746-549b6a3a9998-utilities\") pod \"certified-operators-m4vhg\" (UID: \"7c259c46-07fe-4b96-8746-549b6a3a9998\") " pod="openshift-marketplace/certified-operators-m4vhg"
Oct 10 17:28:14 crc kubenswrapper[4799]: I1010 17:28:14.643865 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c259c46-07fe-4b96-8746-549b6a3a9998-utilities\") pod \"certified-operators-m4vhg\" (UID: \"7c259c46-07fe-4b96-8746-549b6a3a9998\") " pod="openshift-marketplace/certified-operators-m4vhg"
Oct 10 17:28:14 crc kubenswrapper[4799]: I1010 17:28:14.643988 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c259c46-07fe-4b96-8746-549b6a3a9998-catalog-content\") pod \"certified-operators-m4vhg\" (UID: \"7c259c46-07fe-4b96-8746-549b6a3a9998\") " pod="openshift-marketplace/certified-operators-m4vhg"
Oct 10 17:28:14 crc kubenswrapper[4799]: I1010 17:28:14.671022 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9pndg\" (UniqueName: \"kubernetes.io/projected/7c259c46-07fe-4b96-8746-549b6a3a9998-kube-api-access-9pndg\") pod \"certified-operators-m4vhg\" (UID: \"7c259c46-07fe-4b96-8746-549b6a3a9998\") " pod="openshift-marketplace/certified-operators-m4vhg"
Oct 10 17:28:14 crc kubenswrapper[4799]: I1010 17:28:14.805927 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m4vhg"
Oct 10 17:28:15 crc kubenswrapper[4799]: I1010 17:28:15.104039 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m4vhg"]
Oct 10 17:28:15 crc kubenswrapper[4799]: I1010 17:28:15.249231 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 10 17:28:15 crc kubenswrapper[4799]: I1010 17:28:15.249587 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 10 17:28:15 crc kubenswrapper[4799]: I1010 17:28:15.457714 4799 generic.go:334] "Generic (PLEG): container finished" podID="7c259c46-07fe-4b96-8746-549b6a3a9998" containerID="1a2b019b83553e214a4679e3dbe257e67d6efbbe916cf529d4cb5baaab06b035" exitCode=0
Oct 10 17:28:15 crc kubenswrapper[4799]: I1010 17:28:15.457773 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m4vhg" event={"ID":"7c259c46-07fe-4b96-8746-549b6a3a9998","Type":"ContainerDied","Data":"1a2b019b83553e214a4679e3dbe257e67d6efbbe916cf529d4cb5baaab06b035"}
Oct 10 17:28:15 crc kubenswrapper[4799]: I1010 17:28:15.457803 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m4vhg" event={"ID":"7c259c46-07fe-4b96-8746-549b6a3a9998","Type":"ContainerStarted","Data":"260a10263d9b91d2349a115448eefa4406a4386fad7f21d9dce82423087d268e"}
Oct 10 17:28:15 crc kubenswrapper[4799]: I1010 17:28:15.459273 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 10 17:28:16 crc kubenswrapper[4799]: I1010 17:28:16.465846 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m4vhg" event={"ID":"7c259c46-07fe-4b96-8746-549b6a3a9998","Type":"ContainerStarted","Data":"e96bc0ec9edd9a3d1ab8ac3c70292ef57f0f34e9d981320a57a48842bc460458"}
Oct 10 17:28:17 crc kubenswrapper[4799]: I1010 17:28:17.477676 4799 generic.go:334] "Generic (PLEG): container finished" podID="7c259c46-07fe-4b96-8746-549b6a3a9998" containerID="e96bc0ec9edd9a3d1ab8ac3c70292ef57f0f34e9d981320a57a48842bc460458" exitCode=0
Oct 10 17:28:17 crc kubenswrapper[4799]: I1010 17:28:17.477787 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m4vhg" event={"ID":"7c259c46-07fe-4b96-8746-549b6a3a9998","Type":"ContainerDied","Data":"e96bc0ec9edd9a3d1ab8ac3c70292ef57f0f34e9d981320a57a48842bc460458"}
Oct 10 17:28:17 crc kubenswrapper[4799]: I1010 17:28:17.478301 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m4vhg" event={"ID":"7c259c46-07fe-4b96-8746-549b6a3a9998","Type":"ContainerStarted","Data":"01a700ca09ae446ea6abb0afc972d2a8712ac244f53b0dd55c563533ac2785ae"}
Oct 10 17:28:17 crc kubenswrapper[4799]: I1010 17:28:17.502588 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-m4vhg" podStartSLOduration=2.060782476 podStartE2EDuration="3.502558631s" podCreationTimestamp="2025-10-10 17:28:14 +0000 UTC" firstStartedPulling="2025-10-10 17:28:15.459006397 +0000 UTC m=+3388.967330532" lastFinishedPulling="2025-10-10 17:28:16.900782562 +0000 UTC m=+3390.409106687" observedRunningTime="2025-10-10 17:28:17.499012764 +0000 UTC m=+3391.007336919" watchObservedRunningTime="2025-10-10 17:28:17.502558631 +0000 UTC m=+3391.010882786"
Oct 10 17:28:24 crc kubenswrapper[4799]: I1010 17:28:24.806462 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-m4vhg"
Oct 10 17:28:24 crc kubenswrapper[4799]: I1010 17:28:24.807320 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-m4vhg"
Oct 10 17:28:24 crc kubenswrapper[4799]: I1010 17:28:24.895154 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-m4vhg"
Oct 10 17:28:25 crc kubenswrapper[4799]: I1010 17:28:25.660227 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-m4vhg"
Oct 10 17:28:25 crc kubenswrapper[4799]: I1010 17:28:25.719516 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-m4vhg"]
Oct 10 17:28:27 crc kubenswrapper[4799]: I1010 17:28:27.594559 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-m4vhg" podUID="7c259c46-07fe-4b96-8746-549b6a3a9998" containerName="registry-server" containerID="cri-o://01a700ca09ae446ea6abb0afc972d2a8712ac244f53b0dd55c563533ac2785ae" gracePeriod=2
Oct 10 17:28:28 crc kubenswrapper[4799]: I1010 17:28:28.052213 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m4vhg"
Oct 10 17:28:28 crc kubenswrapper[4799]: I1010 17:28:28.167313 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c259c46-07fe-4b96-8746-549b6a3a9998-utilities\") pod \"7c259c46-07fe-4b96-8746-549b6a3a9998\" (UID: \"7c259c46-07fe-4b96-8746-549b6a3a9998\") "
Oct 10 17:28:28 crc kubenswrapper[4799]: I1010 17:28:28.167594 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9pndg\" (UniqueName: \"kubernetes.io/projected/7c259c46-07fe-4b96-8746-549b6a3a9998-kube-api-access-9pndg\") pod \"7c259c46-07fe-4b96-8746-549b6a3a9998\" (UID: \"7c259c46-07fe-4b96-8746-549b6a3a9998\") "
Oct 10 17:28:28 crc kubenswrapper[4799]: I1010 17:28:28.167674 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c259c46-07fe-4b96-8746-549b6a3a9998-catalog-content\") pod \"7c259c46-07fe-4b96-8746-549b6a3a9998\" (UID: \"7c259c46-07fe-4b96-8746-549b6a3a9998\") "
Oct 10 17:28:28 crc kubenswrapper[4799]: I1010 17:28:28.168463 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c259c46-07fe-4b96-8746-549b6a3a9998-utilities" (OuterVolumeSpecName: "utilities") pod "7c259c46-07fe-4b96-8746-549b6a3a9998" (UID: "7c259c46-07fe-4b96-8746-549b6a3a9998"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 17:28:28 crc kubenswrapper[4799]: I1010 17:28:28.177663 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c259c46-07fe-4b96-8746-549b6a3a9998-kube-api-access-9pndg" (OuterVolumeSpecName: "kube-api-access-9pndg") pod "7c259c46-07fe-4b96-8746-549b6a3a9998" (UID: "7c259c46-07fe-4b96-8746-549b6a3a9998"). InnerVolumeSpecName "kube-api-access-9pndg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 17:28:28 crc kubenswrapper[4799]: I1010 17:28:28.241881 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c259c46-07fe-4b96-8746-549b6a3a9998-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7c259c46-07fe-4b96-8746-549b6a3a9998" (UID: "7c259c46-07fe-4b96-8746-549b6a3a9998"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 17:28:28 crc kubenswrapper[4799]: I1010 17:28:28.270316 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c259c46-07fe-4b96-8746-549b6a3a9998-utilities\") on node \"crc\" DevicePath \"\""
Oct 10 17:28:28 crc kubenswrapper[4799]: I1010 17:28:28.270358 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9pndg\" (UniqueName: \"kubernetes.io/projected/7c259c46-07fe-4b96-8746-549b6a3a9998-kube-api-access-9pndg\") on node \"crc\" DevicePath \"\""
Oct 10 17:28:28 crc kubenswrapper[4799]: I1010 17:28:28.270379 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c259c46-07fe-4b96-8746-549b6a3a9998-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 10 17:28:28 crc kubenswrapper[4799]: I1010 17:28:28.609705 4799 generic.go:334] "Generic (PLEG): container finished" podID="7c259c46-07fe-4b96-8746-549b6a3a9998" containerID="01a700ca09ae446ea6abb0afc972d2a8712ac244f53b0dd55c563533ac2785ae" exitCode=0
Oct 10 17:28:28 crc kubenswrapper[4799]: I1010 17:28:28.609801 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m4vhg" event={"ID":"7c259c46-07fe-4b96-8746-549b6a3a9998","Type":"ContainerDied","Data":"01a700ca09ae446ea6abb0afc972d2a8712ac244f53b0dd55c563533ac2785ae"}
Oct 10 17:28:28 crc kubenswrapper[4799]: I1010 17:28:28.609873 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m4vhg" event={"ID":"7c259c46-07fe-4b96-8746-549b6a3a9998","Type":"ContainerDied","Data":"260a10263d9b91d2349a115448eefa4406a4386fad7f21d9dce82423087d268e"}
Oct 10 17:28:28 crc kubenswrapper[4799]: I1010 17:28:28.609899 4799 scope.go:117] "RemoveContainer" containerID="01a700ca09ae446ea6abb0afc972d2a8712ac244f53b0dd55c563533ac2785ae"
Oct 10 17:28:28 crc kubenswrapper[4799]: I1010 17:28:28.609939 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m4vhg"
Oct 10 17:28:28 crc kubenswrapper[4799]: I1010 17:28:28.647069 4799 scope.go:117] "RemoveContainer" containerID="e96bc0ec9edd9a3d1ab8ac3c70292ef57f0f34e9d981320a57a48842bc460458"
Oct 10 17:28:28 crc kubenswrapper[4799]: I1010 17:28:28.669652 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-m4vhg"]
Oct 10 17:28:28 crc kubenswrapper[4799]: I1010 17:28:28.674129 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-m4vhg"]
Oct 10 17:28:28 crc kubenswrapper[4799]: I1010 17:28:28.700438 4799 scope.go:117] "RemoveContainer" containerID="1a2b019b83553e214a4679e3dbe257e67d6efbbe916cf529d4cb5baaab06b035"
Oct 10 17:28:28 crc kubenswrapper[4799]: I1010 17:28:28.721985 4799 scope.go:117] "RemoveContainer" containerID="01a700ca09ae446ea6abb0afc972d2a8712ac244f53b0dd55c563533ac2785ae"
Oct 10 17:28:28 crc kubenswrapper[4799]: E1010 17:28:28.722570 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"01a700ca09ae446ea6abb0afc972d2a8712ac244f53b0dd55c563533ac2785ae\": container with ID starting with 01a700ca09ae446ea6abb0afc972d2a8712ac244f53b0dd55c563533ac2785ae not found: ID does not exist" containerID="01a700ca09ae446ea6abb0afc972d2a8712ac244f53b0dd55c563533ac2785ae"
Oct 10 17:28:28 crc kubenswrapper[4799]: I1010 17:28:28.722631 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01a700ca09ae446ea6abb0afc972d2a8712ac244f53b0dd55c563533ac2785ae"} err="failed to get container status \"01a700ca09ae446ea6abb0afc972d2a8712ac244f53b0dd55c563533ac2785ae\": rpc error: code = NotFound desc = could not find container \"01a700ca09ae446ea6abb0afc972d2a8712ac244f53b0dd55c563533ac2785ae\": container with ID starting with 01a700ca09ae446ea6abb0afc972d2a8712ac244f53b0dd55c563533ac2785ae not found: ID does not exist"
Oct 10 17:28:28 crc kubenswrapper[4799]: I1010 17:28:28.722667 4799 scope.go:117] "RemoveContainer" containerID="e96bc0ec9edd9a3d1ab8ac3c70292ef57f0f34e9d981320a57a48842bc460458"
Oct 10 17:28:28 crc kubenswrapper[4799]: E1010 17:28:28.723060 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e96bc0ec9edd9a3d1ab8ac3c70292ef57f0f34e9d981320a57a48842bc460458\": container with ID starting with e96bc0ec9edd9a3d1ab8ac3c70292ef57f0f34e9d981320a57a48842bc460458 not found: ID does not exist" containerID="e96bc0ec9edd9a3d1ab8ac3c70292ef57f0f34e9d981320a57a48842bc460458"
Oct 10 17:28:28 crc kubenswrapper[4799]: I1010 17:28:28.723104 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e96bc0ec9edd9a3d1ab8ac3c70292ef57f0f34e9d981320a57a48842bc460458"} err="failed to get container status \"e96bc0ec9edd9a3d1ab8ac3c70292ef57f0f34e9d981320a57a48842bc460458\": rpc error: code = NotFound desc = could not find container \"e96bc0ec9edd9a3d1ab8ac3c70292ef57f0f34e9d981320a57a48842bc460458\": container with ID starting with e96bc0ec9edd9a3d1ab8ac3c70292ef57f0f34e9d981320a57a48842bc460458 not found: ID does not exist"
Oct 10 17:28:28 crc kubenswrapper[4799]: I1010 17:28:28.723141 4799 scope.go:117] "RemoveContainer" containerID="1a2b019b83553e214a4679e3dbe257e67d6efbbe916cf529d4cb5baaab06b035"
Oct 10 17:28:28 crc kubenswrapper[4799]: E1010 17:28:28.723438 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a2b019b83553e214a4679e3dbe257e67d6efbbe916cf529d4cb5baaab06b035\": container with ID starting with 1a2b019b83553e214a4679e3dbe257e67d6efbbe916cf529d4cb5baaab06b035 not found: ID does not exist" containerID="1a2b019b83553e214a4679e3dbe257e67d6efbbe916cf529d4cb5baaab06b035"
Oct 10 17:28:28 crc kubenswrapper[4799]: I1010 17:28:28.723479 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a2b019b83553e214a4679e3dbe257e67d6efbbe916cf529d4cb5baaab06b035"} err="failed to get container status \"1a2b019b83553e214a4679e3dbe257e67d6efbbe916cf529d4cb5baaab06b035\": rpc error: code = NotFound desc = could not find container \"1a2b019b83553e214a4679e3dbe257e67d6efbbe916cf529d4cb5baaab06b035\": container with ID starting with 1a2b019b83553e214a4679e3dbe257e67d6efbbe916cf529d4cb5baaab06b035 not found: ID does not exist"
Oct 10 17:28:29 crc kubenswrapper[4799]: I1010 17:28:29.416011 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c259c46-07fe-4b96-8746-549b6a3a9998" path="/var/lib/kubelet/pods/7c259c46-07fe-4b96-8746-549b6a3a9998/volumes"
Oct 10 17:28:45 crc kubenswrapper[4799]: I1010 17:28:45.249430 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 10 17:28:45 crc kubenswrapper[4799]: I1010 17:28:45.250193 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 10 17:28:45 crc kubenswrapper[4799]: I1010 17:28:45.250255 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc"
Oct 10 17:28:45 crc kubenswrapper[4799]: I1010 17:28:45.251004 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1805e3a5f297fe261be5d0ffadb3f386d2dd65f9b6e9f819312403974b1cfafc"} pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 10 17:28:45 crc kubenswrapper[4799]: I1010 17:28:45.251087 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" containerID="cri-o://1805e3a5f297fe261be5d0ffadb3f386d2dd65f9b6e9f819312403974b1cfafc" gracePeriod=600
Oct 10 17:28:45 crc kubenswrapper[4799]: I1010 17:28:45.789158 4799 generic.go:334] "Generic (PLEG): container finished" podID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerID="1805e3a5f297fe261be5d0ffadb3f386d2dd65f9b6e9f819312403974b1cfafc" exitCode=0
Oct 10 17:28:45 crc kubenswrapper[4799]: I1010 17:28:45.789264 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerDied","Data":"1805e3a5f297fe261be5d0ffadb3f386d2dd65f9b6e9f819312403974b1cfafc"}
Oct 10 17:28:45 crc kubenswrapper[4799]: I1010 17:28:45.789624 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"50c1c49a2b0ab1d8d8b7206eccc727d542a604d2bf97ae52e942229bc6e50061"}
Oct 10 17:28:45 crc kubenswrapper[4799]: I1010 17:28:45.789655 4799 scope.go:117] "RemoveContainer" containerID="bf4eb3e56ead59e9fb87d5f16a0e791ef1612b88c8f649a4f091b1c1c12d7b71"
Oct 10 17:30:00 crc kubenswrapper[4799]: I1010 17:30:00.204211 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335290-gd5c6"]
Oct 10 17:30:00 crc kubenswrapper[4799]: E1010 17:30:00.205235 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c259c46-07fe-4b96-8746-549b6a3a9998" containerName="extract-utilities"
Oct 10 17:30:00 crc kubenswrapper[4799]: I1010 17:30:00.205256 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c259c46-07fe-4b96-8746-549b6a3a9998" containerName="extract-utilities"
Oct 10 17:30:00 crc kubenswrapper[4799]: E1010 17:30:00.205301 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c259c46-07fe-4b96-8746-549b6a3a9998" containerName="extract-content"
Oct 10 17:30:00 crc kubenswrapper[4799]: I1010 17:30:00.205311 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c259c46-07fe-4b96-8746-549b6a3a9998" containerName="extract-content"
Oct 10 17:30:00 crc kubenswrapper[4799]: E1010 17:30:00.205331 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c259c46-07fe-4b96-8746-549b6a3a9998" containerName="registry-server"
Oct 10 17:30:00 crc kubenswrapper[4799]: I1010 17:30:00.205345 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c259c46-07fe-4b96-8746-549b6a3a9998" containerName="registry-server"
Oct 10 17:30:00 crc kubenswrapper[4799]: I1010 17:30:00.205589 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c259c46-07fe-4b96-8746-549b6a3a9998" containerName="registry-server"
Oct 10 17:30:00 crc kubenswrapper[4799]: I1010 17:30:00.206355 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335290-gd5c6"
Oct 10 17:30:00 crc kubenswrapper[4799]: I1010 17:30:00.210374 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Oct 10 17:30:00 crc kubenswrapper[4799]: I1010 17:30:00.211287 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Oct 10 17:30:00 crc kubenswrapper[4799]: I1010 17:30:00.218979 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335290-gd5c6"]
Oct 10 17:30:00 crc kubenswrapper[4799]: I1010 17:30:00.365347 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ad8b20fe-7b38-4f83-ba94-986013d30ea5-secret-volume\") pod \"collect-profiles-29335290-gd5c6\" (UID: \"ad8b20fe-7b38-4f83-ba94-986013d30ea5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335290-gd5c6"
Oct 10 17:30:00 crc kubenswrapper[4799]: I1010 17:30:00.366172 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qt8vr\" (UniqueName: \"kubernetes.io/projected/ad8b20fe-7b38-4f83-ba94-986013d30ea5-kube-api-access-qt8vr\") pod \"collect-profiles-29335290-gd5c6\" (UID: \"ad8b20fe-7b38-4f83-ba94-986013d30ea5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335290-gd5c6"
Oct 10 17:30:00 crc kubenswrapper[4799]: I1010 17:30:00.366447 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ad8b20fe-7b38-4f83-ba94-986013d30ea5-config-volume\") pod \"collect-profiles-29335290-gd5c6\" (UID: \"ad8b20fe-7b38-4f83-ba94-986013d30ea5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335290-gd5c6"
Oct 10 17:30:00 crc kubenswrapper[4799]: I1010 17:30:00.467794 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ad8b20fe-7b38-4f83-ba94-986013d30ea5-secret-volume\") pod \"collect-profiles-29335290-gd5c6\" (UID: \"ad8b20fe-7b38-4f83-ba94-986013d30ea5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335290-gd5c6"
Oct 10 17:30:00 crc kubenswrapper[4799]: I1010 17:30:00.467907 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qt8vr\" (UniqueName: \"kubernetes.io/projected/ad8b20fe-7b38-4f83-ba94-986013d30ea5-kube-api-access-qt8vr\") pod \"collect-profiles-29335290-gd5c6\" (UID: \"ad8b20fe-7b38-4f83-ba94-986013d30ea5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335290-gd5c6"
Oct 10 17:30:00 crc kubenswrapper[4799]: I1010 17:30:00.468026 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ad8b20fe-7b38-4f83-ba94-986013d30ea5-config-volume\") pod \"collect-profiles-29335290-gd5c6\" (UID: \"ad8b20fe-7b38-4f83-ba94-986013d30ea5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335290-gd5c6"
Oct 10 17:30:00 crc kubenswrapper[4799]: I1010 17:30:00.469279 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ad8b20fe-7b38-4f83-ba94-986013d30ea5-config-volume\") pod \"collect-profiles-29335290-gd5c6\" (UID: \"ad8b20fe-7b38-4f83-ba94-986013d30ea5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335290-gd5c6"
Oct 10 17:30:00 crc kubenswrapper[4799]: I1010 17:30:00.475924 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ad8b20fe-7b38-4f83-ba94-986013d30ea5-secret-volume\") pod \"collect-profiles-29335290-gd5c6\" (UID: \"ad8b20fe-7b38-4f83-ba94-986013d30ea5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335290-gd5c6"
Oct 10 17:30:00 crc kubenswrapper[4799]: I1010 17:30:00.491597 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qt8vr\" (UniqueName: \"kubernetes.io/projected/ad8b20fe-7b38-4f83-ba94-986013d30ea5-kube-api-access-qt8vr\") pod \"collect-profiles-29335290-gd5c6\" (UID: \"ad8b20fe-7b38-4f83-ba94-986013d30ea5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335290-gd5c6"
Oct 10 17:30:00 crc kubenswrapper[4799]: I1010 17:30:00.532789 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335290-gd5c6"
Oct 10 17:30:01 crc kubenswrapper[4799]: I1010 17:30:01.010280 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335290-gd5c6"]
Oct 10 17:30:01 crc kubenswrapper[4799]: W1010 17:30:01.025033 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podad8b20fe_7b38_4f83_ba94_986013d30ea5.slice/crio-cbbee23926d6bdc36c318618926b62e20980775311112629df25e57461105463 WatchSource:0}: Error finding container cbbee23926d6bdc36c318618926b62e20980775311112629df25e57461105463: Status 404 returned error can't find the container with id cbbee23926d6bdc36c318618926b62e20980775311112629df25e57461105463
Oct 10 17:30:01 crc kubenswrapper[4799]: I1010 17:30:01.546121 4799 generic.go:334] "Generic (PLEG): container finished" podID="ad8b20fe-7b38-4f83-ba94-986013d30ea5" containerID="9624dc8ea5d0347c35f7e7ab7c7f9a4e58284e339c93af675f78bf25d899ee36" exitCode=0
Oct 10 17:30:01 crc kubenswrapper[4799]: I1010 17:30:01.546169 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335290-gd5c6" event={"ID":"ad8b20fe-7b38-4f83-ba94-986013d30ea5","Type":"ContainerDied","Data":"9624dc8ea5d0347c35f7e7ab7c7f9a4e58284e339c93af675f78bf25d899ee36"}
Oct 10 17:30:01 crc kubenswrapper[4799]: I1010 17:30:01.546197 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335290-gd5c6" event={"ID":"ad8b20fe-7b38-4f83-ba94-986013d30ea5","Type":"ContainerStarted","Data":"cbbee23926d6bdc36c318618926b62e20980775311112629df25e57461105463"}
Oct 10 17:30:02 crc kubenswrapper[4799]: I1010 17:30:02.921652 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335290-gd5c6" Oct 10 17:30:03 crc kubenswrapper[4799]: I1010 17:30:03.105546 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ad8b20fe-7b38-4f83-ba94-986013d30ea5-config-volume\") pod \"ad8b20fe-7b38-4f83-ba94-986013d30ea5\" (UID: \"ad8b20fe-7b38-4f83-ba94-986013d30ea5\") " Oct 10 17:30:03 crc kubenswrapper[4799]: I1010 17:30:03.105722 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ad8b20fe-7b38-4f83-ba94-986013d30ea5-secret-volume\") pod \"ad8b20fe-7b38-4f83-ba94-986013d30ea5\" (UID: \"ad8b20fe-7b38-4f83-ba94-986013d30ea5\") " Oct 10 17:30:03 crc kubenswrapper[4799]: I1010 17:30:03.105824 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qt8vr\" (UniqueName: \"kubernetes.io/projected/ad8b20fe-7b38-4f83-ba94-986013d30ea5-kube-api-access-qt8vr\") pod \"ad8b20fe-7b38-4f83-ba94-986013d30ea5\" (UID: \"ad8b20fe-7b38-4f83-ba94-986013d30ea5\") " Oct 10 17:30:03 crc kubenswrapper[4799]: I1010 17:30:03.107495 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad8b20fe-7b38-4f83-ba94-986013d30ea5-config-volume" (OuterVolumeSpecName: "config-volume") pod "ad8b20fe-7b38-4f83-ba94-986013d30ea5" (UID: "ad8b20fe-7b38-4f83-ba94-986013d30ea5"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 17:30:03 crc kubenswrapper[4799]: I1010 17:30:03.115380 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad8b20fe-7b38-4f83-ba94-986013d30ea5-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ad8b20fe-7b38-4f83-ba94-986013d30ea5" (UID: "ad8b20fe-7b38-4f83-ba94-986013d30ea5"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 17:30:03 crc kubenswrapper[4799]: I1010 17:30:03.115607 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad8b20fe-7b38-4f83-ba94-986013d30ea5-kube-api-access-qt8vr" (OuterVolumeSpecName: "kube-api-access-qt8vr") pod "ad8b20fe-7b38-4f83-ba94-986013d30ea5" (UID: "ad8b20fe-7b38-4f83-ba94-986013d30ea5"). InnerVolumeSpecName "kube-api-access-qt8vr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:30:03 crc kubenswrapper[4799]: I1010 17:30:03.208106 4799 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ad8b20fe-7b38-4f83-ba94-986013d30ea5-config-volume\") on node \"crc\" DevicePath \"\"" Oct 10 17:30:03 crc kubenswrapper[4799]: I1010 17:30:03.208152 4799 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ad8b20fe-7b38-4f83-ba94-986013d30ea5-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 10 17:30:03 crc kubenswrapper[4799]: I1010 17:30:03.208165 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qt8vr\" (UniqueName: \"kubernetes.io/projected/ad8b20fe-7b38-4f83-ba94-986013d30ea5-kube-api-access-qt8vr\") on node \"crc\" DevicePath \"\"" Oct 10 17:30:03 crc kubenswrapper[4799]: I1010 17:30:03.576915 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335290-gd5c6" event={"ID":"ad8b20fe-7b38-4f83-ba94-986013d30ea5","Type":"ContainerDied","Data":"cbbee23926d6bdc36c318618926b62e20980775311112629df25e57461105463"} Oct 10 17:30:03 crc kubenswrapper[4799]: I1010 17:30:03.577169 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cbbee23926d6bdc36c318618926b62e20980775311112629df25e57461105463" Oct 10 17:30:03 crc kubenswrapper[4799]: I1010 17:30:03.576949 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335290-gd5c6" Oct 10 17:30:04 crc kubenswrapper[4799]: I1010 17:30:04.041480 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335245-pwfqs"] Oct 10 17:30:04 crc kubenswrapper[4799]: I1010 17:30:04.049342 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335245-pwfqs"] Oct 10 17:30:05 crc kubenswrapper[4799]: I1010 17:30:05.416243 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="923eeaa7-33ad-4958-8c29-83d9508c527c" path="/var/lib/kubelet/pods/923eeaa7-33ad-4958-8c29-83d9508c527c/volumes" Oct 10 17:30:05 crc kubenswrapper[4799]: I1010 17:30:05.732884 4799 scope.go:117] "RemoveContainer" containerID="dc80910e86773f54ae92cfc151c285e5c20d72d27b9a21bf89d0b4d93f66e785" Oct 10 17:30:45 crc kubenswrapper[4799]: I1010 17:30:45.249198 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 17:30:45 crc kubenswrapper[4799]: I1010 17:30:45.249838 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 17:31:15 crc kubenswrapper[4799]: I1010 17:31:15.255958 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body= Oct 10 17:31:15 crc kubenswrapper[4799]: I1010 17:31:15.256821 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 17:31:45 crc kubenswrapper[4799]: I1010 17:31:45.249409 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 17:31:45 crc kubenswrapper[4799]: I1010 17:31:45.250127 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 17:31:45 crc kubenswrapper[4799]: I1010 17:31:45.250190 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 17:31:45 crc kubenswrapper[4799]: I1010 17:31:45.251319 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"50c1c49a2b0ab1d8d8b7206eccc727d542a604d2bf97ae52e942229bc6e50061"} pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 10 17:31:45 crc kubenswrapper[4799]: I1010 17:31:45.251422 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" containerID="cri-o://50c1c49a2b0ab1d8d8b7206eccc727d542a604d2bf97ae52e942229bc6e50061" gracePeriod=600 Oct 10 17:31:45 crc kubenswrapper[4799]: E1010 17:31:45.387512 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:31:45 crc kubenswrapper[4799]: I1010 17:31:45.547829 4799 generic.go:334] "Generic (PLEG): container finished" podID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerID="50c1c49a2b0ab1d8d8b7206eccc727d542a604d2bf97ae52e942229bc6e50061" exitCode=0 Oct 10 17:31:45 crc kubenswrapper[4799]: I1010 17:31:45.547897 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerDied","Data":"50c1c49a2b0ab1d8d8b7206eccc727d542a604d2bf97ae52e942229bc6e50061"} Oct 10 17:31:45 crc kubenswrapper[4799]: I1010 17:31:45.547983 4799 scope.go:117] "RemoveContainer" containerID="1805e3a5f297fe261be5d0ffadb3f386d2dd65f9b6e9f819312403974b1cfafc" Oct 10 17:31:45 crc kubenswrapper[4799]: I1010 17:31:45.548920 4799 scope.go:117] "RemoveContainer" 
containerID="50c1c49a2b0ab1d8d8b7206eccc727d542a604d2bf97ae52e942229bc6e50061" Oct 10 17:31:45 crc kubenswrapper[4799]: E1010 17:31:45.549508 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:31:57 crc kubenswrapper[4799]: I1010 17:31:57.411117 4799 scope.go:117] "RemoveContainer" containerID="50c1c49a2b0ab1d8d8b7206eccc727d542a604d2bf97ae52e942229bc6e50061" Oct 10 17:31:57 crc kubenswrapper[4799]: E1010 17:31:57.412368 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:32:11 crc kubenswrapper[4799]: I1010 17:32:11.403258 4799 scope.go:117] "RemoveContainer" containerID="50c1c49a2b0ab1d8d8b7206eccc727d542a604d2bf97ae52e942229bc6e50061" Oct 10 17:32:11 crc kubenswrapper[4799]: E1010 17:32:11.404578 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:32:25 crc kubenswrapper[4799]: I1010 17:32:25.402460 4799 scope.go:117] "RemoveContainer" containerID="50c1c49a2b0ab1d8d8b7206eccc727d542a604d2bf97ae52e942229bc6e50061" Oct 10 17:32:25 crc kubenswrapper[4799]: E1010 17:32:25.403371 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:32:36 crc kubenswrapper[4799]: I1010 17:32:36.402959 4799 scope.go:117] "RemoveContainer" containerID="50c1c49a2b0ab1d8d8b7206eccc727d542a604d2bf97ae52e942229bc6e50061" Oct 10 17:32:36 crc kubenswrapper[4799]: E1010 17:32:36.404013 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:32:49 crc kubenswrapper[4799]: I1010 17:32:49.402896 4799 scope.go:117] "RemoveContainer" containerID="50c1c49a2b0ab1d8d8b7206eccc727d542a604d2bf97ae52e942229bc6e50061" Oct 10 17:32:49 crc kubenswrapper[4799]: E1010 17:32:49.404729 4799 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:33:03 crc kubenswrapper[4799]: I1010 17:33:03.404604 4799 scope.go:117] "RemoveContainer" containerID="50c1c49a2b0ab1d8d8b7206eccc727d542a604d2bf97ae52e942229bc6e50061" Oct 10 17:33:03 crc kubenswrapper[4799]: E1010 17:33:03.407157 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:33:16 crc kubenswrapper[4799]: I1010 17:33:16.403176 4799 scope.go:117] "RemoveContainer" containerID="50c1c49a2b0ab1d8d8b7206eccc727d542a604d2bf97ae52e942229bc6e50061" Oct 10 17:33:16 crc kubenswrapper[4799]: E1010 17:33:16.404313 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:33:17 crc kubenswrapper[4799]: I1010 17:33:17.440683 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mk79d"] Oct 10 17:33:17 crc kubenswrapper[4799]: E1010 17:33:17.440973 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad8b20fe-7b38-4f83-ba94-986013d30ea5" containerName="collect-profiles" Oct 10 17:33:17 crc kubenswrapper[4799]: I1010 17:33:17.440984 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad8b20fe-7b38-4f83-ba94-986013d30ea5" containerName="collect-profiles" Oct 10 17:33:17 crc kubenswrapper[4799]: I1010 17:33:17.441117 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad8b20fe-7b38-4f83-ba94-986013d30ea5" containerName="collect-profiles" Oct 10 17:33:17 crc kubenswrapper[4799]: I1010 17:33:17.442018 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mk79d" Oct 10 17:33:17 crc kubenswrapper[4799]: I1010 17:33:17.469970 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mk79d"] Oct 10 17:33:17 crc kubenswrapper[4799]: I1010 17:33:17.600945 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tnvg7\" (UniqueName: \"kubernetes.io/projected/f0f97d6f-5b23-4c67-9173-3ad73cda89cb-kube-api-access-tnvg7\") pod \"redhat-marketplace-mk79d\" (UID: \"f0f97d6f-5b23-4c67-9173-3ad73cda89cb\") " pod="openshift-marketplace/redhat-marketplace-mk79d" Oct 10 17:33:17 crc kubenswrapper[4799]: I1010 17:33:17.601006 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0f97d6f-5b23-4c67-9173-3ad73cda89cb-catalog-content\") pod \"redhat-marketplace-mk79d\" (UID: \"f0f97d6f-5b23-4c67-9173-3ad73cda89cb\") " pod="openshift-marketplace/redhat-marketplace-mk79d" Oct 10 17:33:17 crc kubenswrapper[4799]: I1010 17:33:17.601092 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0f97d6f-5b23-4c67-9173-3ad73cda89cb-utilities\") pod \"redhat-marketplace-mk79d\" (UID: \"f0f97d6f-5b23-4c67-9173-3ad73cda89cb\") " pod="openshift-marketplace/redhat-marketplace-mk79d" Oct 10 17:33:17 crc kubenswrapper[4799]: I1010 17:33:17.702453 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0f97d6f-5b23-4c67-9173-3ad73cda89cb-catalog-content\") pod \"redhat-marketplace-mk79d\" (UID: \"f0f97d6f-5b23-4c67-9173-3ad73cda89cb\") " pod="openshift-marketplace/redhat-marketplace-mk79d" Oct 10 17:33:17 crc kubenswrapper[4799]: I1010 17:33:17.702565 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0f97d6f-5b23-4c67-9173-3ad73cda89cb-utilities\") pod \"redhat-marketplace-mk79d\" (UID: \"f0f97d6f-5b23-4c67-9173-3ad73cda89cb\") " pod="openshift-marketplace/redhat-marketplace-mk79d" Oct 10 17:33:17 crc kubenswrapper[4799]: I1010 17:33:17.702642 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tnvg7\" (UniqueName: \"kubernetes.io/projected/f0f97d6f-5b23-4c67-9173-3ad73cda89cb-kube-api-access-tnvg7\") pod \"redhat-marketplace-mk79d\" (UID: \"f0f97d6f-5b23-4c67-9173-3ad73cda89cb\") " pod="openshift-marketplace/redhat-marketplace-mk79d" Oct 10 17:33:17 crc kubenswrapper[4799]: I1010 17:33:17.703122 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0f97d6f-5b23-4c67-9173-3ad73cda89cb-catalog-content\") pod \"redhat-marketplace-mk79d\" (UID: \"f0f97d6f-5b23-4c67-9173-3ad73cda89cb\") " pod="openshift-marketplace/redhat-marketplace-mk79d" Oct 10 17:33:17 crc kubenswrapper[4799]: I1010 17:33:17.703146 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0f97d6f-5b23-4c67-9173-3ad73cda89cb-utilities\") pod \"redhat-marketplace-mk79d\" (UID: \"f0f97d6f-5b23-4c67-9173-3ad73cda89cb\") " pod="openshift-marketplace/redhat-marketplace-mk79d" Oct 10 17:33:17 crc kubenswrapper[4799]: I1010 17:33:17.728804 4799 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-tnvg7\" (UniqueName: \"kubernetes.io/projected/f0f97d6f-5b23-4c67-9173-3ad73cda89cb-kube-api-access-tnvg7\") pod \"redhat-marketplace-mk79d\" (UID: \"f0f97d6f-5b23-4c67-9173-3ad73cda89cb\") " pod="openshift-marketplace/redhat-marketplace-mk79d" Oct 10 17:33:17 crc kubenswrapper[4799]: I1010 17:33:17.771671 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mk79d" Oct 10 17:33:18 crc kubenswrapper[4799]: I1010 17:33:18.011396 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mk79d"] Oct 10 17:33:18 crc kubenswrapper[4799]: I1010 17:33:18.483513 4799 generic.go:334] "Generic (PLEG): container finished" podID="f0f97d6f-5b23-4c67-9173-3ad73cda89cb" containerID="ffa0289f6c0db9cc24b65cca0465f52a5385d62f2de2930b075562896e9e1086" exitCode=0 Oct 10 17:33:18 crc kubenswrapper[4799]: I1010 17:33:18.483623 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mk79d" event={"ID":"f0f97d6f-5b23-4c67-9173-3ad73cda89cb","Type":"ContainerDied","Data":"ffa0289f6c0db9cc24b65cca0465f52a5385d62f2de2930b075562896e9e1086"} Oct 10 17:33:18 crc kubenswrapper[4799]: I1010 17:33:18.484023 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mk79d" event={"ID":"f0f97d6f-5b23-4c67-9173-3ad73cda89cb","Type":"ContainerStarted","Data":"240b20632b6363ff3c8e5993f2f1d9942011e319573d02a7d68fd810046646c2"} Oct 10 17:33:18 crc kubenswrapper[4799]: I1010 17:33:18.486308 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 10 17:33:19 crc kubenswrapper[4799]: I1010 17:33:19.492489 4799 generic.go:334] "Generic (PLEG): container finished" podID="f0f97d6f-5b23-4c67-9173-3ad73cda89cb" containerID="3ff417aa2a7fe5a88083ac6ccedf363535440e70bdbc76973d754eeef6756762" exitCode=0 Oct 10 17:33:19 crc kubenswrapper[4799]: I1010 17:33:19.492654 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mk79d" event={"ID":"f0f97d6f-5b23-4c67-9173-3ad73cda89cb","Type":"ContainerDied","Data":"3ff417aa2a7fe5a88083ac6ccedf363535440e70bdbc76973d754eeef6756762"} Oct 10 17:33:20 crc kubenswrapper[4799]: I1010 17:33:20.504494 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mk79d" event={"ID":"f0f97d6f-5b23-4c67-9173-3ad73cda89cb","Type":"ContainerStarted","Data":"d97cf719b52d617475eb5808570a799303a8ff4d3bd146afa75a7e10e37906b3"} Oct 10 17:33:20 crc kubenswrapper[4799]: I1010 17:33:20.536099 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mk79d" podStartSLOduration=2.039111113 podStartE2EDuration="3.536076912s" podCreationTimestamp="2025-10-10 17:33:17 +0000 UTC" firstStartedPulling="2025-10-10 17:33:18.486113361 +0000 UTC m=+3691.994437476" lastFinishedPulling="2025-10-10 17:33:19.98307913 +0000 UTC m=+3693.491403275" observedRunningTime="2025-10-10 17:33:20.534914694 +0000 UTC m=+3694.043238859" watchObservedRunningTime="2025-10-10 17:33:20.536076912 +0000 UTC m=+3694.044401037" Oct 10 17:33:27 crc kubenswrapper[4799]: I1010 17:33:27.772014 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mk79d" Oct 10 17:33:27 crc kubenswrapper[4799]: I1010 17:33:27.772401 4799 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mk79d" Oct 10 17:33:27 crc kubenswrapper[4799]: I1010 17:33:27.843339 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mk79d" Oct 10 17:33:28 crc kubenswrapper[4799]: I1010 17:33:28.674252 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mk79d" Oct 10 17:33:28 crc kubenswrapper[4799]: I1010 17:33:28.760233 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mk79d"] Oct 10 17:33:30 crc kubenswrapper[4799]: I1010 17:33:30.403151 4799 scope.go:117] "RemoveContainer" containerID="50c1c49a2b0ab1d8d8b7206eccc727d542a604d2bf97ae52e942229bc6e50061" Oct 10 17:33:30 crc kubenswrapper[4799]: E1010 17:33:30.403633 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:33:30 crc kubenswrapper[4799]: I1010 17:33:30.625910 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mk79d" podUID="f0f97d6f-5b23-4c67-9173-3ad73cda89cb" containerName="registry-server" containerID="cri-o://d97cf719b52d617475eb5808570a799303a8ff4d3bd146afa75a7e10e37906b3" gracePeriod=2 Oct 10 17:33:31 crc kubenswrapper[4799]: I1010 17:33:31.175838 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mk79d" Oct 10 17:33:31 crc kubenswrapper[4799]: I1010 17:33:31.332209 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0f97d6f-5b23-4c67-9173-3ad73cda89cb-catalog-content\") pod \"f0f97d6f-5b23-4c67-9173-3ad73cda89cb\" (UID: \"f0f97d6f-5b23-4c67-9173-3ad73cda89cb\") " Oct 10 17:33:31 crc kubenswrapper[4799]: I1010 17:33:31.332354 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tnvg7\" (UniqueName: \"kubernetes.io/projected/f0f97d6f-5b23-4c67-9173-3ad73cda89cb-kube-api-access-tnvg7\") pod \"f0f97d6f-5b23-4c67-9173-3ad73cda89cb\" (UID: \"f0f97d6f-5b23-4c67-9173-3ad73cda89cb\") " Oct 10 17:33:31 crc kubenswrapper[4799]: I1010 17:33:31.332464 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0f97d6f-5b23-4c67-9173-3ad73cda89cb-utilities\") pod \"f0f97d6f-5b23-4c67-9173-3ad73cda89cb\" (UID: \"f0f97d6f-5b23-4c67-9173-3ad73cda89cb\") " Oct 10 17:33:31 crc kubenswrapper[4799]: I1010 17:33:31.333720 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f0f97d6f-5b23-4c67-9173-3ad73cda89cb-utilities" (OuterVolumeSpecName: "utilities") pod "f0f97d6f-5b23-4c67-9173-3ad73cda89cb" (UID: "f0f97d6f-5b23-4c67-9173-3ad73cda89cb"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:33:31 crc kubenswrapper[4799]: I1010 17:33:31.344379 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f0f97d6f-5b23-4c67-9173-3ad73cda89cb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f0f97d6f-5b23-4c67-9173-3ad73cda89cb" (UID: "f0f97d6f-5b23-4c67-9173-3ad73cda89cb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:33:31 crc kubenswrapper[4799]: I1010 17:33:31.344513 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0f97d6f-5b23-4c67-9173-3ad73cda89cb-kube-api-access-tnvg7" (OuterVolumeSpecName: "kube-api-access-tnvg7") pod "f0f97d6f-5b23-4c67-9173-3ad73cda89cb" (UID: "f0f97d6f-5b23-4c67-9173-3ad73cda89cb"). InnerVolumeSpecName "kube-api-access-tnvg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:33:31 crc kubenswrapper[4799]: I1010 17:33:31.433987 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0f97d6f-5b23-4c67-9173-3ad73cda89cb-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 17:33:31 crc kubenswrapper[4799]: I1010 17:33:31.435094 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0f97d6f-5b23-4c67-9173-3ad73cda89cb-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 17:33:31 crc kubenswrapper[4799]: I1010 17:33:31.435109 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tnvg7\" (UniqueName: \"kubernetes.io/projected/f0f97d6f-5b23-4c67-9173-3ad73cda89cb-kube-api-access-tnvg7\") on node \"crc\" DevicePath \"\"" Oct 10 17:33:31 crc kubenswrapper[4799]: I1010 17:33:31.640109 4799 generic.go:334] "Generic (PLEG): container finished" podID="f0f97d6f-5b23-4c67-9173-3ad73cda89cb" containerID="d97cf719b52d617475eb5808570a799303a8ff4d3bd146afa75a7e10e37906b3" exitCode=0 Oct 10 17:33:31 crc kubenswrapper[4799]: I1010 17:33:31.640190 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mk79d" event={"ID":"f0f97d6f-5b23-4c67-9173-3ad73cda89cb","Type":"ContainerDied","Data":"d97cf719b52d617475eb5808570a799303a8ff4d3bd146afa75a7e10e37906b3"} Oct 10 17:33:31 crc kubenswrapper[4799]: I1010 17:33:31.640241 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mk79d" event={"ID":"f0f97d6f-5b23-4c67-9173-3ad73cda89cb","Type":"ContainerDied","Data":"240b20632b6363ff3c8e5993f2f1d9942011e319573d02a7d68fd810046646c2"} Oct 10 17:33:31 crc kubenswrapper[4799]: I1010 17:33:31.640282 4799 scope.go:117] "RemoveContainer" containerID="d97cf719b52d617475eb5808570a799303a8ff4d3bd146afa75a7e10e37906b3" Oct 10 17:33:31 crc kubenswrapper[4799]: I1010 17:33:31.641033 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mk79d" Oct 10 17:33:31 crc kubenswrapper[4799]: I1010 17:33:31.669197 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mk79d"] Oct 10 17:33:31 crc kubenswrapper[4799]: I1010 17:33:31.674207 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mk79d"] Oct 10 17:33:31 crc kubenswrapper[4799]: I1010 17:33:31.678593 4799 scope.go:117] "RemoveContainer" containerID="3ff417aa2a7fe5a88083ac6ccedf363535440e70bdbc76973d754eeef6756762" Oct 10 17:33:31 crc kubenswrapper[4799]: I1010 17:33:31.714388 4799 scope.go:117] "RemoveContainer" containerID="ffa0289f6c0db9cc24b65cca0465f52a5385d62f2de2930b075562896e9e1086" Oct 10 17:33:31 crc kubenswrapper[4799]: I1010 17:33:31.744110 4799 scope.go:117] "RemoveContainer" containerID="d97cf719b52d617475eb5808570a799303a8ff4d3bd146afa75a7e10e37906b3" Oct 10 17:33:31 crc kubenswrapper[4799]: E1010 17:33:31.744705 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d97cf719b52d617475eb5808570a799303a8ff4d3bd146afa75a7e10e37906b3\": container with ID starting with d97cf719b52d617475eb5808570a799303a8ff4d3bd146afa75a7e10e37906b3 not found: ID does not exist" containerID="d97cf719b52d617475eb5808570a799303a8ff4d3bd146afa75a7e10e37906b3" Oct 10 17:33:31 crc kubenswrapper[4799]: I1010 17:33:31.744793 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d97cf719b52d617475eb5808570a799303a8ff4d3bd146afa75a7e10e37906b3"} err="failed to get container status \"d97cf719b52d617475eb5808570a799303a8ff4d3bd146afa75a7e10e37906b3\": rpc error: code = NotFound desc = could not find container \"d97cf719b52d617475eb5808570a799303a8ff4d3bd146afa75a7e10e37906b3\": container with ID starting with d97cf719b52d617475eb5808570a799303a8ff4d3bd146afa75a7e10e37906b3 not found: ID does not exist" Oct 10 17:33:31 crc kubenswrapper[4799]: I1010 17:33:31.744826 4799 scope.go:117] "RemoveContainer" containerID="3ff417aa2a7fe5a88083ac6ccedf363535440e70bdbc76973d754eeef6756762" Oct 10 17:33:31 crc kubenswrapper[4799]: E1010 17:33:31.745350 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ff417aa2a7fe5a88083ac6ccedf363535440e70bdbc76973d754eeef6756762\": container with ID starting with 3ff417aa2a7fe5a88083ac6ccedf363535440e70bdbc76973d754eeef6756762 not found: ID does not exist" containerID="3ff417aa2a7fe5a88083ac6ccedf363535440e70bdbc76973d754eeef6756762" Oct 10 17:33:31 crc kubenswrapper[4799]: I1010 17:33:31.745394 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ff417aa2a7fe5a88083ac6ccedf363535440e70bdbc76973d754eeef6756762"} err="failed to get container status \"3ff417aa2a7fe5a88083ac6ccedf363535440e70bdbc76973d754eeef6756762\": rpc error: code = NotFound desc = could not find container \"3ff417aa2a7fe5a88083ac6ccedf363535440e70bdbc76973d754eeef6756762\": container with ID starting with 3ff417aa2a7fe5a88083ac6ccedf363535440e70bdbc76973d754eeef6756762 not found: ID does not exist" Oct 10 17:33:31 crc kubenswrapper[4799]: I1010 17:33:31.745420 4799 scope.go:117] "RemoveContainer" containerID="ffa0289f6c0db9cc24b65cca0465f52a5385d62f2de2930b075562896e9e1086" Oct 10 17:33:31 crc kubenswrapper[4799]: E1010 17:33:31.745816 4799 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"ffa0289f6c0db9cc24b65cca0465f52a5385d62f2de2930b075562896e9e1086\": container with ID starting with ffa0289f6c0db9cc24b65cca0465f52a5385d62f2de2930b075562896e9e1086 not found: ID does not exist" containerID="ffa0289f6c0db9cc24b65cca0465f52a5385d62f2de2930b075562896e9e1086" Oct 10 17:33:31 crc kubenswrapper[4799]: I1010 17:33:31.745861 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ffa0289f6c0db9cc24b65cca0465f52a5385d62f2de2930b075562896e9e1086"} err="failed to get container status \"ffa0289f6c0db9cc24b65cca0465f52a5385d62f2de2930b075562896e9e1086\": rpc error: code = NotFound desc = could not find container \"ffa0289f6c0db9cc24b65cca0465f52a5385d62f2de2930b075562896e9e1086\": container with ID starting with ffa0289f6c0db9cc24b65cca0465f52a5385d62f2de2930b075562896e9e1086 not found: ID does not exist" Oct 10 17:33:33 crc kubenswrapper[4799]: I1010 17:33:33.421312 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0f97d6f-5b23-4c67-9173-3ad73cda89cb" path="/var/lib/kubelet/pods/f0f97d6f-5b23-4c67-9173-3ad73cda89cb/volumes" Oct 10 17:33:42 crc kubenswrapper[4799]: I1010 17:33:42.403186 4799 scope.go:117] "RemoveContainer" containerID="50c1c49a2b0ab1d8d8b7206eccc727d542a604d2bf97ae52e942229bc6e50061" Oct 10 17:33:42 crc kubenswrapper[4799]: E1010 17:33:42.404219 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:33:56 crc kubenswrapper[4799]: I1010 17:33:56.402604 4799 scope.go:117] "RemoveContainer" containerID="50c1c49a2b0ab1d8d8b7206eccc727d542a604d2bf97ae52e942229bc6e50061" Oct 10 17:33:56 crc kubenswrapper[4799]: E1010 17:33:56.403364 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:34:08 crc kubenswrapper[4799]: I1010 17:34:08.402886 4799 scope.go:117] "RemoveContainer" containerID="50c1c49a2b0ab1d8d8b7206eccc727d542a604d2bf97ae52e942229bc6e50061" Oct 10 17:34:08 crc kubenswrapper[4799]: E1010 17:34:08.403589 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:34:19 crc kubenswrapper[4799]: I1010 17:34:19.402967 4799 scope.go:117] "RemoveContainer" containerID="50c1c49a2b0ab1d8d8b7206eccc727d542a604d2bf97ae52e942229bc6e50061" Oct 10 17:34:19 crc kubenswrapper[4799]: E1010 17:34:19.403776 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:34:30 crc kubenswrapper[4799]: I1010 17:34:30.403417 4799 scope.go:117] "RemoveContainer" containerID="50c1c49a2b0ab1d8d8b7206eccc727d542a604d2bf97ae52e942229bc6e50061" Oct 10 17:34:30 crc kubenswrapper[4799]: E1010 17:34:30.405668 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:34:42 crc kubenswrapper[4799]: I1010 17:34:42.402889 4799 scope.go:117] "RemoveContainer" containerID="50c1c49a2b0ab1d8d8b7206eccc727d542a604d2bf97ae52e942229bc6e50061" Oct 10 17:34:42 crc kubenswrapper[4799]: E1010 17:34:42.403933 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:34:54 crc kubenswrapper[4799]: I1010 17:34:54.403146 4799 scope.go:117] "RemoveContainer" containerID="50c1c49a2b0ab1d8d8b7206eccc727d542a604d2bf97ae52e942229bc6e50061" Oct 10 17:34:54 crc kubenswrapper[4799]: E1010 17:34:54.404331 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:35:09 crc kubenswrapper[4799]: I1010 17:35:09.402628 4799 scope.go:117] "RemoveContainer" containerID="50c1c49a2b0ab1d8d8b7206eccc727d542a604d2bf97ae52e942229bc6e50061" Oct 10 17:35:09 crc kubenswrapper[4799]: E1010 17:35:09.403666 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:35:24 crc kubenswrapper[4799]: I1010 17:35:24.403252 4799 scope.go:117] "RemoveContainer" containerID="50c1c49a2b0ab1d8d8b7206eccc727d542a604d2bf97ae52e942229bc6e50061" Oct 10 17:35:24 crc kubenswrapper[4799]: E1010 17:35:24.404323 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:35:38 crc kubenswrapper[4799]: I1010 17:35:38.403156 4799 scope.go:117] "RemoveContainer" containerID="50c1c49a2b0ab1d8d8b7206eccc727d542a604d2bf97ae52e942229bc6e50061" Oct 10 17:35:38 crc kubenswrapper[4799]: E1010 17:35:38.404129 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:35:51 crc kubenswrapper[4799]: I1010 17:35:51.402947 4799 scope.go:117] "RemoveContainer" containerID="50c1c49a2b0ab1d8d8b7206eccc727d542a604d2bf97ae52e942229bc6e50061" Oct 10 17:35:51 crc kubenswrapper[4799]: E1010 17:35:51.404050 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:36:05 crc kubenswrapper[4799]: I1010 17:36:05.402899 4799 scope.go:117] "RemoveContainer" containerID="50c1c49a2b0ab1d8d8b7206eccc727d542a604d2bf97ae52e942229bc6e50061" Oct 10 17:36:05 crc kubenswrapper[4799]: E1010 17:36:05.403777 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:36:16 crc kubenswrapper[4799]: I1010 17:36:16.402960 4799 scope.go:117] "RemoveContainer" containerID="50c1c49a2b0ab1d8d8b7206eccc727d542a604d2bf97ae52e942229bc6e50061" Oct 10 17:36:16 crc kubenswrapper[4799]: E1010 17:36:16.403936 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:36:31 crc kubenswrapper[4799]: I1010 17:36:31.403400 4799 scope.go:117] "RemoveContainer" containerID="50c1c49a2b0ab1d8d8b7206eccc727d542a604d2bf97ae52e942229bc6e50061" Oct 10 17:36:31 crc kubenswrapper[4799]: E1010 17:36:31.404187 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" 
podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:36:45 crc kubenswrapper[4799]: I1010 17:36:45.403418 4799 scope.go:117] "RemoveContainer" containerID="50c1c49a2b0ab1d8d8b7206eccc727d542a604d2bf97ae52e942229bc6e50061" Oct 10 17:36:46 crc kubenswrapper[4799]: I1010 17:36:46.506222 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"b03f64a0a26d5fb94dfb90dcd1184b37fd9810e09a8440f1ed7eeed522926253"} Oct 10 17:37:13 crc kubenswrapper[4799]: I1010 17:37:13.150975 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-kn9kj"] Oct 10 17:37:13 crc kubenswrapper[4799]: E1010 17:37:13.151995 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0f97d6f-5b23-4c67-9173-3ad73cda89cb" containerName="extract-content" Oct 10 17:37:13 crc kubenswrapper[4799]: I1010 17:37:13.152018 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0f97d6f-5b23-4c67-9173-3ad73cda89cb" containerName="extract-content" Oct 10 17:37:13 crc kubenswrapper[4799]: E1010 17:37:13.152060 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0f97d6f-5b23-4c67-9173-3ad73cda89cb" containerName="registry-server" Oct 10 17:37:13 crc kubenswrapper[4799]: I1010 17:37:13.152073 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0f97d6f-5b23-4c67-9173-3ad73cda89cb" containerName="registry-server" Oct 10 17:37:13 crc kubenswrapper[4799]: E1010 17:37:13.152130 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0f97d6f-5b23-4c67-9173-3ad73cda89cb" containerName="extract-utilities" Oct 10 17:37:13 crc kubenswrapper[4799]: I1010 17:37:13.152144 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0f97d6f-5b23-4c67-9173-3ad73cda89cb" containerName="extract-utilities" Oct 10 17:37:13 crc kubenswrapper[4799]: I1010 17:37:13.152376 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0f97d6f-5b23-4c67-9173-3ad73cda89cb" containerName="registry-server" Oct 10 17:37:13 crc kubenswrapper[4799]: I1010 17:37:13.154166 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kn9kj" Oct 10 17:37:13 crc kubenswrapper[4799]: I1010 17:37:13.159519 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfvtm\" (UniqueName: \"kubernetes.io/projected/927a4e58-2438-40ee-be6c-2672211f634f-kube-api-access-dfvtm\") pod \"community-operators-kn9kj\" (UID: \"927a4e58-2438-40ee-be6c-2672211f634f\") " pod="openshift-marketplace/community-operators-kn9kj" Oct 10 17:37:13 crc kubenswrapper[4799]: I1010 17:37:13.159598 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/927a4e58-2438-40ee-be6c-2672211f634f-utilities\") pod \"community-operators-kn9kj\" (UID: \"927a4e58-2438-40ee-be6c-2672211f634f\") " pod="openshift-marketplace/community-operators-kn9kj" Oct 10 17:37:13 crc kubenswrapper[4799]: I1010 17:37:13.159657 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/927a4e58-2438-40ee-be6c-2672211f634f-catalog-content\") pod \"community-operators-kn9kj\" (UID: \"927a4e58-2438-40ee-be6c-2672211f634f\") " pod="openshift-marketplace/community-operators-kn9kj" Oct 10 17:37:13 crc kubenswrapper[4799]: I1010 17:37:13.171944 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kn9kj"] Oct 10 17:37:13 crc kubenswrapper[4799]: I1010 17:37:13.260540 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfvtm\" (UniqueName: \"kubernetes.io/projected/927a4e58-2438-40ee-be6c-2672211f634f-kube-api-access-dfvtm\") pod \"community-operators-kn9kj\" (UID: \"927a4e58-2438-40ee-be6c-2672211f634f\") " pod="openshift-marketplace/community-operators-kn9kj" Oct 10 17:37:13 crc kubenswrapper[4799]: I1010 17:37:13.260631 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/927a4e58-2438-40ee-be6c-2672211f634f-utilities\") pod \"community-operators-kn9kj\" (UID: \"927a4e58-2438-40ee-be6c-2672211f634f\") " pod="openshift-marketplace/community-operators-kn9kj" Oct 10 17:37:13 crc kubenswrapper[4799]: I1010 17:37:13.260693 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/927a4e58-2438-40ee-be6c-2672211f634f-catalog-content\") pod \"community-operators-kn9kj\" (UID: \"927a4e58-2438-40ee-be6c-2672211f634f\") " pod="openshift-marketplace/community-operators-kn9kj" Oct 10 17:37:13 crc kubenswrapper[4799]: I1010 17:37:13.261210 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/927a4e58-2438-40ee-be6c-2672211f634f-catalog-content\") pod \"community-operators-kn9kj\" (UID: \"927a4e58-2438-40ee-be6c-2672211f634f\") " pod="openshift-marketplace/community-operators-kn9kj" Oct 10 17:37:13 crc kubenswrapper[4799]: I1010 17:37:13.261678 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/927a4e58-2438-40ee-be6c-2672211f634f-utilities\") pod \"community-operators-kn9kj\" (UID: \"927a4e58-2438-40ee-be6c-2672211f634f\") " pod="openshift-marketplace/community-operators-kn9kj" Oct 10 17:37:13 crc kubenswrapper[4799]: I1010 17:37:13.295127 4799 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-dfvtm\" (UniqueName: \"kubernetes.io/projected/927a4e58-2438-40ee-be6c-2672211f634f-kube-api-access-dfvtm\") pod \"community-operators-kn9kj\" (UID: \"927a4e58-2438-40ee-be6c-2672211f634f\") " pod="openshift-marketplace/community-operators-kn9kj" Oct 10 17:37:13 crc kubenswrapper[4799]: I1010 17:37:13.485476 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kn9kj" Oct 10 17:37:14 crc kubenswrapper[4799]: I1010 17:37:14.025390 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kn9kj"] Oct 10 17:37:14 crc kubenswrapper[4799]: W1010 17:37:14.034894 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod927a4e58_2438_40ee_be6c_2672211f634f.slice/crio-630b3aa9de6de254f5157a4e57288a0e94234739fd03801121ace517455a4d3f WatchSource:0}: Error finding container 630b3aa9de6de254f5157a4e57288a0e94234739fd03801121ace517455a4d3f: Status 404 returned error can't find the container with id 630b3aa9de6de254f5157a4e57288a0e94234739fd03801121ace517455a4d3f Oct 10 17:37:14 crc kubenswrapper[4799]: I1010 17:37:14.778057 4799 generic.go:334] "Generic (PLEG): container finished" podID="927a4e58-2438-40ee-be6c-2672211f634f" containerID="e980d23e9ef064b79885056fc81aa65dcf29d413fb9ae555c821f0fd0db2af6a" exitCode=0 Oct 10 17:37:14 crc kubenswrapper[4799]: I1010 17:37:14.778135 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kn9kj" event={"ID":"927a4e58-2438-40ee-be6c-2672211f634f","Type":"ContainerDied","Data":"e980d23e9ef064b79885056fc81aa65dcf29d413fb9ae555c821f0fd0db2af6a"} Oct 10 17:37:14 crc kubenswrapper[4799]: I1010 17:37:14.778211 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kn9kj" event={"ID":"927a4e58-2438-40ee-be6c-2672211f634f","Type":"ContainerStarted","Data":"630b3aa9de6de254f5157a4e57288a0e94234739fd03801121ace517455a4d3f"} Oct 10 17:37:16 crc kubenswrapper[4799]: E1010 17:37:16.496134 4799 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod927a4e58_2438_40ee_be6c_2672211f634f.slice/crio-conmon-799730d2dbaecbf058f21e09ab7432b7c31397858def30ebd5ee69e4cc5f7cf5.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod927a4e58_2438_40ee_be6c_2672211f634f.slice/crio-799730d2dbaecbf058f21e09ab7432b7c31397858def30ebd5ee69e4cc5f7cf5.scope\": RecentStats: unable to find data in memory cache]" Oct 10 17:37:16 crc kubenswrapper[4799]: I1010 17:37:16.810672 4799 generic.go:334] "Generic (PLEG): container finished" podID="927a4e58-2438-40ee-be6c-2672211f634f" containerID="799730d2dbaecbf058f21e09ab7432b7c31397858def30ebd5ee69e4cc5f7cf5" exitCode=0 Oct 10 17:37:16 crc kubenswrapper[4799]: I1010 17:37:16.811198 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kn9kj" event={"ID":"927a4e58-2438-40ee-be6c-2672211f634f","Type":"ContainerDied","Data":"799730d2dbaecbf058f21e09ab7432b7c31397858def30ebd5ee69e4cc5f7cf5"} Oct 10 17:37:17 crc kubenswrapper[4799]: I1010 17:37:17.824799 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kn9kj" 
event={"ID":"927a4e58-2438-40ee-be6c-2672211f634f","Type":"ContainerStarted","Data":"4d085b77cd9310adcd6978d6b83ca14b4081e522e209c69caa3fbd277ab11549"} Oct 10 17:37:17 crc kubenswrapper[4799]: I1010 17:37:17.855585 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-kn9kj" podStartSLOduration=2.312983873 podStartE2EDuration="4.855558488s" podCreationTimestamp="2025-10-10 17:37:13 +0000 UTC" firstStartedPulling="2025-10-10 17:37:14.780521992 +0000 UTC m=+3928.288846147" lastFinishedPulling="2025-10-10 17:37:17.323096617 +0000 UTC m=+3930.831420762" observedRunningTime="2025-10-10 17:37:17.854594605 +0000 UTC m=+3931.362918760" watchObservedRunningTime="2025-10-10 17:37:17.855558488 +0000 UTC m=+3931.363882643" Oct 10 17:37:23 crc kubenswrapper[4799]: I1010 17:37:23.485979 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-kn9kj" Oct 10 17:37:23 crc kubenswrapper[4799]: I1010 17:37:23.486921 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-kn9kj" Oct 10 17:37:23 crc kubenswrapper[4799]: I1010 17:37:23.587560 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-kn9kj" Oct 10 17:37:24 crc kubenswrapper[4799]: I1010 17:37:24.002194 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-kn9kj" Oct 10 17:37:24 crc kubenswrapper[4799]: I1010 17:37:24.054104 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kn9kj"] Oct 10 17:37:25 crc kubenswrapper[4799]: I1010 17:37:25.904271 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-kn9kj" podUID="927a4e58-2438-40ee-be6c-2672211f634f" containerName="registry-server" containerID="cri-o://4d085b77cd9310adcd6978d6b83ca14b4081e522e209c69caa3fbd277ab11549" gracePeriod=2 Oct 10 17:37:26 crc kubenswrapper[4799]: I1010 17:37:26.375171 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kn9kj" Oct 10 17:37:26 crc kubenswrapper[4799]: I1010 17:37:26.480411 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/927a4e58-2438-40ee-be6c-2672211f634f-utilities\") pod \"927a4e58-2438-40ee-be6c-2672211f634f\" (UID: \"927a4e58-2438-40ee-be6c-2672211f634f\") " Oct 10 17:37:26 crc kubenswrapper[4799]: I1010 17:37:26.480575 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dfvtm\" (UniqueName: \"kubernetes.io/projected/927a4e58-2438-40ee-be6c-2672211f634f-kube-api-access-dfvtm\") pod \"927a4e58-2438-40ee-be6c-2672211f634f\" (UID: \"927a4e58-2438-40ee-be6c-2672211f634f\") " Oct 10 17:37:26 crc kubenswrapper[4799]: I1010 17:37:26.480619 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/927a4e58-2438-40ee-be6c-2672211f634f-catalog-content\") pod \"927a4e58-2438-40ee-be6c-2672211f634f\" (UID: \"927a4e58-2438-40ee-be6c-2672211f634f\") " Oct 10 17:37:26 crc kubenswrapper[4799]: I1010 17:37:26.481439 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/927a4e58-2438-40ee-be6c-2672211f634f-utilities" (OuterVolumeSpecName: "utilities") pod "927a4e58-2438-40ee-be6c-2672211f634f" (UID: "927a4e58-2438-40ee-be6c-2672211f634f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:37:26 crc kubenswrapper[4799]: I1010 17:37:26.486597 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/927a4e58-2438-40ee-be6c-2672211f634f-kube-api-access-dfvtm" (OuterVolumeSpecName: "kube-api-access-dfvtm") pod "927a4e58-2438-40ee-be6c-2672211f634f" (UID: "927a4e58-2438-40ee-be6c-2672211f634f"). InnerVolumeSpecName "kube-api-access-dfvtm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:37:26 crc kubenswrapper[4799]: I1010 17:37:26.545238 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/927a4e58-2438-40ee-be6c-2672211f634f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "927a4e58-2438-40ee-be6c-2672211f634f" (UID: "927a4e58-2438-40ee-be6c-2672211f634f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:37:26 crc kubenswrapper[4799]: I1010 17:37:26.582004 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dfvtm\" (UniqueName: \"kubernetes.io/projected/927a4e58-2438-40ee-be6c-2672211f634f-kube-api-access-dfvtm\") on node \"crc\" DevicePath \"\"" Oct 10 17:37:26 crc kubenswrapper[4799]: I1010 17:37:26.582043 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/927a4e58-2438-40ee-be6c-2672211f634f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 17:37:26 crc kubenswrapper[4799]: I1010 17:37:26.582056 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/927a4e58-2438-40ee-be6c-2672211f634f-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 17:37:26 crc kubenswrapper[4799]: I1010 17:37:26.917835 4799 generic.go:334] "Generic (PLEG): container finished" podID="927a4e58-2438-40ee-be6c-2672211f634f" containerID="4d085b77cd9310adcd6978d6b83ca14b4081e522e209c69caa3fbd277ab11549" exitCode=0 Oct 10 17:37:26 crc kubenswrapper[4799]: I1010 17:37:26.917913 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kn9kj" event={"ID":"927a4e58-2438-40ee-be6c-2672211f634f","Type":"ContainerDied","Data":"4d085b77cd9310adcd6978d6b83ca14b4081e522e209c69caa3fbd277ab11549"} Oct 10 17:37:26 crc kubenswrapper[4799]: I1010 17:37:26.917949 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kn9kj" Oct 10 17:37:26 crc kubenswrapper[4799]: I1010 17:37:26.919270 4799 scope.go:117] "RemoveContainer" containerID="4d085b77cd9310adcd6978d6b83ca14b4081e522e209c69caa3fbd277ab11549" Oct 10 17:37:26 crc kubenswrapper[4799]: I1010 17:37:26.919122 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kn9kj" event={"ID":"927a4e58-2438-40ee-be6c-2672211f634f","Type":"ContainerDied","Data":"630b3aa9de6de254f5157a4e57288a0e94234739fd03801121ace517455a4d3f"} Oct 10 17:37:26 crc kubenswrapper[4799]: I1010 17:37:26.982278 4799 scope.go:117] "RemoveContainer" containerID="799730d2dbaecbf058f21e09ab7432b7c31397858def30ebd5ee69e4cc5f7cf5" Oct 10 17:37:26 crc kubenswrapper[4799]: I1010 17:37:26.991105 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kn9kj"] Oct 10 17:37:26 crc kubenswrapper[4799]: I1010 17:37:26.997265 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-kn9kj"] Oct 10 17:37:27 crc kubenswrapper[4799]: I1010 17:37:27.019975 4799 scope.go:117] "RemoveContainer" containerID="e980d23e9ef064b79885056fc81aa65dcf29d413fb9ae555c821f0fd0db2af6a" Oct 10 17:37:27 crc kubenswrapper[4799]: I1010 17:37:27.050656 4799 scope.go:117] "RemoveContainer" containerID="4d085b77cd9310adcd6978d6b83ca14b4081e522e209c69caa3fbd277ab11549" Oct 10 17:37:27 crc kubenswrapper[4799]: E1010 17:37:27.051060 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d085b77cd9310adcd6978d6b83ca14b4081e522e209c69caa3fbd277ab11549\": container with ID starting with 4d085b77cd9310adcd6978d6b83ca14b4081e522e209c69caa3fbd277ab11549 not found: ID does not exist" containerID="4d085b77cd9310adcd6978d6b83ca14b4081e522e209c69caa3fbd277ab11549" Oct 10 17:37:27 crc kubenswrapper[4799]: I1010 17:37:27.051113 
4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d085b77cd9310adcd6978d6b83ca14b4081e522e209c69caa3fbd277ab11549"} err="failed to get container status \"4d085b77cd9310adcd6978d6b83ca14b4081e522e209c69caa3fbd277ab11549\": rpc error: code = NotFound desc = could not find container \"4d085b77cd9310adcd6978d6b83ca14b4081e522e209c69caa3fbd277ab11549\": container with ID starting with 4d085b77cd9310adcd6978d6b83ca14b4081e522e209c69caa3fbd277ab11549 not found: ID does not exist" Oct 10 17:37:27 crc kubenswrapper[4799]: I1010 17:37:27.051146 4799 scope.go:117] "RemoveContainer" containerID="799730d2dbaecbf058f21e09ab7432b7c31397858def30ebd5ee69e4cc5f7cf5" Oct 10 17:37:27 crc kubenswrapper[4799]: E1010 17:37:27.051474 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"799730d2dbaecbf058f21e09ab7432b7c31397858def30ebd5ee69e4cc5f7cf5\": container with ID starting with 799730d2dbaecbf058f21e09ab7432b7c31397858def30ebd5ee69e4cc5f7cf5 not found: ID does not exist" containerID="799730d2dbaecbf058f21e09ab7432b7c31397858def30ebd5ee69e4cc5f7cf5" Oct 10 17:37:27 crc kubenswrapper[4799]: I1010 17:37:27.051524 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"799730d2dbaecbf058f21e09ab7432b7c31397858def30ebd5ee69e4cc5f7cf5"} err="failed to get container status \"799730d2dbaecbf058f21e09ab7432b7c31397858def30ebd5ee69e4cc5f7cf5\": rpc error: code = NotFound desc = could not find container \"799730d2dbaecbf058f21e09ab7432b7c31397858def30ebd5ee69e4cc5f7cf5\": container with ID starting with 799730d2dbaecbf058f21e09ab7432b7c31397858def30ebd5ee69e4cc5f7cf5 not found: ID does not exist" Oct 10 17:37:27 crc kubenswrapper[4799]: I1010 17:37:27.051560 4799 scope.go:117] "RemoveContainer" containerID="e980d23e9ef064b79885056fc81aa65dcf29d413fb9ae555c821f0fd0db2af6a" Oct 10 17:37:27 crc kubenswrapper[4799]: E1010 17:37:27.052023 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e980d23e9ef064b79885056fc81aa65dcf29d413fb9ae555c821f0fd0db2af6a\": container with ID starting with e980d23e9ef064b79885056fc81aa65dcf29d413fb9ae555c821f0fd0db2af6a not found: ID does not exist" containerID="e980d23e9ef064b79885056fc81aa65dcf29d413fb9ae555c821f0fd0db2af6a" Oct 10 17:37:27 crc kubenswrapper[4799]: I1010 17:37:27.052053 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e980d23e9ef064b79885056fc81aa65dcf29d413fb9ae555c821f0fd0db2af6a"} err="failed to get container status \"e980d23e9ef064b79885056fc81aa65dcf29d413fb9ae555c821f0fd0db2af6a\": rpc error: code = NotFound desc = could not find container \"e980d23e9ef064b79885056fc81aa65dcf29d413fb9ae555c821f0fd0db2af6a\": container with ID starting with e980d23e9ef064b79885056fc81aa65dcf29d413fb9ae555c821f0fd0db2af6a not found: ID does not exist" Oct 10 17:37:27 crc kubenswrapper[4799]: I1010 17:37:27.418703 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="927a4e58-2438-40ee-be6c-2672211f634f" path="/var/lib/kubelet/pods/927a4e58-2438-40ee-be6c-2672211f634f/volumes" Oct 10 17:37:29 crc kubenswrapper[4799]: I1010 17:37:29.245685 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-g2g6k"] Oct 10 17:37:29 crc kubenswrapper[4799]: E1010 17:37:29.246701 4799 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="927a4e58-2438-40ee-be6c-2672211f634f" containerName="registry-server" Oct 10 17:37:29 crc kubenswrapper[4799]: I1010 17:37:29.246783 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="927a4e58-2438-40ee-be6c-2672211f634f" containerName="registry-server" Oct 10 17:37:29 crc kubenswrapper[4799]: E1010 17:37:29.246868 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="927a4e58-2438-40ee-be6c-2672211f634f" containerName="extract-utilities" Oct 10 17:37:29 crc kubenswrapper[4799]: I1010 17:37:29.246889 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="927a4e58-2438-40ee-be6c-2672211f634f" containerName="extract-utilities" Oct 10 17:37:29 crc kubenswrapper[4799]: E1010 17:37:29.246909 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="927a4e58-2438-40ee-be6c-2672211f634f" containerName="extract-content" Oct 10 17:37:29 crc kubenswrapper[4799]: I1010 17:37:29.246928 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="927a4e58-2438-40ee-be6c-2672211f634f" containerName="extract-content" Oct 10 17:37:29 crc kubenswrapper[4799]: I1010 17:37:29.247391 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="927a4e58-2438-40ee-be6c-2672211f634f" containerName="registry-server" Oct 10 17:37:29 crc kubenswrapper[4799]: I1010 17:37:29.249960 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-g2g6k" Oct 10 17:37:29 crc kubenswrapper[4799]: I1010 17:37:29.258627 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-g2g6k"] Oct 10 17:37:29 crc kubenswrapper[4799]: I1010 17:37:29.431833 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d481aef8-cf0c-4360-a927-5418fbe09853-catalog-content\") pod \"redhat-operators-g2g6k\" (UID: \"d481aef8-cf0c-4360-a927-5418fbe09853\") " pod="openshift-marketplace/redhat-operators-g2g6k" Oct 10 17:37:29 crc kubenswrapper[4799]: I1010 17:37:29.432208 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d481aef8-cf0c-4360-a927-5418fbe09853-utilities\") pod \"redhat-operators-g2g6k\" (UID: \"d481aef8-cf0c-4360-a927-5418fbe09853\") " pod="openshift-marketplace/redhat-operators-g2g6k" Oct 10 17:37:29 crc kubenswrapper[4799]: I1010 17:37:29.432312 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-df5kz\" (UniqueName: \"kubernetes.io/projected/d481aef8-cf0c-4360-a927-5418fbe09853-kube-api-access-df5kz\") pod \"redhat-operators-g2g6k\" (UID: \"d481aef8-cf0c-4360-a927-5418fbe09853\") " pod="openshift-marketplace/redhat-operators-g2g6k" Oct 10 17:37:29 crc kubenswrapper[4799]: I1010 17:37:29.533796 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d481aef8-cf0c-4360-a927-5418fbe09853-utilities\") pod \"redhat-operators-g2g6k\" (UID: \"d481aef8-cf0c-4360-a927-5418fbe09853\") " pod="openshift-marketplace/redhat-operators-g2g6k" Oct 10 17:37:29 crc kubenswrapper[4799]: I1010 17:37:29.533905 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-df5kz\" (UniqueName: \"kubernetes.io/projected/d481aef8-cf0c-4360-a927-5418fbe09853-kube-api-access-df5kz\") pod \"redhat-operators-g2g6k\" (UID: 
\"d481aef8-cf0c-4360-a927-5418fbe09853\") " pod="openshift-marketplace/redhat-operators-g2g6k" Oct 10 17:37:29 crc kubenswrapper[4799]: I1010 17:37:29.533987 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d481aef8-cf0c-4360-a927-5418fbe09853-catalog-content\") pod \"redhat-operators-g2g6k\" (UID: \"d481aef8-cf0c-4360-a927-5418fbe09853\") " pod="openshift-marketplace/redhat-operators-g2g6k" Oct 10 17:37:29 crc kubenswrapper[4799]: I1010 17:37:29.535094 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d481aef8-cf0c-4360-a927-5418fbe09853-catalog-content\") pod \"redhat-operators-g2g6k\" (UID: \"d481aef8-cf0c-4360-a927-5418fbe09853\") " pod="openshift-marketplace/redhat-operators-g2g6k" Oct 10 17:37:29 crc kubenswrapper[4799]: I1010 17:37:29.535343 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d481aef8-cf0c-4360-a927-5418fbe09853-utilities\") pod \"redhat-operators-g2g6k\" (UID: \"d481aef8-cf0c-4360-a927-5418fbe09853\") " pod="openshift-marketplace/redhat-operators-g2g6k" Oct 10 17:37:29 crc kubenswrapper[4799]: I1010 17:37:29.564681 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-df5kz\" (UniqueName: \"kubernetes.io/projected/d481aef8-cf0c-4360-a927-5418fbe09853-kube-api-access-df5kz\") pod \"redhat-operators-g2g6k\" (UID: \"d481aef8-cf0c-4360-a927-5418fbe09853\") " pod="openshift-marketplace/redhat-operators-g2g6k" Oct 10 17:37:29 crc kubenswrapper[4799]: I1010 17:37:29.591334 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-g2g6k" Oct 10 17:37:29 crc kubenswrapper[4799]: I1010 17:37:29.857120 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-g2g6k"] Oct 10 17:37:29 crc kubenswrapper[4799]: I1010 17:37:29.948052 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g2g6k" event={"ID":"d481aef8-cf0c-4360-a927-5418fbe09853","Type":"ContainerStarted","Data":"2e9a068ee4803fe57ef77623abda7943bc40aa4cad1a8b744da715a975a47ceb"} Oct 10 17:37:30 crc kubenswrapper[4799]: I1010 17:37:30.958024 4799 generic.go:334] "Generic (PLEG): container finished" podID="d481aef8-cf0c-4360-a927-5418fbe09853" containerID="c8e7c3334b17dc54d81eec77a761b918ce4510ee19a1f3218f9cbe06f29420cc" exitCode=0 Oct 10 17:37:30 crc kubenswrapper[4799]: I1010 17:37:30.958122 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g2g6k" event={"ID":"d481aef8-cf0c-4360-a927-5418fbe09853","Type":"ContainerDied","Data":"c8e7c3334b17dc54d81eec77a761b918ce4510ee19a1f3218f9cbe06f29420cc"} Oct 10 17:37:32 crc kubenswrapper[4799]: I1010 17:37:32.981058 4799 generic.go:334] "Generic (PLEG): container finished" podID="d481aef8-cf0c-4360-a927-5418fbe09853" containerID="7d75dfb9b344a93a3ba573fe22a23fea53b4e9ed64e886fd93524f8c1a608875" exitCode=0 Oct 10 17:37:32 crc kubenswrapper[4799]: I1010 17:37:32.981183 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g2g6k" event={"ID":"d481aef8-cf0c-4360-a927-5418fbe09853","Type":"ContainerDied","Data":"7d75dfb9b344a93a3ba573fe22a23fea53b4e9ed64e886fd93524f8c1a608875"} Oct 10 17:37:33 crc kubenswrapper[4799]: I1010 17:37:33.993883 4799 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g2g6k" event={"ID":"d481aef8-cf0c-4360-a927-5418fbe09853","Type":"ContainerStarted","Data":"25bef5062b7597c144efe7efc937ef6f4f3a99ab41f43189fec361821e5e269a"} Oct 10 17:37:34 crc kubenswrapper[4799]: I1010 17:37:34.033811 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-g2g6k" podStartSLOduration=2.615439494 podStartE2EDuration="5.033791163s" podCreationTimestamp="2025-10-10 17:37:29 +0000 UTC" firstStartedPulling="2025-10-10 17:37:30.960249138 +0000 UTC m=+3944.468573293" lastFinishedPulling="2025-10-10 17:37:33.378600817 +0000 UTC m=+3946.886924962" observedRunningTime="2025-10-10 17:37:34.028299689 +0000 UTC m=+3947.536623874" watchObservedRunningTime="2025-10-10 17:37:34.033791163 +0000 UTC m=+3947.542115308" Oct 10 17:37:39 crc kubenswrapper[4799]: I1010 17:37:39.591513 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-g2g6k" Oct 10 17:37:39 crc kubenswrapper[4799]: I1010 17:37:39.592162 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-g2g6k" Oct 10 17:37:40 crc kubenswrapper[4799]: I1010 17:37:40.651315 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-g2g6k" podUID="d481aef8-cf0c-4360-a927-5418fbe09853" containerName="registry-server" probeResult="failure" output=< Oct 10 17:37:40 crc kubenswrapper[4799]: timeout: failed to connect service ":50051" within 1s Oct 10 17:37:40 crc kubenswrapper[4799]: > Oct 10 17:37:49 crc kubenswrapper[4799]: I1010 17:37:49.663827 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-g2g6k" Oct 10 17:37:49 crc kubenswrapper[4799]: I1010 17:37:49.750709 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-g2g6k" Oct 10 17:37:49 crc kubenswrapper[4799]: I1010 17:37:49.905919 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-g2g6k"] Oct 10 17:37:51 crc kubenswrapper[4799]: I1010 17:37:51.174218 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-g2g6k" podUID="d481aef8-cf0c-4360-a927-5418fbe09853" containerName="registry-server" containerID="cri-o://25bef5062b7597c144efe7efc937ef6f4f3a99ab41f43189fec361821e5e269a" gracePeriod=2 Oct 10 17:37:51 crc kubenswrapper[4799]: I1010 17:37:51.717184 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-g2g6k" Oct 10 17:37:51 crc kubenswrapper[4799]: I1010 17:37:51.826640 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d481aef8-cf0c-4360-a927-5418fbe09853-catalog-content\") pod \"d481aef8-cf0c-4360-a927-5418fbe09853\" (UID: \"d481aef8-cf0c-4360-a927-5418fbe09853\") " Oct 10 17:37:51 crc kubenswrapper[4799]: I1010 17:37:51.826901 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d481aef8-cf0c-4360-a927-5418fbe09853-utilities\") pod \"d481aef8-cf0c-4360-a927-5418fbe09853\" (UID: \"d481aef8-cf0c-4360-a927-5418fbe09853\") " Oct 10 17:37:51 crc kubenswrapper[4799]: I1010 17:37:51.828129 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-df5kz\" (UniqueName: \"kubernetes.io/projected/d481aef8-cf0c-4360-a927-5418fbe09853-kube-api-access-df5kz\") pod \"d481aef8-cf0c-4360-a927-5418fbe09853\" (UID: \"d481aef8-cf0c-4360-a927-5418fbe09853\") " Oct 10 17:37:51 crc kubenswrapper[4799]: I1010 17:37:51.828253 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d481aef8-cf0c-4360-a927-5418fbe09853-utilities" (OuterVolumeSpecName: "utilities") pod "d481aef8-cf0c-4360-a927-5418fbe09853" (UID: "d481aef8-cf0c-4360-a927-5418fbe09853"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:37:51 crc kubenswrapper[4799]: I1010 17:37:51.828954 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d481aef8-cf0c-4360-a927-5418fbe09853-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 17:37:51 crc kubenswrapper[4799]: I1010 17:37:51.839304 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d481aef8-cf0c-4360-a927-5418fbe09853-kube-api-access-df5kz" (OuterVolumeSpecName: "kube-api-access-df5kz") pod "d481aef8-cf0c-4360-a927-5418fbe09853" (UID: "d481aef8-cf0c-4360-a927-5418fbe09853"). InnerVolumeSpecName "kube-api-access-df5kz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:37:51 crc kubenswrapper[4799]: I1010 17:37:51.930385 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-df5kz\" (UniqueName: \"kubernetes.io/projected/d481aef8-cf0c-4360-a927-5418fbe09853-kube-api-access-df5kz\") on node \"crc\" DevicePath \"\"" Oct 10 17:37:51 crc kubenswrapper[4799]: I1010 17:37:51.957628 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d481aef8-cf0c-4360-a927-5418fbe09853-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d481aef8-cf0c-4360-a927-5418fbe09853" (UID: "d481aef8-cf0c-4360-a927-5418fbe09853"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:37:52 crc kubenswrapper[4799]: I1010 17:37:52.031885 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d481aef8-cf0c-4360-a927-5418fbe09853-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 17:37:52 crc kubenswrapper[4799]: I1010 17:37:52.197026 4799 generic.go:334] "Generic (PLEG): container finished" podID="d481aef8-cf0c-4360-a927-5418fbe09853" containerID="25bef5062b7597c144efe7efc937ef6f4f3a99ab41f43189fec361821e5e269a" exitCode=0 Oct 10 17:37:52 crc kubenswrapper[4799]: I1010 17:37:52.197109 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g2g6k" event={"ID":"d481aef8-cf0c-4360-a927-5418fbe09853","Type":"ContainerDied","Data":"25bef5062b7597c144efe7efc937ef6f4f3a99ab41f43189fec361821e5e269a"} Oct 10 17:37:52 crc kubenswrapper[4799]: I1010 17:37:52.197132 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-g2g6k" Oct 10 17:37:52 crc kubenswrapper[4799]: I1010 17:37:52.197151 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g2g6k" event={"ID":"d481aef8-cf0c-4360-a927-5418fbe09853","Type":"ContainerDied","Data":"2e9a068ee4803fe57ef77623abda7943bc40aa4cad1a8b744da715a975a47ceb"} Oct 10 17:37:52 crc kubenswrapper[4799]: I1010 17:37:52.197182 4799 scope.go:117] "RemoveContainer" containerID="25bef5062b7597c144efe7efc937ef6f4f3a99ab41f43189fec361821e5e269a" Oct 10 17:37:52 crc kubenswrapper[4799]: I1010 17:37:52.251135 4799 scope.go:117] "RemoveContainer" containerID="7d75dfb9b344a93a3ba573fe22a23fea53b4e9ed64e886fd93524f8c1a608875" Oct 10 17:37:52 crc kubenswrapper[4799]: I1010 17:37:52.253638 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-g2g6k"] Oct 10 17:37:52 crc kubenswrapper[4799]: I1010 17:37:52.259608 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-g2g6k"] Oct 10 17:37:52 crc kubenswrapper[4799]: I1010 17:37:52.277666 4799 scope.go:117] "RemoveContainer" containerID="c8e7c3334b17dc54d81eec77a761b918ce4510ee19a1f3218f9cbe06f29420cc" Oct 10 17:37:52 crc kubenswrapper[4799]: I1010 17:37:52.326109 4799 scope.go:117] "RemoveContainer" containerID="25bef5062b7597c144efe7efc937ef6f4f3a99ab41f43189fec361821e5e269a" Oct 10 17:37:52 crc kubenswrapper[4799]: E1010 17:37:52.327514 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25bef5062b7597c144efe7efc937ef6f4f3a99ab41f43189fec361821e5e269a\": container with ID starting with 25bef5062b7597c144efe7efc937ef6f4f3a99ab41f43189fec361821e5e269a not found: ID does not exist" containerID="25bef5062b7597c144efe7efc937ef6f4f3a99ab41f43189fec361821e5e269a" Oct 10 17:37:52 crc kubenswrapper[4799]: I1010 17:37:52.327653 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25bef5062b7597c144efe7efc937ef6f4f3a99ab41f43189fec361821e5e269a"} err="failed to get container status \"25bef5062b7597c144efe7efc937ef6f4f3a99ab41f43189fec361821e5e269a\": rpc error: code = NotFound desc = could not find container \"25bef5062b7597c144efe7efc937ef6f4f3a99ab41f43189fec361821e5e269a\": container with ID starting with 25bef5062b7597c144efe7efc937ef6f4f3a99ab41f43189fec361821e5e269a not found: ID does not exist" Oct 10 17:37:52 crc 
kubenswrapper[4799]: I1010 17:37:52.327778 4799 scope.go:117] "RemoveContainer" containerID="7d75dfb9b344a93a3ba573fe22a23fea53b4e9ed64e886fd93524f8c1a608875" Oct 10 17:37:52 crc kubenswrapper[4799]: E1010 17:37:52.328454 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d75dfb9b344a93a3ba573fe22a23fea53b4e9ed64e886fd93524f8c1a608875\": container with ID starting with 7d75dfb9b344a93a3ba573fe22a23fea53b4e9ed64e886fd93524f8c1a608875 not found: ID does not exist" containerID="7d75dfb9b344a93a3ba573fe22a23fea53b4e9ed64e886fd93524f8c1a608875" Oct 10 17:37:52 crc kubenswrapper[4799]: I1010 17:37:52.328514 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d75dfb9b344a93a3ba573fe22a23fea53b4e9ed64e886fd93524f8c1a608875"} err="failed to get container status \"7d75dfb9b344a93a3ba573fe22a23fea53b4e9ed64e886fd93524f8c1a608875\": rpc error: code = NotFound desc = could not find container \"7d75dfb9b344a93a3ba573fe22a23fea53b4e9ed64e886fd93524f8c1a608875\": container with ID starting with 7d75dfb9b344a93a3ba573fe22a23fea53b4e9ed64e886fd93524f8c1a608875 not found: ID does not exist" Oct 10 17:37:52 crc kubenswrapper[4799]: I1010 17:37:52.328553 4799 scope.go:117] "RemoveContainer" containerID="c8e7c3334b17dc54d81eec77a761b918ce4510ee19a1f3218f9cbe06f29420cc" Oct 10 17:37:52 crc kubenswrapper[4799]: E1010 17:37:52.328972 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8e7c3334b17dc54d81eec77a761b918ce4510ee19a1f3218f9cbe06f29420cc\": container with ID starting with c8e7c3334b17dc54d81eec77a761b918ce4510ee19a1f3218f9cbe06f29420cc not found: ID does not exist" containerID="c8e7c3334b17dc54d81eec77a761b918ce4510ee19a1f3218f9cbe06f29420cc" Oct 10 17:37:52 crc kubenswrapper[4799]: I1010 17:37:52.329002 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8e7c3334b17dc54d81eec77a761b918ce4510ee19a1f3218f9cbe06f29420cc"} err="failed to get container status \"c8e7c3334b17dc54d81eec77a761b918ce4510ee19a1f3218f9cbe06f29420cc\": rpc error: code = NotFound desc = could not find container \"c8e7c3334b17dc54d81eec77a761b918ce4510ee19a1f3218f9cbe06f29420cc\": container with ID starting with c8e7c3334b17dc54d81eec77a761b918ce4510ee19a1f3218f9cbe06f29420cc not found: ID does not exist" Oct 10 17:37:53 crc kubenswrapper[4799]: I1010 17:37:53.419346 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d481aef8-cf0c-4360-a927-5418fbe09853" path="/var/lib/kubelet/pods/d481aef8-cf0c-4360-a927-5418fbe09853/volumes" Oct 10 17:38:21 crc kubenswrapper[4799]: I1010 17:38:21.923925 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-nmn7j"] Oct 10 17:38:21 crc kubenswrapper[4799]: E1010 17:38:21.926711 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d481aef8-cf0c-4360-a927-5418fbe09853" containerName="extract-utilities" Oct 10 17:38:21 crc kubenswrapper[4799]: I1010 17:38:21.926971 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="d481aef8-cf0c-4360-a927-5418fbe09853" containerName="extract-utilities" Oct 10 17:38:21 crc kubenswrapper[4799]: E1010 17:38:21.927079 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d481aef8-cf0c-4360-a927-5418fbe09853" containerName="extract-content" Oct 10 17:38:21 crc kubenswrapper[4799]: I1010 17:38:21.927163 4799 
state_mem.go:107] "Deleted CPUSet assignment" podUID="d481aef8-cf0c-4360-a927-5418fbe09853" containerName="extract-content" Oct 10 17:38:21 crc kubenswrapper[4799]: E1010 17:38:21.927258 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d481aef8-cf0c-4360-a927-5418fbe09853" containerName="registry-server" Oct 10 17:38:21 crc kubenswrapper[4799]: I1010 17:38:21.927336 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="d481aef8-cf0c-4360-a927-5418fbe09853" containerName="registry-server" Oct 10 17:38:21 crc kubenswrapper[4799]: I1010 17:38:21.927625 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="d481aef8-cf0c-4360-a927-5418fbe09853" containerName="registry-server" Oct 10 17:38:21 crc kubenswrapper[4799]: I1010 17:38:21.929302 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nmn7j" Oct 10 17:38:21 crc kubenswrapper[4799]: I1010 17:38:21.943964 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nmn7j"] Oct 10 17:38:22 crc kubenswrapper[4799]: I1010 17:38:22.060557 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba7c6429-4930-4aaa-b0f2-3d7b87e373fc-catalog-content\") pod \"certified-operators-nmn7j\" (UID: \"ba7c6429-4930-4aaa-b0f2-3d7b87e373fc\") " pod="openshift-marketplace/certified-operators-nmn7j" Oct 10 17:38:22 crc kubenswrapper[4799]: I1010 17:38:22.060643 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5p7m2\" (UniqueName: \"kubernetes.io/projected/ba7c6429-4930-4aaa-b0f2-3d7b87e373fc-kube-api-access-5p7m2\") pod \"certified-operators-nmn7j\" (UID: \"ba7c6429-4930-4aaa-b0f2-3d7b87e373fc\") " pod="openshift-marketplace/certified-operators-nmn7j" Oct 10 17:38:22 crc kubenswrapper[4799]: I1010 17:38:22.060702 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba7c6429-4930-4aaa-b0f2-3d7b87e373fc-utilities\") pod \"certified-operators-nmn7j\" (UID: \"ba7c6429-4930-4aaa-b0f2-3d7b87e373fc\") " pod="openshift-marketplace/certified-operators-nmn7j" Oct 10 17:38:22 crc kubenswrapper[4799]: I1010 17:38:22.161885 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba7c6429-4930-4aaa-b0f2-3d7b87e373fc-catalog-content\") pod \"certified-operators-nmn7j\" (UID: \"ba7c6429-4930-4aaa-b0f2-3d7b87e373fc\") " pod="openshift-marketplace/certified-operators-nmn7j" Oct 10 17:38:22 crc kubenswrapper[4799]: I1010 17:38:22.161954 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5p7m2\" (UniqueName: \"kubernetes.io/projected/ba7c6429-4930-4aaa-b0f2-3d7b87e373fc-kube-api-access-5p7m2\") pod \"certified-operators-nmn7j\" (UID: \"ba7c6429-4930-4aaa-b0f2-3d7b87e373fc\") " pod="openshift-marketplace/certified-operators-nmn7j" Oct 10 17:38:22 crc kubenswrapper[4799]: I1010 17:38:22.161993 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba7c6429-4930-4aaa-b0f2-3d7b87e373fc-utilities\") pod \"certified-operators-nmn7j\" (UID: \"ba7c6429-4930-4aaa-b0f2-3d7b87e373fc\") " pod="openshift-marketplace/certified-operators-nmn7j" Oct 10 17:38:22 crc 
kubenswrapper[4799]: I1010 17:38:22.162374 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba7c6429-4930-4aaa-b0f2-3d7b87e373fc-utilities\") pod \"certified-operators-nmn7j\" (UID: \"ba7c6429-4930-4aaa-b0f2-3d7b87e373fc\") " pod="openshift-marketplace/certified-operators-nmn7j" Oct 10 17:38:22 crc kubenswrapper[4799]: I1010 17:38:22.162558 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba7c6429-4930-4aaa-b0f2-3d7b87e373fc-catalog-content\") pod \"certified-operators-nmn7j\" (UID: \"ba7c6429-4930-4aaa-b0f2-3d7b87e373fc\") " pod="openshift-marketplace/certified-operators-nmn7j" Oct 10 17:38:22 crc kubenswrapper[4799]: I1010 17:38:22.184703 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5p7m2\" (UniqueName: \"kubernetes.io/projected/ba7c6429-4930-4aaa-b0f2-3d7b87e373fc-kube-api-access-5p7m2\") pod \"certified-operators-nmn7j\" (UID: \"ba7c6429-4930-4aaa-b0f2-3d7b87e373fc\") " pod="openshift-marketplace/certified-operators-nmn7j" Oct 10 17:38:22 crc kubenswrapper[4799]: I1010 17:38:22.259056 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nmn7j" Oct 10 17:38:22 crc kubenswrapper[4799]: I1010 17:38:22.742993 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nmn7j"] Oct 10 17:38:23 crc kubenswrapper[4799]: I1010 17:38:23.522950 4799 generic.go:334] "Generic (PLEG): container finished" podID="ba7c6429-4930-4aaa-b0f2-3d7b87e373fc" containerID="ac27e8796fc56740f4aca9a74891d24145d8281403fb3f7c069c8b85a66aaace" exitCode=0 Oct 10 17:38:23 crc kubenswrapper[4799]: I1010 17:38:23.523414 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nmn7j" event={"ID":"ba7c6429-4930-4aaa-b0f2-3d7b87e373fc","Type":"ContainerDied","Data":"ac27e8796fc56740f4aca9a74891d24145d8281403fb3f7c069c8b85a66aaace"} Oct 10 17:38:23 crc kubenswrapper[4799]: I1010 17:38:23.523459 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nmn7j" event={"ID":"ba7c6429-4930-4aaa-b0f2-3d7b87e373fc","Type":"ContainerStarted","Data":"ed271d9641b79784a079a7f328160cdfd74c8eb1e17c050a6e7df290690ca6b9"} Oct 10 17:38:23 crc kubenswrapper[4799]: I1010 17:38:23.526255 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 10 17:38:25 crc kubenswrapper[4799]: I1010 17:38:25.587302 4799 generic.go:334] "Generic (PLEG): container finished" podID="ba7c6429-4930-4aaa-b0f2-3d7b87e373fc" containerID="a1c5be8cd3621c39d1e274eb7aeb787b228ce9b4ab80f7ad058dd5a48da23d74" exitCode=0 Oct 10 17:38:25 crc kubenswrapper[4799]: I1010 17:38:25.587355 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nmn7j" event={"ID":"ba7c6429-4930-4aaa-b0f2-3d7b87e373fc","Type":"ContainerDied","Data":"a1c5be8cd3621c39d1e274eb7aeb787b228ce9b4ab80f7ad058dd5a48da23d74"} Oct 10 17:38:26 crc kubenswrapper[4799]: I1010 17:38:26.598566 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nmn7j" event={"ID":"ba7c6429-4930-4aaa-b0f2-3d7b87e373fc","Type":"ContainerStarted","Data":"9dfe75bb2c96a7e8e41623c8707326a4305c6423d1275518f7f1618e06dc5398"} Oct 10 17:38:32 crc kubenswrapper[4799]: I1010 17:38:32.259340 4799 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-nmn7j" Oct 10 17:38:32 crc kubenswrapper[4799]: I1010 17:38:32.259908 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-nmn7j" Oct 10 17:38:32 crc kubenswrapper[4799]: I1010 17:38:32.354269 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-nmn7j" Oct 10 17:38:32 crc kubenswrapper[4799]: I1010 17:38:32.377319 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-nmn7j" podStartSLOduration=8.712487949 podStartE2EDuration="11.377301803s" podCreationTimestamp="2025-10-10 17:38:21 +0000 UTC" firstStartedPulling="2025-10-10 17:38:23.52583142 +0000 UTC m=+3997.034155575" lastFinishedPulling="2025-10-10 17:38:26.190645274 +0000 UTC m=+3999.698969429" observedRunningTime="2025-10-10 17:38:26.620731258 +0000 UTC m=+4000.129055373" watchObservedRunningTime="2025-10-10 17:38:32.377301803 +0000 UTC m=+4005.885625918" Oct 10 17:38:32 crc kubenswrapper[4799]: I1010 17:38:32.716353 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-nmn7j" Oct 10 17:38:32 crc kubenswrapper[4799]: I1010 17:38:32.781153 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nmn7j"] Oct 10 17:38:34 crc kubenswrapper[4799]: I1010 17:38:34.677590 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-nmn7j" podUID="ba7c6429-4930-4aaa-b0f2-3d7b87e373fc" containerName="registry-server" containerID="cri-o://9dfe75bb2c96a7e8e41623c8707326a4305c6423d1275518f7f1618e06dc5398" gracePeriod=2 Oct 10 17:38:35 crc kubenswrapper[4799]: I1010 17:38:35.140771 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nmn7j" Oct 10 17:38:35 crc kubenswrapper[4799]: I1010 17:38:35.194100 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba7c6429-4930-4aaa-b0f2-3d7b87e373fc-catalog-content\") pod \"ba7c6429-4930-4aaa-b0f2-3d7b87e373fc\" (UID: \"ba7c6429-4930-4aaa-b0f2-3d7b87e373fc\") " Oct 10 17:38:35 crc kubenswrapper[4799]: I1010 17:38:35.194208 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5p7m2\" (UniqueName: \"kubernetes.io/projected/ba7c6429-4930-4aaa-b0f2-3d7b87e373fc-kube-api-access-5p7m2\") pod \"ba7c6429-4930-4aaa-b0f2-3d7b87e373fc\" (UID: \"ba7c6429-4930-4aaa-b0f2-3d7b87e373fc\") " Oct 10 17:38:35 crc kubenswrapper[4799]: I1010 17:38:35.194275 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba7c6429-4930-4aaa-b0f2-3d7b87e373fc-utilities\") pod \"ba7c6429-4930-4aaa-b0f2-3d7b87e373fc\" (UID: \"ba7c6429-4930-4aaa-b0f2-3d7b87e373fc\") " Oct 10 17:38:35 crc kubenswrapper[4799]: I1010 17:38:35.195863 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba7c6429-4930-4aaa-b0f2-3d7b87e373fc-utilities" (OuterVolumeSpecName: "utilities") pod "ba7c6429-4930-4aaa-b0f2-3d7b87e373fc" (UID: "ba7c6429-4930-4aaa-b0f2-3d7b87e373fc"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:38:35 crc kubenswrapper[4799]: I1010 17:38:35.203378 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba7c6429-4930-4aaa-b0f2-3d7b87e373fc-kube-api-access-5p7m2" (OuterVolumeSpecName: "kube-api-access-5p7m2") pod "ba7c6429-4930-4aaa-b0f2-3d7b87e373fc" (UID: "ba7c6429-4930-4aaa-b0f2-3d7b87e373fc"). InnerVolumeSpecName "kube-api-access-5p7m2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:38:35 crc kubenswrapper[4799]: I1010 17:38:35.247259 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba7c6429-4930-4aaa-b0f2-3d7b87e373fc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ba7c6429-4930-4aaa-b0f2-3d7b87e373fc" (UID: "ba7c6429-4930-4aaa-b0f2-3d7b87e373fc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:38:35 crc kubenswrapper[4799]: I1010 17:38:35.295939 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba7c6429-4930-4aaa-b0f2-3d7b87e373fc-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 17:38:35 crc kubenswrapper[4799]: I1010 17:38:35.296067 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5p7m2\" (UniqueName: \"kubernetes.io/projected/ba7c6429-4930-4aaa-b0f2-3d7b87e373fc-kube-api-access-5p7m2\") on node \"crc\" DevicePath \"\"" Oct 10 17:38:35 crc kubenswrapper[4799]: I1010 17:38:35.296093 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba7c6429-4930-4aaa-b0f2-3d7b87e373fc-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 17:38:35 crc kubenswrapper[4799]: I1010 17:38:35.686213 4799 generic.go:334] "Generic (PLEG): container finished" podID="ba7c6429-4930-4aaa-b0f2-3d7b87e373fc" containerID="9dfe75bb2c96a7e8e41623c8707326a4305c6423d1275518f7f1618e06dc5398" exitCode=0 Oct 10 17:38:35 crc kubenswrapper[4799]: I1010 17:38:35.686274 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nmn7j" Oct 10 17:38:35 crc kubenswrapper[4799]: I1010 17:38:35.686294 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nmn7j" event={"ID":"ba7c6429-4930-4aaa-b0f2-3d7b87e373fc","Type":"ContainerDied","Data":"9dfe75bb2c96a7e8e41623c8707326a4305c6423d1275518f7f1618e06dc5398"} Oct 10 17:38:35 crc kubenswrapper[4799]: I1010 17:38:35.686691 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nmn7j" event={"ID":"ba7c6429-4930-4aaa-b0f2-3d7b87e373fc","Type":"ContainerDied","Data":"ed271d9641b79784a079a7f328160cdfd74c8eb1e17c050a6e7df290690ca6b9"} Oct 10 17:38:35 crc kubenswrapper[4799]: I1010 17:38:35.686708 4799 scope.go:117] "RemoveContainer" containerID="9dfe75bb2c96a7e8e41623c8707326a4305c6423d1275518f7f1618e06dc5398" Oct 10 17:38:35 crc kubenswrapper[4799]: I1010 17:38:35.716642 4799 scope.go:117] "RemoveContainer" containerID="a1c5be8cd3621c39d1e274eb7aeb787b228ce9b4ab80f7ad058dd5a48da23d74" Oct 10 17:38:35 crc kubenswrapper[4799]: I1010 17:38:35.721852 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nmn7j"] Oct 10 17:38:35 crc kubenswrapper[4799]: I1010 17:38:35.726975 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-nmn7j"] Oct 10 17:38:35 crc kubenswrapper[4799]: I1010 17:38:35.741398 4799 scope.go:117] "RemoveContainer" containerID="ac27e8796fc56740f4aca9a74891d24145d8281403fb3f7c069c8b85a66aaace" Oct 10 17:38:35 crc kubenswrapper[4799]: I1010 17:38:35.773209 4799 scope.go:117] "RemoveContainer" containerID="9dfe75bb2c96a7e8e41623c8707326a4305c6423d1275518f7f1618e06dc5398" Oct 10 17:38:35 crc kubenswrapper[4799]: E1010 17:38:35.773841 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9dfe75bb2c96a7e8e41623c8707326a4305c6423d1275518f7f1618e06dc5398\": container with ID starting with 9dfe75bb2c96a7e8e41623c8707326a4305c6423d1275518f7f1618e06dc5398 not found: ID does not exist" containerID="9dfe75bb2c96a7e8e41623c8707326a4305c6423d1275518f7f1618e06dc5398" Oct 10 17:38:35 crc kubenswrapper[4799]: I1010 17:38:35.773955 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9dfe75bb2c96a7e8e41623c8707326a4305c6423d1275518f7f1618e06dc5398"} err="failed to get container status \"9dfe75bb2c96a7e8e41623c8707326a4305c6423d1275518f7f1618e06dc5398\": rpc error: code = NotFound desc = could not find container \"9dfe75bb2c96a7e8e41623c8707326a4305c6423d1275518f7f1618e06dc5398\": container with ID starting with 9dfe75bb2c96a7e8e41623c8707326a4305c6423d1275518f7f1618e06dc5398 not found: ID does not exist" Oct 10 17:38:35 crc kubenswrapper[4799]: I1010 17:38:35.774032 4799 scope.go:117] "RemoveContainer" containerID="a1c5be8cd3621c39d1e274eb7aeb787b228ce9b4ab80f7ad058dd5a48da23d74" Oct 10 17:38:35 crc kubenswrapper[4799]: E1010 17:38:35.774281 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a1c5be8cd3621c39d1e274eb7aeb787b228ce9b4ab80f7ad058dd5a48da23d74\": container with ID starting with a1c5be8cd3621c39d1e274eb7aeb787b228ce9b4ab80f7ad058dd5a48da23d74 not found: ID does not exist" containerID="a1c5be8cd3621c39d1e274eb7aeb787b228ce9b4ab80f7ad058dd5a48da23d74" Oct 10 17:38:35 crc kubenswrapper[4799]: I1010 17:38:35.774357 4799 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1c5be8cd3621c39d1e274eb7aeb787b228ce9b4ab80f7ad058dd5a48da23d74"} err="failed to get container status \"a1c5be8cd3621c39d1e274eb7aeb787b228ce9b4ab80f7ad058dd5a48da23d74\": rpc error: code = NotFound desc = could not find container \"a1c5be8cd3621c39d1e274eb7aeb787b228ce9b4ab80f7ad058dd5a48da23d74\": container with ID starting with a1c5be8cd3621c39d1e274eb7aeb787b228ce9b4ab80f7ad058dd5a48da23d74 not found: ID does not exist" Oct 10 17:38:35 crc kubenswrapper[4799]: I1010 17:38:35.774437 4799 scope.go:117] "RemoveContainer" containerID="ac27e8796fc56740f4aca9a74891d24145d8281403fb3f7c069c8b85a66aaace" Oct 10 17:38:35 crc kubenswrapper[4799]: E1010 17:38:35.774771 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac27e8796fc56740f4aca9a74891d24145d8281403fb3f7c069c8b85a66aaace\": container with ID starting with ac27e8796fc56740f4aca9a74891d24145d8281403fb3f7c069c8b85a66aaace not found: ID does not exist" containerID="ac27e8796fc56740f4aca9a74891d24145d8281403fb3f7c069c8b85a66aaace" Oct 10 17:38:35 crc kubenswrapper[4799]: I1010 17:38:35.774861 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac27e8796fc56740f4aca9a74891d24145d8281403fb3f7c069c8b85a66aaace"} err="failed to get container status \"ac27e8796fc56740f4aca9a74891d24145d8281403fb3f7c069c8b85a66aaace\": rpc error: code = NotFound desc = could not find container \"ac27e8796fc56740f4aca9a74891d24145d8281403fb3f7c069c8b85a66aaace\": container with ID starting with ac27e8796fc56740f4aca9a74891d24145d8281403fb3f7c069c8b85a66aaace not found: ID does not exist" Oct 10 17:38:37 crc kubenswrapper[4799]: I1010 17:38:37.420350 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba7c6429-4930-4aaa-b0f2-3d7b87e373fc" path="/var/lib/kubelet/pods/ba7c6429-4930-4aaa-b0f2-3d7b87e373fc/volumes" Oct 10 17:38:45 crc kubenswrapper[4799]: I1010 17:38:45.249053 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 17:38:45 crc kubenswrapper[4799]: I1010 17:38:45.250250 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 17:39:15 crc kubenswrapper[4799]: I1010 17:39:15.248890 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 17:39:15 crc kubenswrapper[4799]: I1010 17:39:15.249953 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 17:39:45 crc kubenswrapper[4799]: I1010 
17:39:45.248743 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 17:39:45 crc kubenswrapper[4799]: I1010 17:39:45.249567 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 17:39:45 crc kubenswrapper[4799]: I1010 17:39:45.249633 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 17:39:45 crc kubenswrapper[4799]: I1010 17:39:45.250614 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b03f64a0a26d5fb94dfb90dcd1184b37fd9810e09a8440f1ed7eeed522926253"} pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 10 17:39:45 crc kubenswrapper[4799]: I1010 17:39:45.250712 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" containerID="cri-o://b03f64a0a26d5fb94dfb90dcd1184b37fd9810e09a8440f1ed7eeed522926253" gracePeriod=600 Oct 10 17:39:45 crc kubenswrapper[4799]: I1010 17:39:45.396853 4799 generic.go:334] "Generic (PLEG): container finished" podID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerID="b03f64a0a26d5fb94dfb90dcd1184b37fd9810e09a8440f1ed7eeed522926253" exitCode=0 Oct 10 17:39:45 crc kubenswrapper[4799]: I1010 17:39:45.396923 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerDied","Data":"b03f64a0a26d5fb94dfb90dcd1184b37fd9810e09a8440f1ed7eeed522926253"} Oct 10 17:39:45 crc kubenswrapper[4799]: I1010 17:39:45.396995 4799 scope.go:117] "RemoveContainer" containerID="50c1c49a2b0ab1d8d8b7206eccc727d542a604d2bf97ae52e942229bc6e50061" Oct 10 17:39:46 crc kubenswrapper[4799]: I1010 17:39:46.412712 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"fe197396c5be2d0959a18226c3a887cbfc22f79ce601687c7a015173f2073961"} Oct 10 17:41:45 crc kubenswrapper[4799]: I1010 17:41:45.249536 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 17:41:45 crc kubenswrapper[4799]: I1010 17:41:45.250326 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" Oct 10 17:42:15 crc kubenswrapper[4799]: I1010 17:42:15.249481 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 17:42:15 crc kubenswrapper[4799]: I1010 17:42:15.250343 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 17:42:45 crc kubenswrapper[4799]: I1010 17:42:45.248862 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 17:42:45 crc kubenswrapper[4799]: I1010 17:42:45.249742 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 17:42:45 crc kubenswrapper[4799]: I1010 17:42:45.249817 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 17:42:45 crc kubenswrapper[4799]: I1010 17:42:45.250392 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fe197396c5be2d0959a18226c3a887cbfc22f79ce601687c7a015173f2073961"} pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 10 17:42:45 crc kubenswrapper[4799]: I1010 17:42:45.250463 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" containerID="cri-o://fe197396c5be2d0959a18226c3a887cbfc22f79ce601687c7a015173f2073961" gracePeriod=600 Oct 10 17:42:45 crc kubenswrapper[4799]: E1010 17:42:45.381668 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:42:46 crc kubenswrapper[4799]: I1010 17:42:46.103020 4799 generic.go:334] "Generic (PLEG): container finished" podID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerID="fe197396c5be2d0959a18226c3a887cbfc22f79ce601687c7a015173f2073961" exitCode=0 Oct 10 17:42:46 crc kubenswrapper[4799]: I1010 17:42:46.103087 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" 
event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerDied","Data":"fe197396c5be2d0959a18226c3a887cbfc22f79ce601687c7a015173f2073961"} Oct 10 17:42:46 crc kubenswrapper[4799]: I1010 17:42:46.103132 4799 scope.go:117] "RemoveContainer" containerID="b03f64a0a26d5fb94dfb90dcd1184b37fd9810e09a8440f1ed7eeed522926253" Oct 10 17:42:46 crc kubenswrapper[4799]: I1010 17:42:46.104546 4799 scope.go:117] "RemoveContainer" containerID="fe197396c5be2d0959a18226c3a887cbfc22f79ce601687c7a015173f2073961" Oct 10 17:42:46 crc kubenswrapper[4799]: E1010 17:42:46.104827 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:42:58 crc kubenswrapper[4799]: I1010 17:42:58.403442 4799 scope.go:117] "RemoveContainer" containerID="fe197396c5be2d0959a18226c3a887cbfc22f79ce601687c7a015173f2073961" Oct 10 17:42:58 crc kubenswrapper[4799]: E1010 17:42:58.404451 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:43:13 crc kubenswrapper[4799]: I1010 17:43:13.403338 4799 scope.go:117] "RemoveContainer" containerID="fe197396c5be2d0959a18226c3a887cbfc22f79ce601687c7a015173f2073961" Oct 10 17:43:13 crc kubenswrapper[4799]: E1010 17:43:13.407601 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:43:28 crc kubenswrapper[4799]: I1010 17:43:28.403887 4799 scope.go:117] "RemoveContainer" containerID="fe197396c5be2d0959a18226c3a887cbfc22f79ce601687c7a015173f2073961" Oct 10 17:43:28 crc kubenswrapper[4799]: E1010 17:43:28.405143 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:43:40 crc kubenswrapper[4799]: I1010 17:43:40.403426 4799 scope.go:117] "RemoveContainer" containerID="fe197396c5be2d0959a18226c3a887cbfc22f79ce601687c7a015173f2073961" Oct 10 17:43:40 crc kubenswrapper[4799]: E1010 17:43:40.404179 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:43:53 crc kubenswrapper[4799]: I1010 17:43:53.403539 4799 scope.go:117] "RemoveContainer" containerID="fe197396c5be2d0959a18226c3a887cbfc22f79ce601687c7a015173f2073961" Oct 10 17:43:53 crc kubenswrapper[4799]: E1010 17:43:53.404452 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:44:01 crc kubenswrapper[4799]: I1010 17:44:01.851042 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-n8lwb"] Oct 10 17:44:01 crc kubenswrapper[4799]: E1010 17:44:01.852145 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba7c6429-4930-4aaa-b0f2-3d7b87e373fc" containerName="registry-server" Oct 10 17:44:01 crc kubenswrapper[4799]: I1010 17:44:01.852168 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba7c6429-4930-4aaa-b0f2-3d7b87e373fc" containerName="registry-server" Oct 10 17:44:01 crc kubenswrapper[4799]: E1010 17:44:01.852189 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba7c6429-4930-4aaa-b0f2-3d7b87e373fc" containerName="extract-content" Oct 10 17:44:01 crc kubenswrapper[4799]: I1010 17:44:01.852202 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba7c6429-4930-4aaa-b0f2-3d7b87e373fc" containerName="extract-content" Oct 10 17:44:01 crc kubenswrapper[4799]: E1010 17:44:01.852222 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba7c6429-4930-4aaa-b0f2-3d7b87e373fc" containerName="extract-utilities" Oct 10 17:44:01 crc kubenswrapper[4799]: I1010 17:44:01.852237 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba7c6429-4930-4aaa-b0f2-3d7b87e373fc" containerName="extract-utilities" Oct 10 17:44:01 crc kubenswrapper[4799]: I1010 17:44:01.852562 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba7c6429-4930-4aaa-b0f2-3d7b87e373fc" containerName="registry-server" Oct 10 17:44:01 crc kubenswrapper[4799]: I1010 17:44:01.858555 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n8lwb" Oct 10 17:44:01 crc kubenswrapper[4799]: I1010 17:44:01.861068 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-n8lwb"] Oct 10 17:44:01 crc kubenswrapper[4799]: I1010 17:44:01.957334 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cfcbc783-8f9f-4b08-8b37-401dc4df8880-catalog-content\") pod \"redhat-marketplace-n8lwb\" (UID: \"cfcbc783-8f9f-4b08-8b37-401dc4df8880\") " pod="openshift-marketplace/redhat-marketplace-n8lwb" Oct 10 17:44:01 crc kubenswrapper[4799]: I1010 17:44:01.957492 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d9gnx\" (UniqueName: \"kubernetes.io/projected/cfcbc783-8f9f-4b08-8b37-401dc4df8880-kube-api-access-d9gnx\") pod \"redhat-marketplace-n8lwb\" (UID: \"cfcbc783-8f9f-4b08-8b37-401dc4df8880\") " pod="openshift-marketplace/redhat-marketplace-n8lwb" Oct 10 17:44:01 crc kubenswrapper[4799]: I1010 17:44:01.957634 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cfcbc783-8f9f-4b08-8b37-401dc4df8880-utilities\") pod \"redhat-marketplace-n8lwb\" (UID: \"cfcbc783-8f9f-4b08-8b37-401dc4df8880\") " pod="openshift-marketplace/redhat-marketplace-n8lwb" Oct 10 17:44:02 crc kubenswrapper[4799]: I1010 17:44:02.058934 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cfcbc783-8f9f-4b08-8b37-401dc4df8880-catalog-content\") pod \"redhat-marketplace-n8lwb\" (UID: \"cfcbc783-8f9f-4b08-8b37-401dc4df8880\") " pod="openshift-marketplace/redhat-marketplace-n8lwb" Oct 10 17:44:02 crc kubenswrapper[4799]: I1010 17:44:02.059028 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9gnx\" (UniqueName: \"kubernetes.io/projected/cfcbc783-8f9f-4b08-8b37-401dc4df8880-kube-api-access-d9gnx\") pod \"redhat-marketplace-n8lwb\" (UID: \"cfcbc783-8f9f-4b08-8b37-401dc4df8880\") " pod="openshift-marketplace/redhat-marketplace-n8lwb" Oct 10 17:44:02 crc kubenswrapper[4799]: I1010 17:44:02.059080 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cfcbc783-8f9f-4b08-8b37-401dc4df8880-utilities\") pod \"redhat-marketplace-n8lwb\" (UID: \"cfcbc783-8f9f-4b08-8b37-401dc4df8880\") " pod="openshift-marketplace/redhat-marketplace-n8lwb" Oct 10 17:44:02 crc kubenswrapper[4799]: I1010 17:44:02.059601 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cfcbc783-8f9f-4b08-8b37-401dc4df8880-catalog-content\") pod \"redhat-marketplace-n8lwb\" (UID: \"cfcbc783-8f9f-4b08-8b37-401dc4df8880\") " pod="openshift-marketplace/redhat-marketplace-n8lwb" Oct 10 17:44:02 crc kubenswrapper[4799]: I1010 17:44:02.059617 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cfcbc783-8f9f-4b08-8b37-401dc4df8880-utilities\") pod \"redhat-marketplace-n8lwb\" (UID: \"cfcbc783-8f9f-4b08-8b37-401dc4df8880\") " pod="openshift-marketplace/redhat-marketplace-n8lwb" Oct 10 17:44:02 crc kubenswrapper[4799]: I1010 17:44:02.091328 4799 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-d9gnx\" (UniqueName: \"kubernetes.io/projected/cfcbc783-8f9f-4b08-8b37-401dc4df8880-kube-api-access-d9gnx\") pod \"redhat-marketplace-n8lwb\" (UID: \"cfcbc783-8f9f-4b08-8b37-401dc4df8880\") " pod="openshift-marketplace/redhat-marketplace-n8lwb" Oct 10 17:44:02 crc kubenswrapper[4799]: I1010 17:44:02.188445 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n8lwb" Oct 10 17:44:02 crc kubenswrapper[4799]: I1010 17:44:02.699709 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-n8lwb"] Oct 10 17:44:02 crc kubenswrapper[4799]: I1010 17:44:02.902332 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n8lwb" event={"ID":"cfcbc783-8f9f-4b08-8b37-401dc4df8880","Type":"ContainerStarted","Data":"805f54ee046fc7322be64da27cd4e00ea044ae05a1e270bc375e46d8d890a28e"} Oct 10 17:44:03 crc kubenswrapper[4799]: I1010 17:44:03.911505 4799 generic.go:334] "Generic (PLEG): container finished" podID="cfcbc783-8f9f-4b08-8b37-401dc4df8880" containerID="4bc7ab263e07a55b58ba62181b232634f9c30d6e661f0fcf368e60a38e400417" exitCode=0 Oct 10 17:44:03 crc kubenswrapper[4799]: I1010 17:44:03.911600 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n8lwb" event={"ID":"cfcbc783-8f9f-4b08-8b37-401dc4df8880","Type":"ContainerDied","Data":"4bc7ab263e07a55b58ba62181b232634f9c30d6e661f0fcf368e60a38e400417"} Oct 10 17:44:03 crc kubenswrapper[4799]: I1010 17:44:03.913581 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 10 17:44:04 crc kubenswrapper[4799]: I1010 17:44:04.924818 4799 generic.go:334] "Generic (PLEG): container finished" podID="cfcbc783-8f9f-4b08-8b37-401dc4df8880" containerID="c937b379420fd8ccc01953d254b09fd07f684868e7636a55ec9f348349d75f1b" exitCode=0 Oct 10 17:44:04 crc kubenswrapper[4799]: I1010 17:44:04.924900 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n8lwb" event={"ID":"cfcbc783-8f9f-4b08-8b37-401dc4df8880","Type":"ContainerDied","Data":"c937b379420fd8ccc01953d254b09fd07f684868e7636a55ec9f348349d75f1b"} Oct 10 17:44:05 crc kubenswrapper[4799]: I1010 17:44:05.936318 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n8lwb" event={"ID":"cfcbc783-8f9f-4b08-8b37-401dc4df8880","Type":"ContainerStarted","Data":"c7061fba0a7fdc40cbd1a42d404bc208ba05c223c3642be950b44ce1826a5724"} Oct 10 17:44:05 crc kubenswrapper[4799]: I1010 17:44:05.971661 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-n8lwb" podStartSLOduration=3.410568846 podStartE2EDuration="4.971629657s" podCreationTimestamp="2025-10-10 17:44:01 +0000 UTC" firstStartedPulling="2025-10-10 17:44:03.9132873 +0000 UTC m=+4337.421611425" lastFinishedPulling="2025-10-10 17:44:05.474348121 +0000 UTC m=+4338.982672236" observedRunningTime="2025-10-10 17:44:05.960203618 +0000 UTC m=+4339.468527753" watchObservedRunningTime="2025-10-10 17:44:05.971629657 +0000 UTC m=+4339.479953812" Oct 10 17:44:07 crc kubenswrapper[4799]: I1010 17:44:07.408257 4799 scope.go:117] "RemoveContainer" containerID="fe197396c5be2d0959a18226c3a887cbfc22f79ce601687c7a015173f2073961" Oct 10 17:44:07 crc kubenswrapper[4799]: E1010 17:44:07.409993 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:44:12 crc kubenswrapper[4799]: I1010 17:44:12.188629 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-n8lwb" Oct 10 17:44:12 crc kubenswrapper[4799]: I1010 17:44:12.188986 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-n8lwb" Oct 10 17:44:12 crc kubenswrapper[4799]: I1010 17:44:12.254393 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-n8lwb" Oct 10 17:44:13 crc kubenswrapper[4799]: I1010 17:44:13.067099 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-n8lwb" Oct 10 17:44:13 crc kubenswrapper[4799]: I1010 17:44:13.133603 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-n8lwb"] Oct 10 17:44:15 crc kubenswrapper[4799]: I1010 17:44:15.015221 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-n8lwb" podUID="cfcbc783-8f9f-4b08-8b37-401dc4df8880" containerName="registry-server" containerID="cri-o://c7061fba0a7fdc40cbd1a42d404bc208ba05c223c3642be950b44ce1826a5724" gracePeriod=2 Oct 10 17:44:15 crc kubenswrapper[4799]: I1010 17:44:15.484718 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n8lwb" Oct 10 17:44:15 crc kubenswrapper[4799]: I1010 17:44:15.594444 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cfcbc783-8f9f-4b08-8b37-401dc4df8880-catalog-content\") pod \"cfcbc783-8f9f-4b08-8b37-401dc4df8880\" (UID: \"cfcbc783-8f9f-4b08-8b37-401dc4df8880\") " Oct 10 17:44:15 crc kubenswrapper[4799]: I1010 17:44:15.594599 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d9gnx\" (UniqueName: \"kubernetes.io/projected/cfcbc783-8f9f-4b08-8b37-401dc4df8880-kube-api-access-d9gnx\") pod \"cfcbc783-8f9f-4b08-8b37-401dc4df8880\" (UID: \"cfcbc783-8f9f-4b08-8b37-401dc4df8880\") " Oct 10 17:44:15 crc kubenswrapper[4799]: I1010 17:44:15.594687 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cfcbc783-8f9f-4b08-8b37-401dc4df8880-utilities\") pod \"cfcbc783-8f9f-4b08-8b37-401dc4df8880\" (UID: \"cfcbc783-8f9f-4b08-8b37-401dc4df8880\") " Oct 10 17:44:15 crc kubenswrapper[4799]: I1010 17:44:15.596783 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cfcbc783-8f9f-4b08-8b37-401dc4df8880-utilities" (OuterVolumeSpecName: "utilities") pod "cfcbc783-8f9f-4b08-8b37-401dc4df8880" (UID: "cfcbc783-8f9f-4b08-8b37-401dc4df8880"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:44:15 crc kubenswrapper[4799]: I1010 17:44:15.602084 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cfcbc783-8f9f-4b08-8b37-401dc4df8880-kube-api-access-d9gnx" (OuterVolumeSpecName: "kube-api-access-d9gnx") pod "cfcbc783-8f9f-4b08-8b37-401dc4df8880" (UID: "cfcbc783-8f9f-4b08-8b37-401dc4df8880"). InnerVolumeSpecName "kube-api-access-d9gnx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:44:15 crc kubenswrapper[4799]: I1010 17:44:15.620430 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cfcbc783-8f9f-4b08-8b37-401dc4df8880-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cfcbc783-8f9f-4b08-8b37-401dc4df8880" (UID: "cfcbc783-8f9f-4b08-8b37-401dc4df8880"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:44:15 crc kubenswrapper[4799]: I1010 17:44:15.699387 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cfcbc783-8f9f-4b08-8b37-401dc4df8880-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 17:44:15 crc kubenswrapper[4799]: I1010 17:44:15.699502 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cfcbc783-8f9f-4b08-8b37-401dc4df8880-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 17:44:15 crc kubenswrapper[4799]: I1010 17:44:15.699533 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d9gnx\" (UniqueName: \"kubernetes.io/projected/cfcbc783-8f9f-4b08-8b37-401dc4df8880-kube-api-access-d9gnx\") on node \"crc\" DevicePath \"\"" Oct 10 17:44:16 crc kubenswrapper[4799]: I1010 17:44:16.023410 4799 generic.go:334] "Generic (PLEG): container finished" podID="cfcbc783-8f9f-4b08-8b37-401dc4df8880" containerID="c7061fba0a7fdc40cbd1a42d404bc208ba05c223c3642be950b44ce1826a5724" exitCode=0 Oct 10 17:44:16 crc kubenswrapper[4799]: I1010 17:44:16.023474 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n8lwb" event={"ID":"cfcbc783-8f9f-4b08-8b37-401dc4df8880","Type":"ContainerDied","Data":"c7061fba0a7fdc40cbd1a42d404bc208ba05c223c3642be950b44ce1826a5724"} Oct 10 17:44:16 crc kubenswrapper[4799]: I1010 17:44:16.023540 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n8lwb" event={"ID":"cfcbc783-8f9f-4b08-8b37-401dc4df8880","Type":"ContainerDied","Data":"805f54ee046fc7322be64da27cd4e00ea044ae05a1e270bc375e46d8d890a28e"} Oct 10 17:44:16 crc kubenswrapper[4799]: I1010 17:44:16.023564 4799 scope.go:117] "RemoveContainer" containerID="c7061fba0a7fdc40cbd1a42d404bc208ba05c223c3642be950b44ce1826a5724" Oct 10 17:44:16 crc kubenswrapper[4799]: I1010 17:44:16.026807 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n8lwb" Oct 10 17:44:16 crc kubenswrapper[4799]: I1010 17:44:16.047957 4799 scope.go:117] "RemoveContainer" containerID="c937b379420fd8ccc01953d254b09fd07f684868e7636a55ec9f348349d75f1b" Oct 10 17:44:16 crc kubenswrapper[4799]: I1010 17:44:16.079696 4799 scope.go:117] "RemoveContainer" containerID="4bc7ab263e07a55b58ba62181b232634f9c30d6e661f0fcf368e60a38e400417" Oct 10 17:44:16 crc kubenswrapper[4799]: I1010 17:44:16.085710 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-n8lwb"] Oct 10 17:44:16 crc kubenswrapper[4799]: I1010 17:44:16.093414 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-n8lwb"] Oct 10 17:44:16 crc kubenswrapper[4799]: I1010 17:44:16.119699 4799 scope.go:117] "RemoveContainer" containerID="c7061fba0a7fdc40cbd1a42d404bc208ba05c223c3642be950b44ce1826a5724" Oct 10 17:44:16 crc kubenswrapper[4799]: E1010 17:44:16.120357 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c7061fba0a7fdc40cbd1a42d404bc208ba05c223c3642be950b44ce1826a5724\": container with ID starting with c7061fba0a7fdc40cbd1a42d404bc208ba05c223c3642be950b44ce1826a5724 not found: ID does not exist" containerID="c7061fba0a7fdc40cbd1a42d404bc208ba05c223c3642be950b44ce1826a5724" Oct 10 17:44:16 crc kubenswrapper[4799]: I1010 17:44:16.120461 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7061fba0a7fdc40cbd1a42d404bc208ba05c223c3642be950b44ce1826a5724"} err="failed to get container status \"c7061fba0a7fdc40cbd1a42d404bc208ba05c223c3642be950b44ce1826a5724\": rpc error: code = NotFound desc = could not find container \"c7061fba0a7fdc40cbd1a42d404bc208ba05c223c3642be950b44ce1826a5724\": container with ID starting with c7061fba0a7fdc40cbd1a42d404bc208ba05c223c3642be950b44ce1826a5724 not found: ID does not exist" Oct 10 17:44:16 crc kubenswrapper[4799]: I1010 17:44:16.120551 4799 scope.go:117] "RemoveContainer" containerID="c937b379420fd8ccc01953d254b09fd07f684868e7636a55ec9f348349d75f1b" Oct 10 17:44:16 crc kubenswrapper[4799]: E1010 17:44:16.121001 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c937b379420fd8ccc01953d254b09fd07f684868e7636a55ec9f348349d75f1b\": container with ID starting with c937b379420fd8ccc01953d254b09fd07f684868e7636a55ec9f348349d75f1b not found: ID does not exist" containerID="c937b379420fd8ccc01953d254b09fd07f684868e7636a55ec9f348349d75f1b" Oct 10 17:44:16 crc kubenswrapper[4799]: I1010 17:44:16.121045 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c937b379420fd8ccc01953d254b09fd07f684868e7636a55ec9f348349d75f1b"} err="failed to get container status \"c937b379420fd8ccc01953d254b09fd07f684868e7636a55ec9f348349d75f1b\": rpc error: code = NotFound desc = could not find container \"c937b379420fd8ccc01953d254b09fd07f684868e7636a55ec9f348349d75f1b\": container with ID starting with c937b379420fd8ccc01953d254b09fd07f684868e7636a55ec9f348349d75f1b not found: ID does not exist" Oct 10 17:44:16 crc kubenswrapper[4799]: I1010 17:44:16.121075 4799 scope.go:117] "RemoveContainer" containerID="4bc7ab263e07a55b58ba62181b232634f9c30d6e661f0fcf368e60a38e400417" Oct 10 17:44:16 crc kubenswrapper[4799]: E1010 17:44:16.121439 4799 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"4bc7ab263e07a55b58ba62181b232634f9c30d6e661f0fcf368e60a38e400417\": container with ID starting with 4bc7ab263e07a55b58ba62181b232634f9c30d6e661f0fcf368e60a38e400417 not found: ID does not exist" containerID="4bc7ab263e07a55b58ba62181b232634f9c30d6e661f0fcf368e60a38e400417" Oct 10 17:44:16 crc kubenswrapper[4799]: I1010 17:44:16.121556 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4bc7ab263e07a55b58ba62181b232634f9c30d6e661f0fcf368e60a38e400417"} err="failed to get container status \"4bc7ab263e07a55b58ba62181b232634f9c30d6e661f0fcf368e60a38e400417\": rpc error: code = NotFound desc = could not find container \"4bc7ab263e07a55b58ba62181b232634f9c30d6e661f0fcf368e60a38e400417\": container with ID starting with 4bc7ab263e07a55b58ba62181b232634f9c30d6e661f0fcf368e60a38e400417 not found: ID does not exist" Oct 10 17:44:17 crc kubenswrapper[4799]: I1010 17:44:17.421327 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cfcbc783-8f9f-4b08-8b37-401dc4df8880" path="/var/lib/kubelet/pods/cfcbc783-8f9f-4b08-8b37-401dc4df8880/volumes" Oct 10 17:44:20 crc kubenswrapper[4799]: I1010 17:44:20.402587 4799 scope.go:117] "RemoveContainer" containerID="fe197396c5be2d0959a18226c3a887cbfc22f79ce601687c7a015173f2073961" Oct 10 17:44:20 crc kubenswrapper[4799]: E1010 17:44:20.403204 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:44:32 crc kubenswrapper[4799]: I1010 17:44:32.402642 4799 scope.go:117] "RemoveContainer" containerID="fe197396c5be2d0959a18226c3a887cbfc22f79ce601687c7a015173f2073961" Oct 10 17:44:32 crc kubenswrapper[4799]: E1010 17:44:32.403578 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:44:45 crc kubenswrapper[4799]: I1010 17:44:45.402614 4799 scope.go:117] "RemoveContainer" containerID="fe197396c5be2d0959a18226c3a887cbfc22f79ce601687c7a015173f2073961" Oct 10 17:44:45 crc kubenswrapper[4799]: E1010 17:44:45.403484 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:44:58 crc kubenswrapper[4799]: I1010 17:44:58.403117 4799 scope.go:117] "RemoveContainer" containerID="fe197396c5be2d0959a18226c3a887cbfc22f79ce601687c7a015173f2073961" Oct 10 17:44:58 crc kubenswrapper[4799]: E1010 17:44:58.405716 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:45:00 crc kubenswrapper[4799]: I1010 17:45:00.166084 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335305-gm8nb"] Oct 10 17:45:00 crc kubenswrapper[4799]: E1010 17:45:00.167859 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfcbc783-8f9f-4b08-8b37-401dc4df8880" containerName="extract-utilities" Oct 10 17:45:00 crc kubenswrapper[4799]: I1010 17:45:00.168041 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfcbc783-8f9f-4b08-8b37-401dc4df8880" containerName="extract-utilities" Oct 10 17:45:00 crc kubenswrapper[4799]: E1010 17:45:00.168378 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfcbc783-8f9f-4b08-8b37-401dc4df8880" containerName="registry-server" Oct 10 17:45:00 crc kubenswrapper[4799]: I1010 17:45:00.168529 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfcbc783-8f9f-4b08-8b37-401dc4df8880" containerName="registry-server" Oct 10 17:45:00 crc kubenswrapper[4799]: E1010 17:45:00.168683 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfcbc783-8f9f-4b08-8b37-401dc4df8880" containerName="extract-content" Oct 10 17:45:00 crc kubenswrapper[4799]: I1010 17:45:00.168836 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfcbc783-8f9f-4b08-8b37-401dc4df8880" containerName="extract-content" Oct 10 17:45:00 crc kubenswrapper[4799]: I1010 17:45:00.169217 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfcbc783-8f9f-4b08-8b37-401dc4df8880" containerName="registry-server" Oct 10 17:45:00 crc kubenswrapper[4799]: I1010 17:45:00.170210 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335305-gm8nb" Oct 10 17:45:00 crc kubenswrapper[4799]: I1010 17:45:00.173923 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 10 17:45:00 crc kubenswrapper[4799]: I1010 17:45:00.174675 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 10 17:45:00 crc kubenswrapper[4799]: I1010 17:45:00.186096 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335305-gm8nb"] Oct 10 17:45:00 crc kubenswrapper[4799]: I1010 17:45:00.340285 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tptd7\" (UniqueName: \"kubernetes.io/projected/36939816-2c24-423d-8361-9471625ae3f5-kube-api-access-tptd7\") pod \"collect-profiles-29335305-gm8nb\" (UID: \"36939816-2c24-423d-8361-9471625ae3f5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335305-gm8nb" Oct 10 17:45:00 crc kubenswrapper[4799]: I1010 17:45:00.340689 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/36939816-2c24-423d-8361-9471625ae3f5-config-volume\") pod \"collect-profiles-29335305-gm8nb\" (UID: \"36939816-2c24-423d-8361-9471625ae3f5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335305-gm8nb" Oct 10 17:45:00 crc kubenswrapper[4799]: I1010 17:45:00.340733 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/36939816-2c24-423d-8361-9471625ae3f5-secret-volume\") pod \"collect-profiles-29335305-gm8nb\" (UID: \"36939816-2c24-423d-8361-9471625ae3f5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335305-gm8nb" Oct 10 17:45:00 crc kubenswrapper[4799]: I1010 17:45:00.442429 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/36939816-2c24-423d-8361-9471625ae3f5-config-volume\") pod \"collect-profiles-29335305-gm8nb\" (UID: \"36939816-2c24-423d-8361-9471625ae3f5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335305-gm8nb" Oct 10 17:45:00 crc kubenswrapper[4799]: I1010 17:45:00.442487 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/36939816-2c24-423d-8361-9471625ae3f5-secret-volume\") pod \"collect-profiles-29335305-gm8nb\" (UID: \"36939816-2c24-423d-8361-9471625ae3f5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335305-gm8nb" Oct 10 17:45:00 crc kubenswrapper[4799]: I1010 17:45:00.442567 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tptd7\" (UniqueName: \"kubernetes.io/projected/36939816-2c24-423d-8361-9471625ae3f5-kube-api-access-tptd7\") pod \"collect-profiles-29335305-gm8nb\" (UID: \"36939816-2c24-423d-8361-9471625ae3f5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335305-gm8nb" Oct 10 17:45:00 crc kubenswrapper[4799]: I1010 17:45:00.443492 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/36939816-2c24-423d-8361-9471625ae3f5-config-volume\") pod 
\"collect-profiles-29335305-gm8nb\" (UID: \"36939816-2c24-423d-8361-9471625ae3f5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335305-gm8nb" Oct 10 17:45:00 crc kubenswrapper[4799]: I1010 17:45:00.448578 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/36939816-2c24-423d-8361-9471625ae3f5-secret-volume\") pod \"collect-profiles-29335305-gm8nb\" (UID: \"36939816-2c24-423d-8361-9471625ae3f5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335305-gm8nb" Oct 10 17:45:00 crc kubenswrapper[4799]: I1010 17:45:00.462560 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tptd7\" (UniqueName: \"kubernetes.io/projected/36939816-2c24-423d-8361-9471625ae3f5-kube-api-access-tptd7\") pod \"collect-profiles-29335305-gm8nb\" (UID: \"36939816-2c24-423d-8361-9471625ae3f5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335305-gm8nb" Oct 10 17:45:00 crc kubenswrapper[4799]: I1010 17:45:00.518664 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335305-gm8nb" Oct 10 17:45:01 crc kubenswrapper[4799]: I1010 17:45:01.018283 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335305-gm8nb"] Oct 10 17:45:01 crc kubenswrapper[4799]: W1010 17:45:01.022273 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod36939816_2c24_423d_8361_9471625ae3f5.slice/crio-ddb5d38e8206f204c6f62acaa74ccb0de24f7a08a0729b1d3aad1a549a97c4cd WatchSource:0}: Error finding container ddb5d38e8206f204c6f62acaa74ccb0de24f7a08a0729b1d3aad1a549a97c4cd: Status 404 returned error can't find the container with id ddb5d38e8206f204c6f62acaa74ccb0de24f7a08a0729b1d3aad1a549a97c4cd Oct 10 17:45:01 crc kubenswrapper[4799]: I1010 17:45:01.488615 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335305-gm8nb" event={"ID":"36939816-2c24-423d-8361-9471625ae3f5","Type":"ContainerStarted","Data":"9668bfa07bd433551238d3440b7abcfac0d8f45d2a70d7bf7d07b2d2ca20effe"} Oct 10 17:45:01 crc kubenswrapper[4799]: I1010 17:45:01.488665 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335305-gm8nb" event={"ID":"36939816-2c24-423d-8361-9471625ae3f5","Type":"ContainerStarted","Data":"ddb5d38e8206f204c6f62acaa74ccb0de24f7a08a0729b1d3aad1a549a97c4cd"} Oct 10 17:45:01 crc kubenswrapper[4799]: I1010 17:45:01.514043 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29335305-gm8nb" podStartSLOduration=1.514018431 podStartE2EDuration="1.514018431s" podCreationTimestamp="2025-10-10 17:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 17:45:01.505174984 +0000 UTC m=+4395.013499189" watchObservedRunningTime="2025-10-10 17:45:01.514018431 +0000 UTC m=+4395.022342576" Oct 10 17:45:02 crc kubenswrapper[4799]: I1010 17:45:02.524434 4799 generic.go:334] "Generic (PLEG): container finished" podID="36939816-2c24-423d-8361-9471625ae3f5" containerID="9668bfa07bd433551238d3440b7abcfac0d8f45d2a70d7bf7d07b2d2ca20effe" exitCode=0 Oct 10 17:45:02 crc kubenswrapper[4799]: I1010 17:45:02.524509 
4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335305-gm8nb" event={"ID":"36939816-2c24-423d-8361-9471625ae3f5","Type":"ContainerDied","Data":"9668bfa07bd433551238d3440b7abcfac0d8f45d2a70d7bf7d07b2d2ca20effe"} Oct 10 17:45:03 crc kubenswrapper[4799]: I1010 17:45:03.822824 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335305-gm8nb" Oct 10 17:45:04 crc kubenswrapper[4799]: I1010 17:45:04.000361 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/36939816-2c24-423d-8361-9471625ae3f5-config-volume\") pod \"36939816-2c24-423d-8361-9471625ae3f5\" (UID: \"36939816-2c24-423d-8361-9471625ae3f5\") " Oct 10 17:45:04 crc kubenswrapper[4799]: I1010 17:45:04.000453 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tptd7\" (UniqueName: \"kubernetes.io/projected/36939816-2c24-423d-8361-9471625ae3f5-kube-api-access-tptd7\") pod \"36939816-2c24-423d-8361-9471625ae3f5\" (UID: \"36939816-2c24-423d-8361-9471625ae3f5\") " Oct 10 17:45:04 crc kubenswrapper[4799]: I1010 17:45:04.000515 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/36939816-2c24-423d-8361-9471625ae3f5-secret-volume\") pod \"36939816-2c24-423d-8361-9471625ae3f5\" (UID: \"36939816-2c24-423d-8361-9471625ae3f5\") " Oct 10 17:45:04 crc kubenswrapper[4799]: I1010 17:45:04.001110 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/36939816-2c24-423d-8361-9471625ae3f5-config-volume" (OuterVolumeSpecName: "config-volume") pod "36939816-2c24-423d-8361-9471625ae3f5" (UID: "36939816-2c24-423d-8361-9471625ae3f5"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 17:45:04 crc kubenswrapper[4799]: I1010 17:45:04.002050 4799 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/36939816-2c24-423d-8361-9471625ae3f5-config-volume\") on node \"crc\" DevicePath \"\"" Oct 10 17:45:04 crc kubenswrapper[4799]: I1010 17:45:04.011073 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36939816-2c24-423d-8361-9471625ae3f5-kube-api-access-tptd7" (OuterVolumeSpecName: "kube-api-access-tptd7") pod "36939816-2c24-423d-8361-9471625ae3f5" (UID: "36939816-2c24-423d-8361-9471625ae3f5"). InnerVolumeSpecName "kube-api-access-tptd7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:45:04 crc kubenswrapper[4799]: I1010 17:45:04.012973 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36939816-2c24-423d-8361-9471625ae3f5-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "36939816-2c24-423d-8361-9471625ae3f5" (UID: "36939816-2c24-423d-8361-9471625ae3f5"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 17:45:04 crc kubenswrapper[4799]: I1010 17:45:04.103813 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tptd7\" (UniqueName: \"kubernetes.io/projected/36939816-2c24-423d-8361-9471625ae3f5-kube-api-access-tptd7\") on node \"crc\" DevicePath \"\"" Oct 10 17:45:04 crc kubenswrapper[4799]: I1010 17:45:04.103870 4799 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/36939816-2c24-423d-8361-9471625ae3f5-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 10 17:45:04 crc kubenswrapper[4799]: I1010 17:45:04.552506 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335305-gm8nb" event={"ID":"36939816-2c24-423d-8361-9471625ae3f5","Type":"ContainerDied","Data":"ddb5d38e8206f204c6f62acaa74ccb0de24f7a08a0729b1d3aad1a549a97c4cd"} Oct 10 17:45:04 crc kubenswrapper[4799]: I1010 17:45:04.552570 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ddb5d38e8206f204c6f62acaa74ccb0de24f7a08a0729b1d3aad1a549a97c4cd" Oct 10 17:45:04 crc kubenswrapper[4799]: I1010 17:45:04.552581 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335305-gm8nb" Oct 10 17:45:04 crc kubenswrapper[4799]: I1010 17:45:04.601013 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335260-bpk9t"] Oct 10 17:45:04 crc kubenswrapper[4799]: I1010 17:45:04.608687 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335260-bpk9t"] Oct 10 17:45:05 crc kubenswrapper[4799]: I1010 17:45:05.418011 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca" path="/var/lib/kubelet/pods/0c7420b8-4eaa-4820-bf6f-a7fce3d2cfca/volumes" Oct 10 17:45:06 crc kubenswrapper[4799]: I1010 17:45:06.107239 4799 scope.go:117] "RemoveContainer" containerID="a39d6697438fc86ddd6c1ef500bcdc5df4792167e65c6301313efcfb45597a19" Oct 10 17:45:11 crc kubenswrapper[4799]: I1010 17:45:11.402472 4799 scope.go:117] "RemoveContainer" containerID="fe197396c5be2d0959a18226c3a887cbfc22f79ce601687c7a015173f2073961" Oct 10 17:45:11 crc kubenswrapper[4799]: E1010 17:45:11.403321 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:45:23 crc kubenswrapper[4799]: I1010 17:45:23.403682 4799 scope.go:117] "RemoveContainer" containerID="fe197396c5be2d0959a18226c3a887cbfc22f79ce601687c7a015173f2073961" Oct 10 17:45:23 crc kubenswrapper[4799]: E1010 17:45:23.404654 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:45:35 
crc kubenswrapper[4799]: I1010 17:45:35.403724 4799 scope.go:117] "RemoveContainer" containerID="fe197396c5be2d0959a18226c3a887cbfc22f79ce601687c7a015173f2073961" Oct 10 17:45:35 crc kubenswrapper[4799]: E1010 17:45:35.406443 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:45:47 crc kubenswrapper[4799]: I1010 17:45:47.410145 4799 scope.go:117] "RemoveContainer" containerID="fe197396c5be2d0959a18226c3a887cbfc22f79ce601687c7a015173f2073961" Oct 10 17:45:47 crc kubenswrapper[4799]: E1010 17:45:47.410896 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:46:02 crc kubenswrapper[4799]: I1010 17:46:02.402323 4799 scope.go:117] "RemoveContainer" containerID="fe197396c5be2d0959a18226c3a887cbfc22f79ce601687c7a015173f2073961" Oct 10 17:46:02 crc kubenswrapper[4799]: E1010 17:46:02.402871 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:46:07 crc kubenswrapper[4799]: I1010 17:46:07.201185 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-sj59m"] Oct 10 17:46:07 crc kubenswrapper[4799]: I1010 17:46:07.208385 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-sj59m"] Oct 10 17:46:07 crc kubenswrapper[4799]: I1010 17:46:07.386539 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-pmlpf"] Oct 10 17:46:07 crc kubenswrapper[4799]: E1010 17:46:07.387109 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36939816-2c24-423d-8361-9471625ae3f5" containerName="collect-profiles" Oct 10 17:46:07 crc kubenswrapper[4799]: I1010 17:46:07.387140 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="36939816-2c24-423d-8361-9471625ae3f5" containerName="collect-profiles" Oct 10 17:46:07 crc kubenswrapper[4799]: I1010 17:46:07.387417 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="36939816-2c24-423d-8361-9471625ae3f5" containerName="collect-profiles" Oct 10 17:46:07 crc kubenswrapper[4799]: I1010 17:46:07.388397 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-pmlpf" Oct 10 17:46:07 crc kubenswrapper[4799]: I1010 17:46:07.391082 4799 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-qqh4q" Oct 10 17:46:07 crc kubenswrapper[4799]: I1010 17:46:07.391328 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Oct 10 17:46:07 crc kubenswrapper[4799]: I1010 17:46:07.391501 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Oct 10 17:46:07 crc kubenswrapper[4799]: I1010 17:46:07.393745 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Oct 10 17:46:07 crc kubenswrapper[4799]: I1010 17:46:07.394836 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-pmlpf"] Oct 10 17:46:07 crc kubenswrapper[4799]: I1010 17:46:07.425882 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2688c5c-df78-4127-9133-4221ea43363d" path="/var/lib/kubelet/pods/f2688c5c-df78-4127-9133-4221ea43363d/volumes" Oct 10 17:46:07 crc kubenswrapper[4799]: I1010 17:46:07.568276 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/14c679cb-5c28-4470-8123-fff3db0aa1ed-crc-storage\") pod \"crc-storage-crc-pmlpf\" (UID: \"14c679cb-5c28-4470-8123-fff3db0aa1ed\") " pod="crc-storage/crc-storage-crc-pmlpf" Oct 10 17:46:07 crc kubenswrapper[4799]: I1010 17:46:07.568504 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vpkjq\" (UniqueName: \"kubernetes.io/projected/14c679cb-5c28-4470-8123-fff3db0aa1ed-kube-api-access-vpkjq\") pod \"crc-storage-crc-pmlpf\" (UID: \"14c679cb-5c28-4470-8123-fff3db0aa1ed\") " pod="crc-storage/crc-storage-crc-pmlpf" Oct 10 17:46:07 crc kubenswrapper[4799]: I1010 17:46:07.568595 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/14c679cb-5c28-4470-8123-fff3db0aa1ed-node-mnt\") pod \"crc-storage-crc-pmlpf\" (UID: \"14c679cb-5c28-4470-8123-fff3db0aa1ed\") " pod="crc-storage/crc-storage-crc-pmlpf" Oct 10 17:46:07 crc kubenswrapper[4799]: I1010 17:46:07.669524 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/14c679cb-5c28-4470-8123-fff3db0aa1ed-crc-storage\") pod \"crc-storage-crc-pmlpf\" (UID: \"14c679cb-5c28-4470-8123-fff3db0aa1ed\") " pod="crc-storage/crc-storage-crc-pmlpf" Oct 10 17:46:07 crc kubenswrapper[4799]: I1010 17:46:07.669642 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vpkjq\" (UniqueName: \"kubernetes.io/projected/14c679cb-5c28-4470-8123-fff3db0aa1ed-kube-api-access-vpkjq\") pod \"crc-storage-crc-pmlpf\" (UID: \"14c679cb-5c28-4470-8123-fff3db0aa1ed\") " pod="crc-storage/crc-storage-crc-pmlpf" Oct 10 17:46:07 crc kubenswrapper[4799]: I1010 17:46:07.669688 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/14c679cb-5c28-4470-8123-fff3db0aa1ed-node-mnt\") pod \"crc-storage-crc-pmlpf\" (UID: \"14c679cb-5c28-4470-8123-fff3db0aa1ed\") " pod="crc-storage/crc-storage-crc-pmlpf" Oct 10 17:46:07 crc kubenswrapper[4799]: I1010 17:46:07.670020 4799 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/14c679cb-5c28-4470-8123-fff3db0aa1ed-node-mnt\") pod \"crc-storage-crc-pmlpf\" (UID: \"14c679cb-5c28-4470-8123-fff3db0aa1ed\") " pod="crc-storage/crc-storage-crc-pmlpf" Oct 10 17:46:07 crc kubenswrapper[4799]: I1010 17:46:07.670799 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/14c679cb-5c28-4470-8123-fff3db0aa1ed-crc-storage\") pod \"crc-storage-crc-pmlpf\" (UID: \"14c679cb-5c28-4470-8123-fff3db0aa1ed\") " pod="crc-storage/crc-storage-crc-pmlpf" Oct 10 17:46:07 crc kubenswrapper[4799]: I1010 17:46:07.694207 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vpkjq\" (UniqueName: \"kubernetes.io/projected/14c679cb-5c28-4470-8123-fff3db0aa1ed-kube-api-access-vpkjq\") pod \"crc-storage-crc-pmlpf\" (UID: \"14c679cb-5c28-4470-8123-fff3db0aa1ed\") " pod="crc-storage/crc-storage-crc-pmlpf" Oct 10 17:46:07 crc kubenswrapper[4799]: I1010 17:46:07.742560 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-pmlpf" Oct 10 17:46:08 crc kubenswrapper[4799]: I1010 17:46:08.044835 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-pmlpf"] Oct 10 17:46:08 crc kubenswrapper[4799]: I1010 17:46:08.159223 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-pmlpf" event={"ID":"14c679cb-5c28-4470-8123-fff3db0aa1ed","Type":"ContainerStarted","Data":"c8f7dc79734656ef76232fa5892d22b99ca13c70999b0a5de7a5301321970641"} Oct 10 17:46:09 crc kubenswrapper[4799]: I1010 17:46:09.167930 4799 generic.go:334] "Generic (PLEG): container finished" podID="14c679cb-5c28-4470-8123-fff3db0aa1ed" containerID="d605965916e424e497f95452325dc647b56db03925ac05ee18fcba63f42e4f5c" exitCode=0 Oct 10 17:46:09 crc kubenswrapper[4799]: I1010 17:46:09.168006 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-pmlpf" event={"ID":"14c679cb-5c28-4470-8123-fff3db0aa1ed","Type":"ContainerDied","Data":"d605965916e424e497f95452325dc647b56db03925ac05ee18fcba63f42e4f5c"} Oct 10 17:46:10 crc kubenswrapper[4799]: I1010 17:46:10.582317 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-pmlpf" Oct 10 17:46:10 crc kubenswrapper[4799]: I1010 17:46:10.729790 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vpkjq\" (UniqueName: \"kubernetes.io/projected/14c679cb-5c28-4470-8123-fff3db0aa1ed-kube-api-access-vpkjq\") pod \"14c679cb-5c28-4470-8123-fff3db0aa1ed\" (UID: \"14c679cb-5c28-4470-8123-fff3db0aa1ed\") " Oct 10 17:46:10 crc kubenswrapper[4799]: I1010 17:46:10.729911 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/14c679cb-5c28-4470-8123-fff3db0aa1ed-crc-storage\") pod \"14c679cb-5c28-4470-8123-fff3db0aa1ed\" (UID: \"14c679cb-5c28-4470-8123-fff3db0aa1ed\") " Oct 10 17:46:10 crc kubenswrapper[4799]: I1010 17:46:10.729959 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/14c679cb-5c28-4470-8123-fff3db0aa1ed-node-mnt\") pod \"14c679cb-5c28-4470-8123-fff3db0aa1ed\" (UID: \"14c679cb-5c28-4470-8123-fff3db0aa1ed\") " Oct 10 17:46:10 crc kubenswrapper[4799]: I1010 17:46:10.730336 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/14c679cb-5c28-4470-8123-fff3db0aa1ed-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "14c679cb-5c28-4470-8123-fff3db0aa1ed" (UID: "14c679cb-5c28-4470-8123-fff3db0aa1ed"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 17:46:10 crc kubenswrapper[4799]: I1010 17:46:10.738605 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14c679cb-5c28-4470-8123-fff3db0aa1ed-kube-api-access-vpkjq" (OuterVolumeSpecName: "kube-api-access-vpkjq") pod "14c679cb-5c28-4470-8123-fff3db0aa1ed" (UID: "14c679cb-5c28-4470-8123-fff3db0aa1ed"). InnerVolumeSpecName "kube-api-access-vpkjq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:46:10 crc kubenswrapper[4799]: I1010 17:46:10.764941 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14c679cb-5c28-4470-8123-fff3db0aa1ed-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "14c679cb-5c28-4470-8123-fff3db0aa1ed" (UID: "14c679cb-5c28-4470-8123-fff3db0aa1ed"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 17:46:10 crc kubenswrapper[4799]: I1010 17:46:10.832349 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vpkjq\" (UniqueName: \"kubernetes.io/projected/14c679cb-5c28-4470-8123-fff3db0aa1ed-kube-api-access-vpkjq\") on node \"crc\" DevicePath \"\"" Oct 10 17:46:10 crc kubenswrapper[4799]: I1010 17:46:10.832414 4799 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/14c679cb-5c28-4470-8123-fff3db0aa1ed-crc-storage\") on node \"crc\" DevicePath \"\"" Oct 10 17:46:10 crc kubenswrapper[4799]: I1010 17:46:10.832441 4799 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/14c679cb-5c28-4470-8123-fff3db0aa1ed-node-mnt\") on node \"crc\" DevicePath \"\"" Oct 10 17:46:11 crc kubenswrapper[4799]: I1010 17:46:11.192586 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-pmlpf" event={"ID":"14c679cb-5c28-4470-8123-fff3db0aa1ed","Type":"ContainerDied","Data":"c8f7dc79734656ef76232fa5892d22b99ca13c70999b0a5de7a5301321970641"} Oct 10 17:46:11 crc kubenswrapper[4799]: I1010 17:46:11.192652 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c8f7dc79734656ef76232fa5892d22b99ca13c70999b0a5de7a5301321970641" Oct 10 17:46:11 crc kubenswrapper[4799]: I1010 17:46:11.192782 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-pmlpf" Oct 10 17:46:13 crc kubenswrapper[4799]: I1010 17:46:13.170567 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-pmlpf"] Oct 10 17:46:13 crc kubenswrapper[4799]: I1010 17:46:13.179148 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-pmlpf"] Oct 10 17:46:13 crc kubenswrapper[4799]: I1010 17:46:13.352496 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-mffl9"] Oct 10 17:46:13 crc kubenswrapper[4799]: E1010 17:46:13.353473 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14c679cb-5c28-4470-8123-fff3db0aa1ed" containerName="storage" Oct 10 17:46:13 crc kubenswrapper[4799]: I1010 17:46:13.353697 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="14c679cb-5c28-4470-8123-fff3db0aa1ed" containerName="storage" Oct 10 17:46:13 crc kubenswrapper[4799]: I1010 17:46:13.354271 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="14c679cb-5c28-4470-8123-fff3db0aa1ed" containerName="storage" Oct 10 17:46:13 crc kubenswrapper[4799]: I1010 17:46:13.355477 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-mffl9" Oct 10 17:46:13 crc kubenswrapper[4799]: I1010 17:46:13.358868 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Oct 10 17:46:13 crc kubenswrapper[4799]: I1010 17:46:13.360104 4799 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-qqh4q" Oct 10 17:46:13 crc kubenswrapper[4799]: I1010 17:46:13.360496 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Oct 10 17:46:13 crc kubenswrapper[4799]: I1010 17:46:13.360862 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Oct 10 17:46:13 crc kubenswrapper[4799]: I1010 17:46:13.370823 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-mffl9"] Oct 10 17:46:13 crc kubenswrapper[4799]: I1010 17:46:13.423438 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14c679cb-5c28-4470-8123-fff3db0aa1ed" path="/var/lib/kubelet/pods/14c679cb-5c28-4470-8123-fff3db0aa1ed/volumes" Oct 10 17:46:13 crc kubenswrapper[4799]: I1010 17:46:13.470649 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/82da2c09-3ade-426a-a6d3-15fa636841a7-crc-storage\") pod \"crc-storage-crc-mffl9\" (UID: \"82da2c09-3ade-426a-a6d3-15fa636841a7\") " pod="crc-storage/crc-storage-crc-mffl9" Oct 10 17:46:13 crc kubenswrapper[4799]: I1010 17:46:13.471118 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/82da2c09-3ade-426a-a6d3-15fa636841a7-node-mnt\") pod \"crc-storage-crc-mffl9\" (UID: \"82da2c09-3ade-426a-a6d3-15fa636841a7\") " pod="crc-storage/crc-storage-crc-mffl9" Oct 10 17:46:13 crc kubenswrapper[4799]: I1010 17:46:13.471240 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lv4bd\" (UniqueName: \"kubernetes.io/projected/82da2c09-3ade-426a-a6d3-15fa636841a7-kube-api-access-lv4bd\") pod \"crc-storage-crc-mffl9\" (UID: \"82da2c09-3ade-426a-a6d3-15fa636841a7\") " pod="crc-storage/crc-storage-crc-mffl9" Oct 10 17:46:13 crc kubenswrapper[4799]: I1010 17:46:13.573206 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/82da2c09-3ade-426a-a6d3-15fa636841a7-node-mnt\") pod \"crc-storage-crc-mffl9\" (UID: \"82da2c09-3ade-426a-a6d3-15fa636841a7\") " pod="crc-storage/crc-storage-crc-mffl9" Oct 10 17:46:13 crc kubenswrapper[4799]: I1010 17:46:13.573277 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lv4bd\" (UniqueName: \"kubernetes.io/projected/82da2c09-3ade-426a-a6d3-15fa636841a7-kube-api-access-lv4bd\") pod \"crc-storage-crc-mffl9\" (UID: \"82da2c09-3ade-426a-a6d3-15fa636841a7\") " pod="crc-storage/crc-storage-crc-mffl9" Oct 10 17:46:13 crc kubenswrapper[4799]: I1010 17:46:13.573380 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/82da2c09-3ade-426a-a6d3-15fa636841a7-crc-storage\") pod \"crc-storage-crc-mffl9\" (UID: \"82da2c09-3ade-426a-a6d3-15fa636841a7\") " pod="crc-storage/crc-storage-crc-mffl9" Oct 10 17:46:13 crc kubenswrapper[4799]: I1010 17:46:13.573947 4799 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/82da2c09-3ade-426a-a6d3-15fa636841a7-node-mnt\") pod \"crc-storage-crc-mffl9\" (UID: \"82da2c09-3ade-426a-a6d3-15fa636841a7\") " pod="crc-storage/crc-storage-crc-mffl9" Oct 10 17:46:13 crc kubenswrapper[4799]: I1010 17:46:13.575448 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/82da2c09-3ade-426a-a6d3-15fa636841a7-crc-storage\") pod \"crc-storage-crc-mffl9\" (UID: \"82da2c09-3ade-426a-a6d3-15fa636841a7\") " pod="crc-storage/crc-storage-crc-mffl9" Oct 10 17:46:13 crc kubenswrapper[4799]: I1010 17:46:13.596482 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lv4bd\" (UniqueName: \"kubernetes.io/projected/82da2c09-3ade-426a-a6d3-15fa636841a7-kube-api-access-lv4bd\") pod \"crc-storage-crc-mffl9\" (UID: \"82da2c09-3ade-426a-a6d3-15fa636841a7\") " pod="crc-storage/crc-storage-crc-mffl9" Oct 10 17:46:13 crc kubenswrapper[4799]: I1010 17:46:13.687616 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-mffl9" Oct 10 17:46:14 crc kubenswrapper[4799]: I1010 17:46:14.218032 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-mffl9"] Oct 10 17:46:15 crc kubenswrapper[4799]: I1010 17:46:15.232345 4799 generic.go:334] "Generic (PLEG): container finished" podID="82da2c09-3ade-426a-a6d3-15fa636841a7" containerID="e637e72142b6eb7aa7a32076df33d05fc1c28eac89d0d444c145b317c4125f11" exitCode=0 Oct 10 17:46:15 crc kubenswrapper[4799]: I1010 17:46:15.232427 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-mffl9" event={"ID":"82da2c09-3ade-426a-a6d3-15fa636841a7","Type":"ContainerDied","Data":"e637e72142b6eb7aa7a32076df33d05fc1c28eac89d0d444c145b317c4125f11"} Oct 10 17:46:15 crc kubenswrapper[4799]: I1010 17:46:15.232632 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-mffl9" event={"ID":"82da2c09-3ade-426a-a6d3-15fa636841a7","Type":"ContainerStarted","Data":"3166b535185a04ce6e8fbfc994f4ddb369f9c1f2509fdbd7c288ad0d3f1abf2b"} Oct 10 17:46:16 crc kubenswrapper[4799]: I1010 17:46:16.403970 4799 scope.go:117] "RemoveContainer" containerID="fe197396c5be2d0959a18226c3a887cbfc22f79ce601687c7a015173f2073961" Oct 10 17:46:16 crc kubenswrapper[4799]: E1010 17:46:16.405037 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:46:16 crc kubenswrapper[4799]: I1010 17:46:16.640194 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-mffl9" Oct 10 17:46:16 crc kubenswrapper[4799]: I1010 17:46:16.731435 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/82da2c09-3ade-426a-a6d3-15fa636841a7-node-mnt\") pod \"82da2c09-3ade-426a-a6d3-15fa636841a7\" (UID: \"82da2c09-3ade-426a-a6d3-15fa636841a7\") " Oct 10 17:46:16 crc kubenswrapper[4799]: I1010 17:46:16.731564 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/82da2c09-3ade-426a-a6d3-15fa636841a7-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "82da2c09-3ade-426a-a6d3-15fa636841a7" (UID: "82da2c09-3ade-426a-a6d3-15fa636841a7"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 17:46:16 crc kubenswrapper[4799]: I1010 17:46:16.731637 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lv4bd\" (UniqueName: \"kubernetes.io/projected/82da2c09-3ade-426a-a6d3-15fa636841a7-kube-api-access-lv4bd\") pod \"82da2c09-3ade-426a-a6d3-15fa636841a7\" (UID: \"82da2c09-3ade-426a-a6d3-15fa636841a7\") " Oct 10 17:46:16 crc kubenswrapper[4799]: I1010 17:46:16.731738 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/82da2c09-3ade-426a-a6d3-15fa636841a7-crc-storage\") pod \"82da2c09-3ade-426a-a6d3-15fa636841a7\" (UID: \"82da2c09-3ade-426a-a6d3-15fa636841a7\") " Oct 10 17:46:16 crc kubenswrapper[4799]: I1010 17:46:16.732151 4799 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/82da2c09-3ade-426a-a6d3-15fa636841a7-node-mnt\") on node \"crc\" DevicePath \"\"" Oct 10 17:46:16 crc kubenswrapper[4799]: I1010 17:46:16.744176 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82da2c09-3ade-426a-a6d3-15fa636841a7-kube-api-access-lv4bd" (OuterVolumeSpecName: "kube-api-access-lv4bd") pod "82da2c09-3ade-426a-a6d3-15fa636841a7" (UID: "82da2c09-3ade-426a-a6d3-15fa636841a7"). InnerVolumeSpecName "kube-api-access-lv4bd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:46:16 crc kubenswrapper[4799]: I1010 17:46:16.757659 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82da2c09-3ade-426a-a6d3-15fa636841a7-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "82da2c09-3ade-426a-a6d3-15fa636841a7" (UID: "82da2c09-3ade-426a-a6d3-15fa636841a7"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 17:46:16 crc kubenswrapper[4799]: I1010 17:46:16.833663 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lv4bd\" (UniqueName: \"kubernetes.io/projected/82da2c09-3ade-426a-a6d3-15fa636841a7-kube-api-access-lv4bd\") on node \"crc\" DevicePath \"\"" Oct 10 17:46:16 crc kubenswrapper[4799]: I1010 17:46:16.833725 4799 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/82da2c09-3ade-426a-a6d3-15fa636841a7-crc-storage\") on node \"crc\" DevicePath \"\"" Oct 10 17:46:17 crc kubenswrapper[4799]: I1010 17:46:17.283994 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-mffl9" event={"ID":"82da2c09-3ade-426a-a6d3-15fa636841a7","Type":"ContainerDied","Data":"3166b535185a04ce6e8fbfc994f4ddb369f9c1f2509fdbd7c288ad0d3f1abf2b"} Oct 10 17:46:17 crc kubenswrapper[4799]: I1010 17:46:17.284056 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3166b535185a04ce6e8fbfc994f4ddb369f9c1f2509fdbd7c288ad0d3f1abf2b" Oct 10 17:46:17 crc kubenswrapper[4799]: I1010 17:46:17.284133 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-mffl9" Oct 10 17:46:28 crc kubenswrapper[4799]: I1010 17:46:28.403220 4799 scope.go:117] "RemoveContainer" containerID="fe197396c5be2d0959a18226c3a887cbfc22f79ce601687c7a015173f2073961" Oct 10 17:46:28 crc kubenswrapper[4799]: E1010 17:46:28.404274 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:46:43 crc kubenswrapper[4799]: I1010 17:46:43.402979 4799 scope.go:117] "RemoveContainer" containerID="fe197396c5be2d0959a18226c3a887cbfc22f79ce601687c7a015173f2073961" Oct 10 17:46:43 crc kubenswrapper[4799]: E1010 17:46:43.403812 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:46:56 crc kubenswrapper[4799]: I1010 17:46:56.402433 4799 scope.go:117] "RemoveContainer" containerID="fe197396c5be2d0959a18226c3a887cbfc22f79ce601687c7a015173f2073961" Oct 10 17:46:56 crc kubenswrapper[4799]: E1010 17:46:56.403077 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:47:06 crc kubenswrapper[4799]: I1010 17:47:06.216336 4799 scope.go:117] "RemoveContainer" containerID="a021416239a2534be17767ac1ddffe083f1460b6ab99aa56ad5dc721ac8328b8" Oct 10 17:47:11 crc kubenswrapper[4799]: I1010 
17:47:11.402576 4799 scope.go:117] "RemoveContainer" containerID="fe197396c5be2d0959a18226c3a887cbfc22f79ce601687c7a015173f2073961" Oct 10 17:47:11 crc kubenswrapper[4799]: E1010 17:47:11.405226 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:47:24 crc kubenswrapper[4799]: I1010 17:47:24.402636 4799 scope.go:117] "RemoveContainer" containerID="fe197396c5be2d0959a18226c3a887cbfc22f79ce601687c7a015173f2073961" Oct 10 17:47:24 crc kubenswrapper[4799]: E1010 17:47:24.403932 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:47:39 crc kubenswrapper[4799]: I1010 17:47:39.402857 4799 scope.go:117] "RemoveContainer" containerID="fe197396c5be2d0959a18226c3a887cbfc22f79ce601687c7a015173f2073961" Oct 10 17:47:39 crc kubenswrapper[4799]: E1010 17:47:39.403946 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:47:41 crc kubenswrapper[4799]: I1010 17:47:41.960791 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-m4fm2"] Oct 10 17:47:41 crc kubenswrapper[4799]: E1010 17:47:41.964959 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82da2c09-3ade-426a-a6d3-15fa636841a7" containerName="storage" Oct 10 17:47:41 crc kubenswrapper[4799]: I1010 17:47:41.964999 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="82da2c09-3ade-426a-a6d3-15fa636841a7" containerName="storage" Oct 10 17:47:41 crc kubenswrapper[4799]: I1010 17:47:41.965201 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="82da2c09-3ade-426a-a6d3-15fa636841a7" containerName="storage" Oct 10 17:47:41 crc kubenswrapper[4799]: I1010 17:47:41.966193 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-m4fm2" Oct 10 17:47:41 crc kubenswrapper[4799]: I1010 17:47:41.972943 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m4fm2"] Oct 10 17:47:42 crc kubenswrapper[4799]: I1010 17:47:42.109086 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jf2tr\" (UniqueName: \"kubernetes.io/projected/c814da6d-1321-4fde-abd8-6d1d5f9d9ddb-kube-api-access-jf2tr\") pod \"redhat-operators-m4fm2\" (UID: \"c814da6d-1321-4fde-abd8-6d1d5f9d9ddb\") " pod="openshift-marketplace/redhat-operators-m4fm2" Oct 10 17:47:42 crc kubenswrapper[4799]: I1010 17:47:42.109202 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c814da6d-1321-4fde-abd8-6d1d5f9d9ddb-utilities\") pod \"redhat-operators-m4fm2\" (UID: \"c814da6d-1321-4fde-abd8-6d1d5f9d9ddb\") " pod="openshift-marketplace/redhat-operators-m4fm2" Oct 10 17:47:42 crc kubenswrapper[4799]: I1010 17:47:42.109263 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c814da6d-1321-4fde-abd8-6d1d5f9d9ddb-catalog-content\") pod \"redhat-operators-m4fm2\" (UID: \"c814da6d-1321-4fde-abd8-6d1d5f9d9ddb\") " pod="openshift-marketplace/redhat-operators-m4fm2" Oct 10 17:47:42 crc kubenswrapper[4799]: I1010 17:47:42.210285 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jf2tr\" (UniqueName: \"kubernetes.io/projected/c814da6d-1321-4fde-abd8-6d1d5f9d9ddb-kube-api-access-jf2tr\") pod \"redhat-operators-m4fm2\" (UID: \"c814da6d-1321-4fde-abd8-6d1d5f9d9ddb\") " pod="openshift-marketplace/redhat-operators-m4fm2" Oct 10 17:47:42 crc kubenswrapper[4799]: I1010 17:47:42.210347 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c814da6d-1321-4fde-abd8-6d1d5f9d9ddb-utilities\") pod \"redhat-operators-m4fm2\" (UID: \"c814da6d-1321-4fde-abd8-6d1d5f9d9ddb\") " pod="openshift-marketplace/redhat-operators-m4fm2" Oct 10 17:47:42 crc kubenswrapper[4799]: I1010 17:47:42.210385 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c814da6d-1321-4fde-abd8-6d1d5f9d9ddb-catalog-content\") pod \"redhat-operators-m4fm2\" (UID: \"c814da6d-1321-4fde-abd8-6d1d5f9d9ddb\") " pod="openshift-marketplace/redhat-operators-m4fm2" Oct 10 17:47:42 crc kubenswrapper[4799]: I1010 17:47:42.210908 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c814da6d-1321-4fde-abd8-6d1d5f9d9ddb-catalog-content\") pod \"redhat-operators-m4fm2\" (UID: \"c814da6d-1321-4fde-abd8-6d1d5f9d9ddb\") " pod="openshift-marketplace/redhat-operators-m4fm2" Oct 10 17:47:42 crc kubenswrapper[4799]: I1010 17:47:42.211436 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c814da6d-1321-4fde-abd8-6d1d5f9d9ddb-utilities\") pod \"redhat-operators-m4fm2\" (UID: \"c814da6d-1321-4fde-abd8-6d1d5f9d9ddb\") " pod="openshift-marketplace/redhat-operators-m4fm2" Oct 10 17:47:42 crc kubenswrapper[4799]: I1010 17:47:42.240663 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-jf2tr\" (UniqueName: \"kubernetes.io/projected/c814da6d-1321-4fde-abd8-6d1d5f9d9ddb-kube-api-access-jf2tr\") pod \"redhat-operators-m4fm2\" (UID: \"c814da6d-1321-4fde-abd8-6d1d5f9d9ddb\") " pod="openshift-marketplace/redhat-operators-m4fm2" Oct 10 17:47:42 crc kubenswrapper[4799]: I1010 17:47:42.288646 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m4fm2" Oct 10 17:47:42 crc kubenswrapper[4799]: I1010 17:47:42.559164 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m4fm2"] Oct 10 17:47:42 crc kubenswrapper[4799]: W1010 17:47:42.564984 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc814da6d_1321_4fde_abd8_6d1d5f9d9ddb.slice/crio-461e049c3eb50406a1fbb6abd2640df73e14c852c5d1feffcbd5444c0bc31819 WatchSource:0}: Error finding container 461e049c3eb50406a1fbb6abd2640df73e14c852c5d1feffcbd5444c0bc31819: Status 404 returned error can't find the container with id 461e049c3eb50406a1fbb6abd2640df73e14c852c5d1feffcbd5444c0bc31819 Oct 10 17:47:43 crc kubenswrapper[4799]: I1010 17:47:43.119767 4799 generic.go:334] "Generic (PLEG): container finished" podID="c814da6d-1321-4fde-abd8-6d1d5f9d9ddb" containerID="ffe3e05afc8362138b21738964f20f6bbbdbefc7f807753f6d457f0ffad73d82" exitCode=0 Oct 10 17:47:43 crc kubenswrapper[4799]: I1010 17:47:43.119813 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m4fm2" event={"ID":"c814da6d-1321-4fde-abd8-6d1d5f9d9ddb","Type":"ContainerDied","Data":"ffe3e05afc8362138b21738964f20f6bbbdbefc7f807753f6d457f0ffad73d82"} Oct 10 17:47:43 crc kubenswrapper[4799]: I1010 17:47:43.119838 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m4fm2" event={"ID":"c814da6d-1321-4fde-abd8-6d1d5f9d9ddb","Type":"ContainerStarted","Data":"461e049c3eb50406a1fbb6abd2640df73e14c852c5d1feffcbd5444c0bc31819"} Oct 10 17:47:44 crc kubenswrapper[4799]: I1010 17:47:44.127692 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m4fm2" event={"ID":"c814da6d-1321-4fde-abd8-6d1d5f9d9ddb","Type":"ContainerStarted","Data":"81402489f9dd3f5e6d57a15a94f716a033c964121128d4bf1db80a983672283a"} Oct 10 17:47:45 crc kubenswrapper[4799]: I1010 17:47:45.142912 4799 generic.go:334] "Generic (PLEG): container finished" podID="c814da6d-1321-4fde-abd8-6d1d5f9d9ddb" containerID="81402489f9dd3f5e6d57a15a94f716a033c964121128d4bf1db80a983672283a" exitCode=0 Oct 10 17:47:45 crc kubenswrapper[4799]: I1010 17:47:45.143048 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m4fm2" event={"ID":"c814da6d-1321-4fde-abd8-6d1d5f9d9ddb","Type":"ContainerDied","Data":"81402489f9dd3f5e6d57a15a94f716a033c964121128d4bf1db80a983672283a"} Oct 10 17:47:46 crc kubenswrapper[4799]: I1010 17:47:46.155410 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m4fm2" event={"ID":"c814da6d-1321-4fde-abd8-6d1d5f9d9ddb","Type":"ContainerStarted","Data":"e5608e0bb57bfb9ac5f6a86e95fc8e9c1c8abaff51a67a10e9f7f7525cee51d6"} Oct 10 17:47:46 crc kubenswrapper[4799]: I1010 17:47:46.182177 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-m4fm2" podStartSLOduration=2.715944971 podStartE2EDuration="5.18214775s" podCreationTimestamp="2025-10-10 
17:47:41 +0000 UTC" firstStartedPulling="2025-10-10 17:47:43.122229537 +0000 UTC m=+4556.630553652" lastFinishedPulling="2025-10-10 17:47:45.588432286 +0000 UTC m=+4559.096756431" observedRunningTime="2025-10-10 17:47:46.171837388 +0000 UTC m=+4559.680161513" watchObservedRunningTime="2025-10-10 17:47:46.18214775 +0000 UTC m=+4559.690471905" Oct 10 17:47:52 crc kubenswrapper[4799]: I1010 17:47:52.289589 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-m4fm2" Oct 10 17:47:52 crc kubenswrapper[4799]: I1010 17:47:52.290198 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-m4fm2" Oct 10 17:47:52 crc kubenswrapper[4799]: I1010 17:47:52.537009 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-m4fm2" Oct 10 17:47:53 crc kubenswrapper[4799]: I1010 17:47:53.268115 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-m4fm2" Oct 10 17:47:53 crc kubenswrapper[4799]: I1010 17:47:53.328289 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m4fm2"] Oct 10 17:47:54 crc kubenswrapper[4799]: I1010 17:47:54.402555 4799 scope.go:117] "RemoveContainer" containerID="fe197396c5be2d0959a18226c3a887cbfc22f79ce601687c7a015173f2073961" Oct 10 17:47:55 crc kubenswrapper[4799]: I1010 17:47:55.229993 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"f501bd44ca99d1332e2b38994323202e43e3dcc1b9ebd31fdd9ca3a13eab9e4d"} Oct 10 17:47:55 crc kubenswrapper[4799]: I1010 17:47:55.230213 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-m4fm2" podUID="c814da6d-1321-4fde-abd8-6d1d5f9d9ddb" containerName="registry-server" containerID="cri-o://e5608e0bb57bfb9ac5f6a86e95fc8e9c1c8abaff51a67a10e9f7f7525cee51d6" gracePeriod=2 Oct 10 17:47:56 crc kubenswrapper[4799]: I1010 17:47:56.236873 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m4fm2" Oct 10 17:47:56 crc kubenswrapper[4799]: I1010 17:47:56.245673 4799 generic.go:334] "Generic (PLEG): container finished" podID="c814da6d-1321-4fde-abd8-6d1d5f9d9ddb" containerID="e5608e0bb57bfb9ac5f6a86e95fc8e9c1c8abaff51a67a10e9f7f7525cee51d6" exitCode=0 Oct 10 17:47:56 crc kubenswrapper[4799]: I1010 17:47:56.245705 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m4fm2" event={"ID":"c814da6d-1321-4fde-abd8-6d1d5f9d9ddb","Type":"ContainerDied","Data":"e5608e0bb57bfb9ac5f6a86e95fc8e9c1c8abaff51a67a10e9f7f7525cee51d6"} Oct 10 17:47:56 crc kubenswrapper[4799]: I1010 17:47:56.245725 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m4fm2" event={"ID":"c814da6d-1321-4fde-abd8-6d1d5f9d9ddb","Type":"ContainerDied","Data":"461e049c3eb50406a1fbb6abd2640df73e14c852c5d1feffcbd5444c0bc31819"} Oct 10 17:47:56 crc kubenswrapper[4799]: I1010 17:47:56.245741 4799 scope.go:117] "RemoveContainer" containerID="e5608e0bb57bfb9ac5f6a86e95fc8e9c1c8abaff51a67a10e9f7f7525cee51d6" Oct 10 17:47:56 crc kubenswrapper[4799]: I1010 17:47:56.246028 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-m4fm2" Oct 10 17:47:56 crc kubenswrapper[4799]: I1010 17:47:56.275188 4799 scope.go:117] "RemoveContainer" containerID="81402489f9dd3f5e6d57a15a94f716a033c964121128d4bf1db80a983672283a" Oct 10 17:47:56 crc kubenswrapper[4799]: I1010 17:47:56.302370 4799 scope.go:117] "RemoveContainer" containerID="ffe3e05afc8362138b21738964f20f6bbbdbefc7f807753f6d457f0ffad73d82" Oct 10 17:47:56 crc kubenswrapper[4799]: I1010 17:47:56.334116 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jf2tr\" (UniqueName: \"kubernetes.io/projected/c814da6d-1321-4fde-abd8-6d1d5f9d9ddb-kube-api-access-jf2tr\") pod \"c814da6d-1321-4fde-abd8-6d1d5f9d9ddb\" (UID: \"c814da6d-1321-4fde-abd8-6d1d5f9d9ddb\") " Oct 10 17:47:56 crc kubenswrapper[4799]: I1010 17:47:56.334169 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c814da6d-1321-4fde-abd8-6d1d5f9d9ddb-catalog-content\") pod \"c814da6d-1321-4fde-abd8-6d1d5f9d9ddb\" (UID: \"c814da6d-1321-4fde-abd8-6d1d5f9d9ddb\") " Oct 10 17:47:56 crc kubenswrapper[4799]: I1010 17:47:56.334200 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c814da6d-1321-4fde-abd8-6d1d5f9d9ddb-utilities\") pod \"c814da6d-1321-4fde-abd8-6d1d5f9d9ddb\" (UID: \"c814da6d-1321-4fde-abd8-6d1d5f9d9ddb\") " Oct 10 17:47:56 crc kubenswrapper[4799]: I1010 17:47:56.335445 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c814da6d-1321-4fde-abd8-6d1d5f9d9ddb-utilities" (OuterVolumeSpecName: "utilities") pod "c814da6d-1321-4fde-abd8-6d1d5f9d9ddb" (UID: "c814da6d-1321-4fde-abd8-6d1d5f9d9ddb"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:47:56 crc kubenswrapper[4799]: I1010 17:47:56.335523 4799 scope.go:117] "RemoveContainer" containerID="e5608e0bb57bfb9ac5f6a86e95fc8e9c1c8abaff51a67a10e9f7f7525cee51d6" Oct 10 17:47:56 crc kubenswrapper[4799]: E1010 17:47:56.335922 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e5608e0bb57bfb9ac5f6a86e95fc8e9c1c8abaff51a67a10e9f7f7525cee51d6\": container with ID starting with e5608e0bb57bfb9ac5f6a86e95fc8e9c1c8abaff51a67a10e9f7f7525cee51d6 not found: ID does not exist" containerID="e5608e0bb57bfb9ac5f6a86e95fc8e9c1c8abaff51a67a10e9f7f7525cee51d6" Oct 10 17:47:56 crc kubenswrapper[4799]: I1010 17:47:56.335962 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5608e0bb57bfb9ac5f6a86e95fc8e9c1c8abaff51a67a10e9f7f7525cee51d6"} err="failed to get container status \"e5608e0bb57bfb9ac5f6a86e95fc8e9c1c8abaff51a67a10e9f7f7525cee51d6\": rpc error: code = NotFound desc = could not find container \"e5608e0bb57bfb9ac5f6a86e95fc8e9c1c8abaff51a67a10e9f7f7525cee51d6\": container with ID starting with e5608e0bb57bfb9ac5f6a86e95fc8e9c1c8abaff51a67a10e9f7f7525cee51d6 not found: ID does not exist" Oct 10 17:47:56 crc kubenswrapper[4799]: I1010 17:47:56.335993 4799 scope.go:117] "RemoveContainer" containerID="81402489f9dd3f5e6d57a15a94f716a033c964121128d4bf1db80a983672283a" Oct 10 17:47:56 crc kubenswrapper[4799]: E1010 17:47:56.336733 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"81402489f9dd3f5e6d57a15a94f716a033c964121128d4bf1db80a983672283a\": container with ID starting with 81402489f9dd3f5e6d57a15a94f716a033c964121128d4bf1db80a983672283a not found: ID does not exist" containerID="81402489f9dd3f5e6d57a15a94f716a033c964121128d4bf1db80a983672283a" Oct 10 17:47:56 crc kubenswrapper[4799]: I1010 17:47:56.336760 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81402489f9dd3f5e6d57a15a94f716a033c964121128d4bf1db80a983672283a"} err="failed to get container status \"81402489f9dd3f5e6d57a15a94f716a033c964121128d4bf1db80a983672283a\": rpc error: code = NotFound desc = could not find container \"81402489f9dd3f5e6d57a15a94f716a033c964121128d4bf1db80a983672283a\": container with ID starting with 81402489f9dd3f5e6d57a15a94f716a033c964121128d4bf1db80a983672283a not found: ID does not exist" Oct 10 17:47:56 crc kubenswrapper[4799]: I1010 17:47:56.336799 4799 scope.go:117] "RemoveContainer" containerID="ffe3e05afc8362138b21738964f20f6bbbdbefc7f807753f6d457f0ffad73d82" Oct 10 17:47:56 crc kubenswrapper[4799]: E1010 17:47:56.337012 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ffe3e05afc8362138b21738964f20f6bbbdbefc7f807753f6d457f0ffad73d82\": container with ID starting with ffe3e05afc8362138b21738964f20f6bbbdbefc7f807753f6d457f0ffad73d82 not found: ID does not exist" containerID="ffe3e05afc8362138b21738964f20f6bbbdbefc7f807753f6d457f0ffad73d82" Oct 10 17:47:56 crc kubenswrapper[4799]: I1010 17:47:56.337029 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ffe3e05afc8362138b21738964f20f6bbbdbefc7f807753f6d457f0ffad73d82"} err="failed to get container status \"ffe3e05afc8362138b21738964f20f6bbbdbefc7f807753f6d457f0ffad73d82\": rpc error: code = NotFound desc = could not 
find container \"ffe3e05afc8362138b21738964f20f6bbbdbefc7f807753f6d457f0ffad73d82\": container with ID starting with ffe3e05afc8362138b21738964f20f6bbbdbefc7f807753f6d457f0ffad73d82 not found: ID does not exist" Oct 10 17:47:56 crc kubenswrapper[4799]: I1010 17:47:56.339634 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c814da6d-1321-4fde-abd8-6d1d5f9d9ddb-kube-api-access-jf2tr" (OuterVolumeSpecName: "kube-api-access-jf2tr") pod "c814da6d-1321-4fde-abd8-6d1d5f9d9ddb" (UID: "c814da6d-1321-4fde-abd8-6d1d5f9d9ddb"). InnerVolumeSpecName "kube-api-access-jf2tr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:47:56 crc kubenswrapper[4799]: I1010 17:47:56.417619 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c814da6d-1321-4fde-abd8-6d1d5f9d9ddb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c814da6d-1321-4fde-abd8-6d1d5f9d9ddb" (UID: "c814da6d-1321-4fde-abd8-6d1d5f9d9ddb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:47:56 crc kubenswrapper[4799]: I1010 17:47:56.436161 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jf2tr\" (UniqueName: \"kubernetes.io/projected/c814da6d-1321-4fde-abd8-6d1d5f9d9ddb-kube-api-access-jf2tr\") on node \"crc\" DevicePath \"\"" Oct 10 17:47:56 crc kubenswrapper[4799]: I1010 17:47:56.436210 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c814da6d-1321-4fde-abd8-6d1d5f9d9ddb-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 17:47:56 crc kubenswrapper[4799]: I1010 17:47:56.436230 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c814da6d-1321-4fde-abd8-6d1d5f9d9ddb-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 17:47:56 crc kubenswrapper[4799]: I1010 17:47:56.583455 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m4fm2"] Oct 10 17:47:56 crc kubenswrapper[4799]: I1010 17:47:56.593787 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-m4fm2"] Oct 10 17:47:57 crc kubenswrapper[4799]: I1010 17:47:57.419538 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c814da6d-1321-4fde-abd8-6d1d5f9d9ddb" path="/var/lib/kubelet/pods/c814da6d-1321-4fde-abd8-6d1d5f9d9ddb/volumes" Oct 10 17:48:07 crc kubenswrapper[4799]: I1010 17:48:07.024956 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-d6pqk"] Oct 10 17:48:07 crc kubenswrapper[4799]: E1010 17:48:07.026443 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c814da6d-1321-4fde-abd8-6d1d5f9d9ddb" containerName="registry-server" Oct 10 17:48:07 crc kubenswrapper[4799]: I1010 17:48:07.026473 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="c814da6d-1321-4fde-abd8-6d1d5f9d9ddb" containerName="registry-server" Oct 10 17:48:07 crc kubenswrapper[4799]: E1010 17:48:07.026515 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c814da6d-1321-4fde-abd8-6d1d5f9d9ddb" containerName="extract-content" Oct 10 17:48:07 crc kubenswrapper[4799]: I1010 17:48:07.026534 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="c814da6d-1321-4fde-abd8-6d1d5f9d9ddb" containerName="extract-content" Oct 10 17:48:07 crc kubenswrapper[4799]: E1010 17:48:07.026579 4799 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c814da6d-1321-4fde-abd8-6d1d5f9d9ddb" containerName="extract-utilities" Oct 10 17:48:07 crc kubenswrapper[4799]: I1010 17:48:07.026598 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="c814da6d-1321-4fde-abd8-6d1d5f9d9ddb" containerName="extract-utilities" Oct 10 17:48:07 crc kubenswrapper[4799]: I1010 17:48:07.027004 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="c814da6d-1321-4fde-abd8-6d1d5f9d9ddb" containerName="registry-server" Oct 10 17:48:07 crc kubenswrapper[4799]: I1010 17:48:07.029199 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d6pqk" Oct 10 17:48:07 crc kubenswrapper[4799]: I1010 17:48:07.042707 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-d6pqk"] Oct 10 17:48:07 crc kubenswrapper[4799]: I1010 17:48:07.118638 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec13a977-45f8-4ce7-a0f6-519826922082-catalog-content\") pod \"community-operators-d6pqk\" (UID: \"ec13a977-45f8-4ce7-a0f6-519826922082\") " pod="openshift-marketplace/community-operators-d6pqk" Oct 10 17:48:07 crc kubenswrapper[4799]: I1010 17:48:07.118991 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qw8k\" (UniqueName: \"kubernetes.io/projected/ec13a977-45f8-4ce7-a0f6-519826922082-kube-api-access-4qw8k\") pod \"community-operators-d6pqk\" (UID: \"ec13a977-45f8-4ce7-a0f6-519826922082\") " pod="openshift-marketplace/community-operators-d6pqk" Oct 10 17:48:07 crc kubenswrapper[4799]: I1010 17:48:07.119230 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec13a977-45f8-4ce7-a0f6-519826922082-utilities\") pod \"community-operators-d6pqk\" (UID: \"ec13a977-45f8-4ce7-a0f6-519826922082\") " pod="openshift-marketplace/community-operators-d6pqk" Oct 10 17:48:07 crc kubenswrapper[4799]: I1010 17:48:07.222377 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec13a977-45f8-4ce7-a0f6-519826922082-catalog-content\") pod \"community-operators-d6pqk\" (UID: \"ec13a977-45f8-4ce7-a0f6-519826922082\") " pod="openshift-marketplace/community-operators-d6pqk" Oct 10 17:48:07 crc kubenswrapper[4799]: I1010 17:48:07.222517 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qw8k\" (UniqueName: \"kubernetes.io/projected/ec13a977-45f8-4ce7-a0f6-519826922082-kube-api-access-4qw8k\") pod \"community-operators-d6pqk\" (UID: \"ec13a977-45f8-4ce7-a0f6-519826922082\") " pod="openshift-marketplace/community-operators-d6pqk" Oct 10 17:48:07 crc kubenswrapper[4799]: I1010 17:48:07.222574 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec13a977-45f8-4ce7-a0f6-519826922082-utilities\") pod \"community-operators-d6pqk\" (UID: \"ec13a977-45f8-4ce7-a0f6-519826922082\") " pod="openshift-marketplace/community-operators-d6pqk" Oct 10 17:48:07 crc kubenswrapper[4799]: I1010 17:48:07.223017 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/ec13a977-45f8-4ce7-a0f6-519826922082-catalog-content\") pod \"community-operators-d6pqk\" (UID: \"ec13a977-45f8-4ce7-a0f6-519826922082\") " pod="openshift-marketplace/community-operators-d6pqk" Oct 10 17:48:07 crc kubenswrapper[4799]: I1010 17:48:07.223042 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec13a977-45f8-4ce7-a0f6-519826922082-utilities\") pod \"community-operators-d6pqk\" (UID: \"ec13a977-45f8-4ce7-a0f6-519826922082\") " pod="openshift-marketplace/community-operators-d6pqk" Oct 10 17:48:07 crc kubenswrapper[4799]: I1010 17:48:07.585295 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qw8k\" (UniqueName: \"kubernetes.io/projected/ec13a977-45f8-4ce7-a0f6-519826922082-kube-api-access-4qw8k\") pod \"community-operators-d6pqk\" (UID: \"ec13a977-45f8-4ce7-a0f6-519826922082\") " pod="openshift-marketplace/community-operators-d6pqk" Oct 10 17:48:07 crc kubenswrapper[4799]: I1010 17:48:07.669634 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d6pqk" Oct 10 17:48:07 crc kubenswrapper[4799]: I1010 17:48:07.914778 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-d6pqk"] Oct 10 17:48:08 crc kubenswrapper[4799]: I1010 17:48:08.390849 4799 generic.go:334] "Generic (PLEG): container finished" podID="ec13a977-45f8-4ce7-a0f6-519826922082" containerID="80057f9add4632ed74c0cf841ca531d27f3bd80defe60d584a209520f291fea7" exitCode=0 Oct 10 17:48:08 crc kubenswrapper[4799]: I1010 17:48:08.391050 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d6pqk" event={"ID":"ec13a977-45f8-4ce7-a0f6-519826922082","Type":"ContainerDied","Data":"80057f9add4632ed74c0cf841ca531d27f3bd80defe60d584a209520f291fea7"} Oct 10 17:48:08 crc kubenswrapper[4799]: I1010 17:48:08.391236 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d6pqk" event={"ID":"ec13a977-45f8-4ce7-a0f6-519826922082","Type":"ContainerStarted","Data":"7c403bb1e7ab5158c538d308f3d9a179c7fc200c9e2161183b0ac4e0bb9ef453"} Oct 10 17:48:09 crc kubenswrapper[4799]: I1010 17:48:09.414508 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d6pqk" event={"ID":"ec13a977-45f8-4ce7-a0f6-519826922082","Type":"ContainerStarted","Data":"c06a1cbc31ba82557122666b5b853a4bc5b3699b918f29ef5bf3f34f5d84eb94"} Oct 10 17:48:10 crc kubenswrapper[4799]: I1010 17:48:10.415209 4799 generic.go:334] "Generic (PLEG): container finished" podID="ec13a977-45f8-4ce7-a0f6-519826922082" containerID="c06a1cbc31ba82557122666b5b853a4bc5b3699b918f29ef5bf3f34f5d84eb94" exitCode=0 Oct 10 17:48:10 crc kubenswrapper[4799]: I1010 17:48:10.415285 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d6pqk" event={"ID":"ec13a977-45f8-4ce7-a0f6-519826922082","Type":"ContainerDied","Data":"c06a1cbc31ba82557122666b5b853a4bc5b3699b918f29ef5bf3f34f5d84eb94"} Oct 10 17:48:11 crc kubenswrapper[4799]: I1010 17:48:11.425674 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d6pqk" event={"ID":"ec13a977-45f8-4ce7-a0f6-519826922082","Type":"ContainerStarted","Data":"237f524b2696dc3e5c8bd79cb797aee29bbda9276692c62e581ea6db667e9786"} Oct 10 17:48:11 crc kubenswrapper[4799]: I1010 17:48:11.455273 
4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-d6pqk" podStartSLOduration=2.8954803890000003 podStartE2EDuration="5.455254954s" podCreationTimestamp="2025-10-10 17:48:06 +0000 UTC" firstStartedPulling="2025-10-10 17:48:08.392890562 +0000 UTC m=+4581.901214717" lastFinishedPulling="2025-10-10 17:48:10.952665167 +0000 UTC m=+4584.460989282" observedRunningTime="2025-10-10 17:48:11.449733709 +0000 UTC m=+4584.958057844" watchObservedRunningTime="2025-10-10 17:48:11.455254954 +0000 UTC m=+4584.963579069" Oct 10 17:48:17 crc kubenswrapper[4799]: I1010 17:48:17.670685 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-d6pqk" Oct 10 17:48:17 crc kubenswrapper[4799]: I1010 17:48:17.671561 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-d6pqk" Oct 10 17:48:17 crc kubenswrapper[4799]: I1010 17:48:17.728152 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-d6pqk" Oct 10 17:48:18 crc kubenswrapper[4799]: I1010 17:48:18.565976 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-d6pqk" Oct 10 17:48:18 crc kubenswrapper[4799]: I1010 17:48:18.621707 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-d6pqk"] Oct 10 17:48:20 crc kubenswrapper[4799]: I1010 17:48:20.511378 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-d6pqk" podUID="ec13a977-45f8-4ce7-a0f6-519826922082" containerName="registry-server" containerID="cri-o://237f524b2696dc3e5c8bd79cb797aee29bbda9276692c62e581ea6db667e9786" gracePeriod=2 Oct 10 17:48:21 crc kubenswrapper[4799]: I1010 17:48:21.176333 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d6pqk" Oct 10 17:48:21 crc kubenswrapper[4799]: I1010 17:48:21.239868 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec13a977-45f8-4ce7-a0f6-519826922082-utilities\") pod \"ec13a977-45f8-4ce7-a0f6-519826922082\" (UID: \"ec13a977-45f8-4ce7-a0f6-519826922082\") " Oct 10 17:48:21 crc kubenswrapper[4799]: I1010 17:48:21.240061 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec13a977-45f8-4ce7-a0f6-519826922082-catalog-content\") pod \"ec13a977-45f8-4ce7-a0f6-519826922082\" (UID: \"ec13a977-45f8-4ce7-a0f6-519826922082\") " Oct 10 17:48:21 crc kubenswrapper[4799]: I1010 17:48:21.240178 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4qw8k\" (UniqueName: \"kubernetes.io/projected/ec13a977-45f8-4ce7-a0f6-519826922082-kube-api-access-4qw8k\") pod \"ec13a977-45f8-4ce7-a0f6-519826922082\" (UID: \"ec13a977-45f8-4ce7-a0f6-519826922082\") " Oct 10 17:48:21 crc kubenswrapper[4799]: I1010 17:48:21.242494 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec13a977-45f8-4ce7-a0f6-519826922082-utilities" (OuterVolumeSpecName: "utilities") pod "ec13a977-45f8-4ce7-a0f6-519826922082" (UID: "ec13a977-45f8-4ce7-a0f6-519826922082"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:48:21 crc kubenswrapper[4799]: I1010 17:48:21.252102 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec13a977-45f8-4ce7-a0f6-519826922082-kube-api-access-4qw8k" (OuterVolumeSpecName: "kube-api-access-4qw8k") pod "ec13a977-45f8-4ce7-a0f6-519826922082" (UID: "ec13a977-45f8-4ce7-a0f6-519826922082"). InnerVolumeSpecName "kube-api-access-4qw8k". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:48:21 crc kubenswrapper[4799]: I1010 17:48:21.325978 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec13a977-45f8-4ce7-a0f6-519826922082-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ec13a977-45f8-4ce7-a0f6-519826922082" (UID: "ec13a977-45f8-4ce7-a0f6-519826922082"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:48:21 crc kubenswrapper[4799]: I1010 17:48:21.342421 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec13a977-45f8-4ce7-a0f6-519826922082-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 17:48:21 crc kubenswrapper[4799]: I1010 17:48:21.342483 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4qw8k\" (UniqueName: \"kubernetes.io/projected/ec13a977-45f8-4ce7-a0f6-519826922082-kube-api-access-4qw8k\") on node \"crc\" DevicePath \"\"" Oct 10 17:48:21 crc kubenswrapper[4799]: I1010 17:48:21.342569 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec13a977-45f8-4ce7-a0f6-519826922082-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 17:48:21 crc kubenswrapper[4799]: I1010 17:48:21.522598 4799 generic.go:334] "Generic (PLEG): container finished" podID="ec13a977-45f8-4ce7-a0f6-519826922082" containerID="237f524b2696dc3e5c8bd79cb797aee29bbda9276692c62e581ea6db667e9786" exitCode=0 Oct 10 17:48:21 crc kubenswrapper[4799]: I1010 17:48:21.522684 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-d6pqk" Oct 10 17:48:21 crc kubenswrapper[4799]: I1010 17:48:21.522665 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d6pqk" event={"ID":"ec13a977-45f8-4ce7-a0f6-519826922082","Type":"ContainerDied","Data":"237f524b2696dc3e5c8bd79cb797aee29bbda9276692c62e581ea6db667e9786"} Oct 10 17:48:21 crc kubenswrapper[4799]: I1010 17:48:21.522853 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d6pqk" event={"ID":"ec13a977-45f8-4ce7-a0f6-519826922082","Type":"ContainerDied","Data":"7c403bb1e7ab5158c538d308f3d9a179c7fc200c9e2161183b0ac4e0bb9ef453"} Oct 10 17:48:21 crc kubenswrapper[4799]: I1010 17:48:21.522889 4799 scope.go:117] "RemoveContainer" containerID="237f524b2696dc3e5c8bd79cb797aee29bbda9276692c62e581ea6db667e9786" Oct 10 17:48:21 crc kubenswrapper[4799]: I1010 17:48:21.555462 4799 scope.go:117] "RemoveContainer" containerID="c06a1cbc31ba82557122666b5b853a4bc5b3699b918f29ef5bf3f34f5d84eb94" Oct 10 17:48:21 crc kubenswrapper[4799]: I1010 17:48:21.580577 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-d6pqk"] Oct 10 17:48:21 crc kubenswrapper[4799]: I1010 17:48:21.588580 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-d6pqk"] Oct 10 17:48:21 crc kubenswrapper[4799]: I1010 17:48:21.591302 4799 scope.go:117] "RemoveContainer" containerID="80057f9add4632ed74c0cf841ca531d27f3bd80defe60d584a209520f291fea7" Oct 10 17:48:21 crc kubenswrapper[4799]: I1010 17:48:21.630937 4799 scope.go:117] "RemoveContainer" containerID="237f524b2696dc3e5c8bd79cb797aee29bbda9276692c62e581ea6db667e9786" Oct 10 17:48:21 crc kubenswrapper[4799]: E1010 17:48:21.631424 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"237f524b2696dc3e5c8bd79cb797aee29bbda9276692c62e581ea6db667e9786\": container with ID starting with 237f524b2696dc3e5c8bd79cb797aee29bbda9276692c62e581ea6db667e9786 not found: ID does not exist" containerID="237f524b2696dc3e5c8bd79cb797aee29bbda9276692c62e581ea6db667e9786" Oct 10 17:48:21 crc kubenswrapper[4799]: I1010 17:48:21.631464 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"237f524b2696dc3e5c8bd79cb797aee29bbda9276692c62e581ea6db667e9786"} err="failed to get container status \"237f524b2696dc3e5c8bd79cb797aee29bbda9276692c62e581ea6db667e9786\": rpc error: code = NotFound desc = could not find container \"237f524b2696dc3e5c8bd79cb797aee29bbda9276692c62e581ea6db667e9786\": container with ID starting with 237f524b2696dc3e5c8bd79cb797aee29bbda9276692c62e581ea6db667e9786 not found: ID does not exist" Oct 10 17:48:21 crc kubenswrapper[4799]: I1010 17:48:21.631489 4799 scope.go:117] "RemoveContainer" containerID="c06a1cbc31ba82557122666b5b853a4bc5b3699b918f29ef5bf3f34f5d84eb94" Oct 10 17:48:21 crc kubenswrapper[4799]: E1010 17:48:21.632025 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c06a1cbc31ba82557122666b5b853a4bc5b3699b918f29ef5bf3f34f5d84eb94\": container with ID starting with c06a1cbc31ba82557122666b5b853a4bc5b3699b918f29ef5bf3f34f5d84eb94 not found: ID does not exist" containerID="c06a1cbc31ba82557122666b5b853a4bc5b3699b918f29ef5bf3f34f5d84eb94" Oct 10 17:48:21 crc kubenswrapper[4799]: I1010 17:48:21.632052 4799 
Oct 10 17:48:21 crc kubenswrapper[4799]: I1010 17:48:21.632072 4799 scope.go:117] "RemoveContainer" containerID="80057f9add4632ed74c0cf841ca531d27f3bd80defe60d584a209520f291fea7"
Oct 10 17:48:21 crc kubenswrapper[4799]: E1010 17:48:21.632315 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80057f9add4632ed74c0cf841ca531d27f3bd80defe60d584a209520f291fea7\": container with ID starting with 80057f9add4632ed74c0cf841ca531d27f3bd80defe60d584a209520f291fea7 not found: ID does not exist" containerID="80057f9add4632ed74c0cf841ca531d27f3bd80defe60d584a209520f291fea7"
Oct 10 17:48:21 crc kubenswrapper[4799]: I1010 17:48:21.632345 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80057f9add4632ed74c0cf841ca531d27f3bd80defe60d584a209520f291fea7"} err="failed to get container status \"80057f9add4632ed74c0cf841ca531d27f3bd80defe60d584a209520f291fea7\": rpc error: code = NotFound desc = could not find container \"80057f9add4632ed74c0cf841ca531d27f3bd80defe60d584a209520f291fea7\": container with ID starting with 80057f9add4632ed74c0cf841ca531d27f3bd80defe60d584a209520f291fea7 not found: ID does not exist"
Oct 10 17:48:23 crc kubenswrapper[4799]: I1010 17:48:23.421203 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec13a977-45f8-4ce7-a0f6-519826922082" path="/var/lib/kubelet/pods/ec13a977-45f8-4ce7-a0f6-519826922082/volumes"
Oct 10 17:48:49 crc kubenswrapper[4799]: I1010 17:48:49.182530 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-q2p65"]
Oct 10 17:48:49 crc kubenswrapper[4799]: E1010 17:48:49.183618 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec13a977-45f8-4ce7-a0f6-519826922082" containerName="extract-utilities"
Oct 10 17:48:49 crc kubenswrapper[4799]: I1010 17:48:49.183979 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec13a977-45f8-4ce7-a0f6-519826922082" containerName="extract-utilities"
Oct 10 17:48:49 crc kubenswrapper[4799]: E1010 17:48:49.184009 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec13a977-45f8-4ce7-a0f6-519826922082" containerName="extract-content"
Oct 10 17:48:49 crc kubenswrapper[4799]: I1010 17:48:49.184031 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec13a977-45f8-4ce7-a0f6-519826922082" containerName="extract-content"
Oct 10 17:48:49 crc kubenswrapper[4799]: E1010 17:48:49.184062 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec13a977-45f8-4ce7-a0f6-519826922082" containerName="registry-server"
Oct 10 17:48:49 crc kubenswrapper[4799]: I1010 17:48:49.184074 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec13a977-45f8-4ce7-a0f6-519826922082" containerName="registry-server"
Oct 10 17:48:49 crc kubenswrapper[4799]: I1010 17:48:49.184886 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec13a977-45f8-4ce7-a0f6-519826922082" containerName="registry-server"
Oct 10 17:48:49 crc kubenswrapper[4799]: I1010 17:48:49.187613 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-q2p65"
Oct 10 17:48:49 crc kubenswrapper[4799]: I1010 17:48:49.191463 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-q2p65"]
Oct 10 17:48:49 crc kubenswrapper[4799]: I1010 17:48:49.303128 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76fc75c4-41c8-4307-8144-206de5c7868e-catalog-content\") pod \"certified-operators-q2p65\" (UID: \"76fc75c4-41c8-4307-8144-206de5c7868e\") " pod="openshift-marketplace/certified-operators-q2p65"
Oct 10 17:48:49 crc kubenswrapper[4799]: I1010 17:48:49.303851 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvj4p\" (UniqueName: \"kubernetes.io/projected/76fc75c4-41c8-4307-8144-206de5c7868e-kube-api-access-wvj4p\") pod \"certified-operators-q2p65\" (UID: \"76fc75c4-41c8-4307-8144-206de5c7868e\") " pod="openshift-marketplace/certified-operators-q2p65"
Oct 10 17:48:49 crc kubenswrapper[4799]: I1010 17:48:49.304041 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76fc75c4-41c8-4307-8144-206de5c7868e-utilities\") pod \"certified-operators-q2p65\" (UID: \"76fc75c4-41c8-4307-8144-206de5c7868e\") " pod="openshift-marketplace/certified-operators-q2p65"
Oct 10 17:48:49 crc kubenswrapper[4799]: I1010 17:48:49.405202 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76fc75c4-41c8-4307-8144-206de5c7868e-catalog-content\") pod \"certified-operators-q2p65\" (UID: \"76fc75c4-41c8-4307-8144-206de5c7868e\") " pod="openshift-marketplace/certified-operators-q2p65"
Oct 10 17:48:49 crc kubenswrapper[4799]: I1010 17:48:49.405263 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvj4p\" (UniqueName: \"kubernetes.io/projected/76fc75c4-41c8-4307-8144-206de5c7868e-kube-api-access-wvj4p\") pod \"certified-operators-q2p65\" (UID: \"76fc75c4-41c8-4307-8144-206de5c7868e\") " pod="openshift-marketplace/certified-operators-q2p65"
Oct 10 17:48:49 crc kubenswrapper[4799]: I1010 17:48:49.405305 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76fc75c4-41c8-4307-8144-206de5c7868e-utilities\") pod \"certified-operators-q2p65\" (UID: \"76fc75c4-41c8-4307-8144-206de5c7868e\") " pod="openshift-marketplace/certified-operators-q2p65"
Oct 10 17:48:49 crc kubenswrapper[4799]: I1010 17:48:49.405810 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76fc75c4-41c8-4307-8144-206de5c7868e-utilities\") pod \"certified-operators-q2p65\" (UID: \"76fc75c4-41c8-4307-8144-206de5c7868e\") " pod="openshift-marketplace/certified-operators-q2p65"
Oct 10 17:48:49 crc kubenswrapper[4799]: I1010 17:48:49.405948 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76fc75c4-41c8-4307-8144-206de5c7868e-catalog-content\") pod \"certified-operators-q2p65\" (UID: \"76fc75c4-41c8-4307-8144-206de5c7868e\") " pod="openshift-marketplace/certified-operators-q2p65"
Oct 10 17:48:49 crc kubenswrapper[4799]: I1010 17:48:49.424160 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvj4p\" (UniqueName: \"kubernetes.io/projected/76fc75c4-41c8-4307-8144-206de5c7868e-kube-api-access-wvj4p\") pod \"certified-operators-q2p65\" (UID: \"76fc75c4-41c8-4307-8144-206de5c7868e\") " pod="openshift-marketplace/certified-operators-q2p65"
Oct 10 17:48:49 crc kubenswrapper[4799]: I1010 17:48:49.512868 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-q2p65"
Oct 10 17:48:50 crc kubenswrapper[4799]: I1010 17:48:50.035030 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-q2p65"]
Oct 10 17:48:50 crc kubenswrapper[4799]: I1010 17:48:50.808057 4799 generic.go:334] "Generic (PLEG): container finished" podID="76fc75c4-41c8-4307-8144-206de5c7868e" containerID="0588f0e212b875b3f746e7ad36724f2fe1d15f8d6e322f6b25cba3f8de01a016" exitCode=0
Oct 10 17:48:50 crc kubenswrapper[4799]: I1010 17:48:50.808123 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q2p65" event={"ID":"76fc75c4-41c8-4307-8144-206de5c7868e","Type":"ContainerDied","Data":"0588f0e212b875b3f746e7ad36724f2fe1d15f8d6e322f6b25cba3f8de01a016"}
Oct 10 17:48:50 crc kubenswrapper[4799]: I1010 17:48:50.808865 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q2p65" event={"ID":"76fc75c4-41c8-4307-8144-206de5c7868e","Type":"ContainerStarted","Data":"6b45a0a6c7304c974c90bd860f2fdf62483b8f26c9c44d7d00ae655b4a514e0c"}
Oct 10 17:48:51 crc kubenswrapper[4799]: I1010 17:48:51.819358 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q2p65" event={"ID":"76fc75c4-41c8-4307-8144-206de5c7868e","Type":"ContainerStarted","Data":"695dd3b56f1cdc81aeb694c252b545d6d0f0142c61111fca6014a74629d17c59"}
Oct 10 17:48:52 crc kubenswrapper[4799]: I1010 17:48:52.831418 4799 generic.go:334] "Generic (PLEG): container finished" podID="76fc75c4-41c8-4307-8144-206de5c7868e" containerID="695dd3b56f1cdc81aeb694c252b545d6d0f0142c61111fca6014a74629d17c59" exitCode=0
Oct 10 17:48:52 crc kubenswrapper[4799]: I1010 17:48:52.831470 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q2p65" event={"ID":"76fc75c4-41c8-4307-8144-206de5c7868e","Type":"ContainerDied","Data":"695dd3b56f1cdc81aeb694c252b545d6d0f0142c61111fca6014a74629d17c59"}
Oct 10 17:48:53 crc kubenswrapper[4799]: I1010 17:48:53.848642 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q2p65" event={"ID":"76fc75c4-41c8-4307-8144-206de5c7868e","Type":"ContainerStarted","Data":"34b87f40ea9dafe0e759db0f10ee8daf9fa8d26424626d1979ffcca236b64129"}
Oct 10 17:48:59 crc kubenswrapper[4799]: I1010 17:48:59.514016 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-q2p65"
Oct 10 17:48:59 crc kubenswrapper[4799]: I1010 17:48:59.514384 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-q2p65"
Oct 10 17:48:59 crc kubenswrapper[4799]: I1010 17:48:59.586465 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-q2p65"
Oct 10 17:48:59 crc kubenswrapper[4799]: I1010 17:48:59.606116 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-q2p65" podStartSLOduration=7.828872121 podStartE2EDuration="10.606073282s" podCreationTimestamp="2025-10-10 17:48:49 +0000 UTC" firstStartedPulling="2025-10-10 17:48:50.809870111 +0000 UTC m=+4624.318194216" lastFinishedPulling="2025-10-10 17:48:53.587071262 +0000 UTC m=+4627.095395377" observedRunningTime="2025-10-10 17:48:53.870086311 +0000 UTC m=+4627.378410446" watchObservedRunningTime="2025-10-10 17:48:59.606073282 +0000 UTC m=+4633.114397447"
Oct 10 17:48:59 crc kubenswrapper[4799]: I1010 17:48:59.977667 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-q2p65"
Oct 10 17:49:00 crc kubenswrapper[4799]: I1010 17:49:00.045517 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-q2p65"]
Oct 10 17:49:01 crc kubenswrapper[4799]: I1010 17:49:01.919352 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-q2p65" podUID="76fc75c4-41c8-4307-8144-206de5c7868e" containerName="registry-server" containerID="cri-o://34b87f40ea9dafe0e759db0f10ee8daf9fa8d26424626d1979ffcca236b64129" gracePeriod=2
Oct 10 17:49:02 crc kubenswrapper[4799]: I1010 17:49:02.952600 4799 generic.go:334] "Generic (PLEG): container finished" podID="76fc75c4-41c8-4307-8144-206de5c7868e" containerID="34b87f40ea9dafe0e759db0f10ee8daf9fa8d26424626d1979ffcca236b64129" exitCode=0
Oct 10 17:49:02 crc kubenswrapper[4799]: I1010 17:49:02.952773 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q2p65" event={"ID":"76fc75c4-41c8-4307-8144-206de5c7868e","Type":"ContainerDied","Data":"34b87f40ea9dafe0e759db0f10ee8daf9fa8d26424626d1979ffcca236b64129"}
Oct 10 17:49:03 crc kubenswrapper[4799]: I1010 17:49:03.337456 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-q2p65"
Oct 10 17:49:03 crc kubenswrapper[4799]: I1010 17:49:03.429203 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76fc75c4-41c8-4307-8144-206de5c7868e-catalog-content\") pod \"76fc75c4-41c8-4307-8144-206de5c7868e\" (UID: \"76fc75c4-41c8-4307-8144-206de5c7868e\") "
Oct 10 17:49:03 crc kubenswrapper[4799]: I1010 17:49:03.429305 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wvj4p\" (UniqueName: \"kubernetes.io/projected/76fc75c4-41c8-4307-8144-206de5c7868e-kube-api-access-wvj4p\") pod \"76fc75c4-41c8-4307-8144-206de5c7868e\" (UID: \"76fc75c4-41c8-4307-8144-206de5c7868e\") "
Oct 10 17:49:03 crc kubenswrapper[4799]: I1010 17:49:03.429374 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76fc75c4-41c8-4307-8144-206de5c7868e-utilities\") pod \"76fc75c4-41c8-4307-8144-206de5c7868e\" (UID: \"76fc75c4-41c8-4307-8144-206de5c7868e\") "
Oct 10 17:49:03 crc kubenswrapper[4799]: I1010 17:49:03.430598 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/76fc75c4-41c8-4307-8144-206de5c7868e-utilities" (OuterVolumeSpecName: "utilities") pod "76fc75c4-41c8-4307-8144-206de5c7868e" (UID: "76fc75c4-41c8-4307-8144-206de5c7868e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 17:49:03 crc kubenswrapper[4799]: I1010 17:49:03.439026 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76fc75c4-41c8-4307-8144-206de5c7868e-kube-api-access-wvj4p" (OuterVolumeSpecName: "kube-api-access-wvj4p") pod "76fc75c4-41c8-4307-8144-206de5c7868e" (UID: "76fc75c4-41c8-4307-8144-206de5c7868e"). InnerVolumeSpecName "kube-api-access-wvj4p". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 17:49:03 crc kubenswrapper[4799]: I1010 17:49:03.489427 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/76fc75c4-41c8-4307-8144-206de5c7868e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "76fc75c4-41c8-4307-8144-206de5c7868e" (UID: "76fc75c4-41c8-4307-8144-206de5c7868e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 17:49:03 crc kubenswrapper[4799]: I1010 17:49:03.534065 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76fc75c4-41c8-4307-8144-206de5c7868e-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 10 17:49:03 crc kubenswrapper[4799]: I1010 17:49:03.534091 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wvj4p\" (UniqueName: \"kubernetes.io/projected/76fc75c4-41c8-4307-8144-206de5c7868e-kube-api-access-wvj4p\") on node \"crc\" DevicePath \"\""
Oct 10 17:49:03 crc kubenswrapper[4799]: I1010 17:49:03.534102 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76fc75c4-41c8-4307-8144-206de5c7868e-utilities\") on node \"crc\" DevicePath \"\""
Oct 10 17:49:03 crc kubenswrapper[4799]: I1010 17:49:03.964775 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q2p65" event={"ID":"76fc75c4-41c8-4307-8144-206de5c7868e","Type":"ContainerDied","Data":"6b45a0a6c7304c974c90bd860f2fdf62483b8f26c9c44d7d00ae655b4a514e0c"}
Oct 10 17:49:03 crc kubenswrapper[4799]: I1010 17:49:03.964836 4799 scope.go:117] "RemoveContainer" containerID="34b87f40ea9dafe0e759db0f10ee8daf9fa8d26424626d1979ffcca236b64129"
Oct 10 17:49:03 crc kubenswrapper[4799]: I1010 17:49:03.964951 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-q2p65" Oct 10 17:49:03 crc kubenswrapper[4799]: I1010 17:49:03.989343 4799 scope.go:117] "RemoveContainer" containerID="695dd3b56f1cdc81aeb694c252b545d6d0f0142c61111fca6014a74629d17c59" Oct 10 17:49:04 crc kubenswrapper[4799]: I1010 17:49:04.013003 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-q2p65"] Oct 10 17:49:04 crc kubenswrapper[4799]: I1010 17:49:04.019641 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-q2p65"] Oct 10 17:49:04 crc kubenswrapper[4799]: I1010 17:49:04.021153 4799 scope.go:117] "RemoveContainer" containerID="0588f0e212b875b3f746e7ad36724f2fe1d15f8d6e322f6b25cba3f8de01a016" Oct 10 17:49:05 crc kubenswrapper[4799]: I1010 17:49:05.420555 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76fc75c4-41c8-4307-8144-206de5c7868e" path="/var/lib/kubelet/pods/76fc75c4-41c8-4307-8144-206de5c7868e/volumes" Oct 10 17:49:25 crc kubenswrapper[4799]: I1010 17:49:25.280343 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-85f98b87f9-tlq4r"] Oct 10 17:49:25 crc kubenswrapper[4799]: E1010 17:49:25.281079 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76fc75c4-41c8-4307-8144-206de5c7868e" containerName="extract-utilities" Oct 10 17:49:25 crc kubenswrapper[4799]: I1010 17:49:25.281091 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="76fc75c4-41c8-4307-8144-206de5c7868e" containerName="extract-utilities" Oct 10 17:49:25 crc kubenswrapper[4799]: E1010 17:49:25.281107 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76fc75c4-41c8-4307-8144-206de5c7868e" containerName="extract-content" Oct 10 17:49:25 crc kubenswrapper[4799]: I1010 17:49:25.281114 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="76fc75c4-41c8-4307-8144-206de5c7868e" containerName="extract-content" Oct 10 17:49:25 crc kubenswrapper[4799]: E1010 17:49:25.281131 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76fc75c4-41c8-4307-8144-206de5c7868e" containerName="registry-server" Oct 10 17:49:25 crc kubenswrapper[4799]: I1010 17:49:25.285606 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="76fc75c4-41c8-4307-8144-206de5c7868e" containerName="registry-server" Oct 10 17:49:25 crc kubenswrapper[4799]: I1010 17:49:25.285937 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="76fc75c4-41c8-4307-8144-206de5c7868e" containerName="registry-server" Oct 10 17:49:25 crc kubenswrapper[4799]: I1010 17:49:25.287104 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-85f98b87f9-tlq4r" Oct 10 17:49:25 crc kubenswrapper[4799]: I1010 17:49:25.289724 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85f98b87f9-tlq4r"] Oct 10 17:49:25 crc kubenswrapper[4799]: I1010 17:49:25.300790 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Oct 10 17:49:25 crc kubenswrapper[4799]: I1010 17:49:25.300942 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Oct 10 17:49:25 crc kubenswrapper[4799]: I1010 17:49:25.301021 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Oct 10 17:49:25 crc kubenswrapper[4799]: I1010 17:49:25.301034 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-lw6x2" Oct 10 17:49:25 crc kubenswrapper[4799]: I1010 17:49:25.301164 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Oct 10 17:49:25 crc kubenswrapper[4799]: I1010 17:49:25.361134 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67d9f7fb89-s2cmk"] Oct 10 17:49:25 crc kubenswrapper[4799]: I1010 17:49:25.366845 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67d9f7fb89-s2cmk" Oct 10 17:49:25 crc kubenswrapper[4799]: I1010 17:49:25.368795 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6-config\") pod \"dnsmasq-dns-85f98b87f9-tlq4r\" (UID: \"0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6\") " pod="openstack/dnsmasq-dns-85f98b87f9-tlq4r" Oct 10 17:49:25 crc kubenswrapper[4799]: I1010 17:49:25.368822 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9qcg\" (UniqueName: \"kubernetes.io/projected/0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6-kube-api-access-m9qcg\") pod \"dnsmasq-dns-85f98b87f9-tlq4r\" (UID: \"0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6\") " pod="openstack/dnsmasq-dns-85f98b87f9-tlq4r" Oct 10 17:49:25 crc kubenswrapper[4799]: I1010 17:49:25.368878 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6-dns-svc\") pod \"dnsmasq-dns-85f98b87f9-tlq4r\" (UID: \"0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6\") " pod="openstack/dnsmasq-dns-85f98b87f9-tlq4r" Oct 10 17:49:25 crc kubenswrapper[4799]: I1010 17:49:25.371710 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67d9f7fb89-s2cmk"] Oct 10 17:49:25 crc kubenswrapper[4799]: I1010 17:49:25.470354 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6-config\") pod \"dnsmasq-dns-85f98b87f9-tlq4r\" (UID: \"0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6\") " pod="openstack/dnsmasq-dns-85f98b87f9-tlq4r" Oct 10 17:49:25 crc kubenswrapper[4799]: I1010 17:49:25.470400 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9qcg\" (UniqueName: \"kubernetes.io/projected/0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6-kube-api-access-m9qcg\") pod \"dnsmasq-dns-85f98b87f9-tlq4r\" (UID: \"0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6\") " pod="openstack/dnsmasq-dns-85f98b87f9-tlq4r" Oct 10 
17:49:25 crc kubenswrapper[4799]: I1010 17:49:25.470456 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/922d8c7c-9feb-408b-a0df-fec585601827-dns-svc\") pod \"dnsmasq-dns-67d9f7fb89-s2cmk\" (UID: \"922d8c7c-9feb-408b-a0df-fec585601827\") " pod="openstack/dnsmasq-dns-67d9f7fb89-s2cmk" Oct 10 17:49:25 crc kubenswrapper[4799]: I1010 17:49:25.470478 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6-dns-svc\") pod \"dnsmasq-dns-85f98b87f9-tlq4r\" (UID: \"0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6\") " pod="openstack/dnsmasq-dns-85f98b87f9-tlq4r" Oct 10 17:49:25 crc kubenswrapper[4799]: I1010 17:49:25.470525 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brjtp\" (UniqueName: \"kubernetes.io/projected/922d8c7c-9feb-408b-a0df-fec585601827-kube-api-access-brjtp\") pod \"dnsmasq-dns-67d9f7fb89-s2cmk\" (UID: \"922d8c7c-9feb-408b-a0df-fec585601827\") " pod="openstack/dnsmasq-dns-67d9f7fb89-s2cmk" Oct 10 17:49:25 crc kubenswrapper[4799]: I1010 17:49:25.470552 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/922d8c7c-9feb-408b-a0df-fec585601827-config\") pod \"dnsmasq-dns-67d9f7fb89-s2cmk\" (UID: \"922d8c7c-9feb-408b-a0df-fec585601827\") " pod="openstack/dnsmasq-dns-67d9f7fb89-s2cmk" Oct 10 17:49:25 crc kubenswrapper[4799]: I1010 17:49:25.471356 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6-config\") pod \"dnsmasq-dns-85f98b87f9-tlq4r\" (UID: \"0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6\") " pod="openstack/dnsmasq-dns-85f98b87f9-tlq4r" Oct 10 17:49:25 crc kubenswrapper[4799]: I1010 17:49:25.471659 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6-dns-svc\") pod \"dnsmasq-dns-85f98b87f9-tlq4r\" (UID: \"0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6\") " pod="openstack/dnsmasq-dns-85f98b87f9-tlq4r" Oct 10 17:49:25 crc kubenswrapper[4799]: I1010 17:49:25.489874 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9qcg\" (UniqueName: \"kubernetes.io/projected/0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6-kube-api-access-m9qcg\") pod \"dnsmasq-dns-85f98b87f9-tlq4r\" (UID: \"0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6\") " pod="openstack/dnsmasq-dns-85f98b87f9-tlq4r" Oct 10 17:49:25 crc kubenswrapper[4799]: I1010 17:49:25.571344 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brjtp\" (UniqueName: \"kubernetes.io/projected/922d8c7c-9feb-408b-a0df-fec585601827-kube-api-access-brjtp\") pod \"dnsmasq-dns-67d9f7fb89-s2cmk\" (UID: \"922d8c7c-9feb-408b-a0df-fec585601827\") " pod="openstack/dnsmasq-dns-67d9f7fb89-s2cmk" Oct 10 17:49:25 crc kubenswrapper[4799]: I1010 17:49:25.571414 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/922d8c7c-9feb-408b-a0df-fec585601827-config\") pod \"dnsmasq-dns-67d9f7fb89-s2cmk\" (UID: \"922d8c7c-9feb-408b-a0df-fec585601827\") " pod="openstack/dnsmasq-dns-67d9f7fb89-s2cmk" Oct 10 17:49:25 crc kubenswrapper[4799]: I1010 17:49:25.571530 4799 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/922d8c7c-9feb-408b-a0df-fec585601827-dns-svc\") pod \"dnsmasq-dns-67d9f7fb89-s2cmk\" (UID: \"922d8c7c-9feb-408b-a0df-fec585601827\") " pod="openstack/dnsmasq-dns-67d9f7fb89-s2cmk" Oct 10 17:49:25 crc kubenswrapper[4799]: I1010 17:49:25.572672 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/922d8c7c-9feb-408b-a0df-fec585601827-dns-svc\") pod \"dnsmasq-dns-67d9f7fb89-s2cmk\" (UID: \"922d8c7c-9feb-408b-a0df-fec585601827\") " pod="openstack/dnsmasq-dns-67d9f7fb89-s2cmk" Oct 10 17:49:25 crc kubenswrapper[4799]: I1010 17:49:25.572983 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/922d8c7c-9feb-408b-a0df-fec585601827-config\") pod \"dnsmasq-dns-67d9f7fb89-s2cmk\" (UID: \"922d8c7c-9feb-408b-a0df-fec585601827\") " pod="openstack/dnsmasq-dns-67d9f7fb89-s2cmk" Oct 10 17:49:25 crc kubenswrapper[4799]: I1010 17:49:25.593570 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brjtp\" (UniqueName: \"kubernetes.io/projected/922d8c7c-9feb-408b-a0df-fec585601827-kube-api-access-brjtp\") pod \"dnsmasq-dns-67d9f7fb89-s2cmk\" (UID: \"922d8c7c-9feb-408b-a0df-fec585601827\") " pod="openstack/dnsmasq-dns-67d9f7fb89-s2cmk" Oct 10 17:49:25 crc kubenswrapper[4799]: I1010 17:49:25.610623 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85f98b87f9-tlq4r" Oct 10 17:49:25 crc kubenswrapper[4799]: I1010 17:49:25.694680 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67d9f7fb89-s2cmk" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.235249 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.237486 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.240204 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.240585 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.240684 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.240712 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.240907 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-wr9qh" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.249283 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.380445 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") " pod="openstack/rabbitmq-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.380496 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") " pod="openstack/rabbitmq-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.380522 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") " pod="openstack/rabbitmq-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.380675 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") " pod="openstack/rabbitmq-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.380859 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-4a167392-8227-452d-8172-17e1bbf510ef\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4a167392-8227-452d-8172-17e1bbf510ef\") pod \"rabbitmq-server-0\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") " pod="openstack/rabbitmq-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.380919 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") " pod="openstack/rabbitmq-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.380957 4799 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") " pod="openstack/rabbitmq-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.381013 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") " pod="openstack/rabbitmq-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.381078 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zff5c\" (UniqueName: \"kubernetes.io/projected/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-kube-api-access-zff5c\") pod \"rabbitmq-server-0\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") " pod="openstack/rabbitmq-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.482668 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") " pod="openstack/rabbitmq-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.482724 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") " pod="openstack/rabbitmq-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.482786 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-4a167392-8227-452d-8172-17e1bbf510ef\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4a167392-8227-452d-8172-17e1bbf510ef\") pod \"rabbitmq-server-0\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") " pod="openstack/rabbitmq-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.482813 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") " pod="openstack/rabbitmq-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.482835 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") " pod="openstack/rabbitmq-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.482856 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") " pod="openstack/rabbitmq-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.482877 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zff5c\" (UniqueName: 
\"kubernetes.io/projected/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-kube-api-access-zff5c\") pod \"rabbitmq-server-0\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") " pod="openstack/rabbitmq-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.483520 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") " pod="openstack/rabbitmq-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.483052 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") " pod="openstack/rabbitmq-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.483810 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") " pod="openstack/rabbitmq-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.483910 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") " pod="openstack/rabbitmq-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.484131 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") " pod="openstack/rabbitmq-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.484511 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") " pod="openstack/rabbitmq-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.488553 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") " pod="openstack/rabbitmq-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.488879 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") " pod="openstack/rabbitmq-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.491510 4799 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.491542 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-4a167392-8227-452d-8172-17e1bbf510ef\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4a167392-8227-452d-8172-17e1bbf510ef\") pod \"rabbitmq-server-0\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/82558c7cfd982b1a59463f8c8923da7fd95b3bd579c05021a4870e1ffc5887b8/globalmount\"" pod="openstack/rabbitmq-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.500036 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") " pod="openstack/rabbitmq-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.500238 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zff5c\" (UniqueName: \"kubernetes.io/projected/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-kube-api-access-zff5c\") pod \"rabbitmq-server-0\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") " pod="openstack/rabbitmq-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.520298 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-4a167392-8227-452d-8172-17e1bbf510ef\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4a167392-8227-452d-8172-17e1bbf510ef\") pod \"rabbitmq-server-0\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") " pod="openstack/rabbitmq-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.553597 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.554737 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.557345 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.557534 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-fp6hb" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.558272 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.558485 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.559468 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.569101 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.576295 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.636835 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85f98b87f9-tlq4r"] Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.646350 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67d9f7fb89-s2cmk"] Oct 10 17:49:26 crc kubenswrapper[4799]: W1010 17:49:26.647072 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0aec4be0_7a9c_4ee0_a2a7_b78df43ae1b6.slice/crio-3ed7afde72bac53e1a4508b4bfc651f7a8d1f6e2950d7cbee732ae50726de204 WatchSource:0}: Error finding container 3ed7afde72bac53e1a4508b4bfc651f7a8d1f6e2950d7cbee732ae50726de204: Status 404 returned error can't find the container with id 3ed7afde72bac53e1a4508b4bfc651f7a8d1f6e2950d7cbee732ae50726de204 Oct 10 17:49:26 crc kubenswrapper[4799]: W1010 17:49:26.647481 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod922d8c7c_9feb_408b_a0df_fec585601827.slice/crio-a218d59ac8c7586e462510f6b212938f78e17bcfcc0b8e01d439fcad7ef03821 WatchSource:0}: Error finding container a218d59ac8c7586e462510f6b212938f78e17bcfcc0b8e01d439fcad7ef03821: Status 404 returned error can't find the container with id a218d59ac8c7586e462510f6b212938f78e17bcfcc0b8e01d439fcad7ef03821 Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.690665 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/33099395-9065-4b94-95bb-70154f26962e-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.690714 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c54403e4-df22-42f7-8ce9-f2b71f958c3e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c54403e4-df22-42f7-8ce9-f2b71f958c3e\") pod \"rabbitmq-cell1-server-0\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.690747 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/33099395-9065-4b94-95bb-70154f26962e-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.690802 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/33099395-9065-4b94-95bb-70154f26962e-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.690847 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/33099395-9065-4b94-95bb-70154f26962e-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.690963 
4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/33099395-9065-4b94-95bb-70154f26962e-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.691019 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/33099395-9065-4b94-95bb-70154f26962e-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.691227 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/33099395-9065-4b94-95bb-70154f26962e-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.691257 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4c65n\" (UniqueName: \"kubernetes.io/projected/33099395-9065-4b94-95bb-70154f26962e-kube-api-access-4c65n\") pod \"rabbitmq-cell1-server-0\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.792477 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/33099395-9065-4b94-95bb-70154f26962e-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.792517 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4c65n\" (UniqueName: \"kubernetes.io/projected/33099395-9065-4b94-95bb-70154f26962e-kube-api-access-4c65n\") pod \"rabbitmq-cell1-server-0\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.792546 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/33099395-9065-4b94-95bb-70154f26962e-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.792578 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c54403e4-df22-42f7-8ce9-f2b71f958c3e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c54403e4-df22-42f7-8ce9-f2b71f958c3e\") pod \"rabbitmq-cell1-server-0\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.792605 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/33099395-9065-4b94-95bb-70154f26962e-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 17:49:26 crc 
kubenswrapper[4799]: I1010 17:49:26.792687 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/33099395-9065-4b94-95bb-70154f26962e-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.792727 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/33099395-9065-4b94-95bb-70154f26962e-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.792771 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/33099395-9065-4b94-95bb-70154f26962e-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.792797 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/33099395-9065-4b94-95bb-70154f26962e-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.793154 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/33099395-9065-4b94-95bb-70154f26962e-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.793616 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/33099395-9065-4b94-95bb-70154f26962e-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.793982 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/33099395-9065-4b94-95bb-70154f26962e-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.794703 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/33099395-9065-4b94-95bb-70154f26962e-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.797236 4799 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.797277 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c54403e4-df22-42f7-8ce9-f2b71f958c3e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c54403e4-df22-42f7-8ce9-f2b71f958c3e\") pod \"rabbitmq-cell1-server-0\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/b4349c2768ef6a0a004a45a21e08426036165f9270846e8d1ddb681030198662/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.798344 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/33099395-9065-4b94-95bb-70154f26962e-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.798893 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/33099395-9065-4b94-95bb-70154f26962e-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.800234 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/33099395-9065-4b94-95bb-70154f26962e-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.815459 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4c65n\" (UniqueName: \"kubernetes.io/projected/33099395-9065-4b94-95bb-70154f26962e-kube-api-access-4c65n\") pod \"rabbitmq-cell1-server-0\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.831008 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c54403e4-df22-42f7-8ce9-f2b71f958c3e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c54403e4-df22-42f7-8ce9-f2b71f958c3e\") pod \"rabbitmq-cell1-server-0\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.831342 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 10 17:49:26 crc kubenswrapper[4799]: W1010 17:49:26.834367 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dae848a_4c75_4ddc_9b6d_b64f127eba9b.slice/crio-ae9c26f40a4dc4424714797620b312bbe7dd78cfe023882101e1f8da35e992ed WatchSource:0}: Error finding container ae9c26f40a4dc4424714797620b312bbe7dd78cfe023882101e1f8da35e992ed: Status 404 returned error can't find the container with id ae9c26f40a4dc4424714797620b312bbe7dd78cfe023882101e1f8da35e992ed Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.847887 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.849336 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.851961 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-7g24v" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.852097 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.853063 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.855110 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.855459 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.860835 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.874883 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.893865 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.995343 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/78d6963b-d770-4b29-b6de-a09b36913cc0-config-data-default\") pod \"openstack-galera-0\" (UID: \"78d6963b-d770-4b29-b6de-a09b36913cc0\") " pod="openstack/openstack-galera-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.995395 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/78d6963b-d770-4b29-b6de-a09b36913cc0-operator-scripts\") pod \"openstack-galera-0\" (UID: \"78d6963b-d770-4b29-b6de-a09b36913cc0\") " pod="openstack/openstack-galera-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.995615 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/78d6963b-d770-4b29-b6de-a09b36913cc0-secrets\") pod \"openstack-galera-0\" (UID: \"78d6963b-d770-4b29-b6de-a09b36913cc0\") " pod="openstack/openstack-galera-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.995746 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvvrh\" (UniqueName: \"kubernetes.io/projected/78d6963b-d770-4b29-b6de-a09b36913cc0-kube-api-access-gvvrh\") pod \"openstack-galera-0\" (UID: \"78d6963b-d770-4b29-b6de-a09b36913cc0\") " pod="openstack/openstack-galera-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.995860 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/78d6963b-d770-4b29-b6de-a09b36913cc0-kolla-config\") pod \"openstack-galera-0\" (UID: \"78d6963b-d770-4b29-b6de-a09b36913cc0\") " pod="openstack/openstack-galera-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.995885 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/78d6963b-d770-4b29-b6de-a09b36913cc0-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"78d6963b-d770-4b29-b6de-a09b36913cc0\") " pod="openstack/openstack-galera-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.995930 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/78d6963b-d770-4b29-b6de-a09b36913cc0-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"78d6963b-d770-4b29-b6de-a09b36913cc0\") " pod="openstack/openstack-galera-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.995970 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-d95e3240-9769-410a-a2ee-fb4b509cd43d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d95e3240-9769-410a-a2ee-fb4b509cd43d\") pod \"openstack-galera-0\" (UID: \"78d6963b-d770-4b29-b6de-a09b36913cc0\") " pod="openstack/openstack-galera-0" Oct 10 17:49:26 crc kubenswrapper[4799]: I1010 17:49:26.996095 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/78d6963b-d770-4b29-b6de-a09b36913cc0-config-data-generated\") pod \"openstack-galera-0\" (UID: \"78d6963b-d770-4b29-b6de-a09b36913cc0\") " pod="openstack/openstack-galera-0" Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.097576 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/78d6963b-d770-4b29-b6de-a09b36913cc0-config-data-generated\") pod \"openstack-galera-0\" (UID: \"78d6963b-d770-4b29-b6de-a09b36913cc0\") " pod="openstack/openstack-galera-0" Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.097689 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/78d6963b-d770-4b29-b6de-a09b36913cc0-config-data-default\") pod \"openstack-galera-0\" (UID: \"78d6963b-d770-4b29-b6de-a09b36913cc0\") " pod="openstack/openstack-galera-0" Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.097717 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/78d6963b-d770-4b29-b6de-a09b36913cc0-operator-scripts\") pod \"openstack-galera-0\" (UID: \"78d6963b-d770-4b29-b6de-a09b36913cc0\") " pod="openstack/openstack-galera-0" Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.097799 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/78d6963b-d770-4b29-b6de-a09b36913cc0-secrets\") pod \"openstack-galera-0\" (UID: \"78d6963b-d770-4b29-b6de-a09b36913cc0\") " pod="openstack/openstack-galera-0" Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.097826 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvvrh\" (UniqueName: \"kubernetes.io/projected/78d6963b-d770-4b29-b6de-a09b36913cc0-kube-api-access-gvvrh\") pod \"openstack-galera-0\" (UID: \"78d6963b-d770-4b29-b6de-a09b36913cc0\") " pod="openstack/openstack-galera-0" Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.097858 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78d6963b-d770-4b29-b6de-a09b36913cc0-combined-ca-bundle\") pod 
\"openstack-galera-0\" (UID: \"78d6963b-d770-4b29-b6de-a09b36913cc0\") " pod="openstack/openstack-galera-0" Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.097879 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/78d6963b-d770-4b29-b6de-a09b36913cc0-kolla-config\") pod \"openstack-galera-0\" (UID: \"78d6963b-d770-4b29-b6de-a09b36913cc0\") " pod="openstack/openstack-galera-0" Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.097911 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/78d6963b-d770-4b29-b6de-a09b36913cc0-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"78d6963b-d770-4b29-b6de-a09b36913cc0\") " pod="openstack/openstack-galera-0" Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.097944 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-d95e3240-9769-410a-a2ee-fb4b509cd43d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d95e3240-9769-410a-a2ee-fb4b509cd43d\") pod \"openstack-galera-0\" (UID: \"78d6963b-d770-4b29-b6de-a09b36913cc0\") " pod="openstack/openstack-galera-0" Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.098135 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/78d6963b-d770-4b29-b6de-a09b36913cc0-config-data-generated\") pod \"openstack-galera-0\" (UID: \"78d6963b-d770-4b29-b6de-a09b36913cc0\") " pod="openstack/openstack-galera-0" Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.098657 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/78d6963b-d770-4b29-b6de-a09b36913cc0-kolla-config\") pod \"openstack-galera-0\" (UID: \"78d6963b-d770-4b29-b6de-a09b36913cc0\") " pod="openstack/openstack-galera-0" Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.099862 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/78d6963b-d770-4b29-b6de-a09b36913cc0-operator-scripts\") pod \"openstack-galera-0\" (UID: \"78d6963b-d770-4b29-b6de-a09b36913cc0\") " pod="openstack/openstack-galera-0" Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.101891 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/78d6963b-d770-4b29-b6de-a09b36913cc0-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"78d6963b-d770-4b29-b6de-a09b36913cc0\") " pod="openstack/openstack-galera-0" Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.102430 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78d6963b-d770-4b29-b6de-a09b36913cc0-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"78d6963b-d770-4b29-b6de-a09b36913cc0\") " pod="openstack/openstack-galera-0" Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.103159 4799 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.103193 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-d95e3240-9769-410a-a2ee-fb4b509cd43d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d95e3240-9769-410a-a2ee-fb4b509cd43d\") pod \"openstack-galera-0\" (UID: \"78d6963b-d770-4b29-b6de-a09b36913cc0\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/b311f2d8def1438ab0bed2081c494276e3477e71600d7daa1c46a15f8b323973/globalmount\"" pod="openstack/openstack-galera-0"
Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.105197 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/78d6963b-d770-4b29-b6de-a09b36913cc0-config-data-default\") pod \"openstack-galera-0\" (UID: \"78d6963b-d770-4b29-b6de-a09b36913cc0\") " pod="openstack/openstack-galera-0"
Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.105705 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/78d6963b-d770-4b29-b6de-a09b36913cc0-secrets\") pod \"openstack-galera-0\" (UID: \"78d6963b-d770-4b29-b6de-a09b36913cc0\") " pod="openstack/openstack-galera-0"
Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.119473 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gvvrh\" (UniqueName: \"kubernetes.io/projected/78d6963b-d770-4b29-b6de-a09b36913cc0-kube-api-access-gvvrh\") pod \"openstack-galera-0\" (UID: \"78d6963b-d770-4b29-b6de-a09b36913cc0\") " pod="openstack/openstack-galera-0"
Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.137246 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-d95e3240-9769-410a-a2ee-fb4b509cd43d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d95e3240-9769-410a-a2ee-fb4b509cd43d\") pod \"openstack-galera-0\" (UID: \"78d6963b-d770-4b29-b6de-a09b36913cc0\") " pod="openstack/openstack-galera-0"
Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.171903 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.172474 4799 generic.go:334] "Generic (PLEG): container finished" podID="922d8c7c-9feb-408b-a0df-fec585601827" containerID="04cd40e92ddb78ac09bb92bf93a216f39d4afa35b3e6d4078dc30092faa68e05" exitCode=0
Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.172573 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67d9f7fb89-s2cmk" event={"ID":"922d8c7c-9feb-408b-a0df-fec585601827","Type":"ContainerDied","Data":"04cd40e92ddb78ac09bb92bf93a216f39d4afa35b3e6d4078dc30092faa68e05"}
Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.172601 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67d9f7fb89-s2cmk" event={"ID":"922d8c7c-9feb-408b-a0df-fec585601827","Type":"ContainerStarted","Data":"a218d59ac8c7586e462510f6b212938f78e17bcfcc0b8e01d439fcad7ef03821"}
Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.175204 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"3dae848a-4c75-4ddc-9b6d-b64f127eba9b","Type":"ContainerStarted","Data":"ae9c26f40a4dc4424714797620b312bbe7dd78cfe023882101e1f8da35e992ed"}
Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.180809 4799 generic.go:334] "Generic (PLEG): container finished" podID="0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6" containerID="9da25e2005fef19c9b7c9e5bc293b67ff80e3a021ab14507ba061058856369fe" exitCode=0
Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.180848 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85f98b87f9-tlq4r" event={"ID":"0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6","Type":"ContainerDied","Data":"9da25e2005fef19c9b7c9e5bc293b67ff80e3a021ab14507ba061058856369fe"}
Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.180884 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85f98b87f9-tlq4r" event={"ID":"0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6","Type":"ContainerStarted","Data":"3ed7afde72bac53e1a4508b4bfc651f7a8d1f6e2950d7cbee732ae50726de204"}
Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.255701 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"]
Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.256982 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0"
Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.262351 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data"
Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.262533 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-2ckkg"
Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.274552 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"]
Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.356648 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 10 17:49:27 crc kubenswrapper[4799]: W1010 17:49:27.361191 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod33099395_9065_4b94_95bb_70154f26962e.slice/crio-109ee61cc172613d038184c41cca98b6b1c233c891d1a1ea411577b226a5a91b WatchSource:0}: Error finding container 109ee61cc172613d038184c41cca98b6b1c233c891d1a1ea411577b226a5a91b: Status 404 returned error can't find the container with id 109ee61cc172613d038184c41cca98b6b1c233c891d1a1ea411577b226a5a91b
Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.407583 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvgdd\" (UniqueName: \"kubernetes.io/projected/3e6ff0b3-5b8c-4b08-8351-a25ee4071299-kube-api-access-pvgdd\") pod \"memcached-0\" (UID: \"3e6ff0b3-5b8c-4b08-8351-a25ee4071299\") " pod="openstack/memcached-0"
Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.407804 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3e6ff0b3-5b8c-4b08-8351-a25ee4071299-kolla-config\") pod \"memcached-0\" (UID: \"3e6ff0b3-5b8c-4b08-8351-a25ee4071299\") " pod="openstack/memcached-0"
Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.408158 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3e6ff0b3-5b8c-4b08-8351-a25ee4071299-config-data\") pod \"memcached-0\" (UID: \"3e6ff0b3-5b8c-4b08-8351-a25ee4071299\") " pod="openstack/memcached-0"
Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.509952 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvgdd\" (UniqueName: \"kubernetes.io/projected/3e6ff0b3-5b8c-4b08-8351-a25ee4071299-kube-api-access-pvgdd\") pod \"memcached-0\" (UID: \"3e6ff0b3-5b8c-4b08-8351-a25ee4071299\") " pod="openstack/memcached-0"
Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.510331 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3e6ff0b3-5b8c-4b08-8351-a25ee4071299-kolla-config\") pod \"memcached-0\" (UID: \"3e6ff0b3-5b8c-4b08-8351-a25ee4071299\") " pod="openstack/memcached-0"
Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.510379 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3e6ff0b3-5b8c-4b08-8351-a25ee4071299-config-data\") pod \"memcached-0\" (UID: \"3e6ff0b3-5b8c-4b08-8351-a25ee4071299\") " pod="openstack/memcached-0"
Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.511234 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3e6ff0b3-5b8c-4b08-8351-a25ee4071299-kolla-config\") pod \"memcached-0\" (UID: \"3e6ff0b3-5b8c-4b08-8351-a25ee4071299\") " pod="openstack/memcached-0"
Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.511437 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3e6ff0b3-5b8c-4b08-8351-a25ee4071299-config-data\") pod \"memcached-0\" (UID: \"3e6ff0b3-5b8c-4b08-8351-a25ee4071299\") " pod="openstack/memcached-0"
Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.584335 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvgdd\" (UniqueName: \"kubernetes.io/projected/3e6ff0b3-5b8c-4b08-8351-a25ee4071299-kube-api-access-pvgdd\") pod \"memcached-0\" (UID: \"3e6ff0b3-5b8c-4b08-8351-a25ee4071299\") " pod="openstack/memcached-0"
Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.587238 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0"
Oct 10 17:49:27 crc kubenswrapper[4799]: I1010 17:49:27.683632 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"]
Oct 10 17:49:27 crc kubenswrapper[4799]: W1010 17:49:27.696121 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod78d6963b_d770_4b29_b6de_a09b36913cc0.slice/crio-561fdc97f0903278ad0914c28944d90162bb1265de6ac0c21f09efe82636cd32 WatchSource:0}: Error finding container 561fdc97f0903278ad0914c28944d90162bb1265de6ac0c21f09efe82636cd32: Status 404 returned error can't find the container with id 561fdc97f0903278ad0914c28944d90162bb1265de6ac0c21f09efe82636cd32
Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.164964 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"]
Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.193816 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"78d6963b-d770-4b29-b6de-a09b36913cc0","Type":"ContainerStarted","Data":"2fdc01ddc04989e8e16fe7f1c66f36ab20b3c54af8210b734cb1eb7f7f8da9f3"}
Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.193880 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"78d6963b-d770-4b29-b6de-a09b36913cc0","Type":"ContainerStarted","Data":"561fdc97f0903278ad0914c28944d90162bb1265de6ac0c21f09efe82636cd32"}
Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.204997 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67d9f7fb89-s2cmk" event={"ID":"922d8c7c-9feb-408b-a0df-fec585601827","Type":"ContainerStarted","Data":"5fd705535286f953b024a58d0cef26bea66822226c41df2c3a877a67d9326012"}
Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.207148 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"33099395-9065-4b94-95bb-70154f26962e","Type":"ContainerStarted","Data":"109ee61cc172613d038184c41cca98b6b1c233c891d1a1ea411577b226a5a91b"}
Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.210420 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-67d9f7fb89-s2cmk"
Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.213426 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"3e6ff0b3-5b8c-4b08-8351-a25ee4071299","Type":"ContainerStarted","Data":"f54c9b855c0c0a68ac779e50330f0f3ac1f8acccea8382c5e0611904e0456972"}
Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.216003 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85f98b87f9-tlq4r" event={"ID":"0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6","Type":"ContainerStarted","Data":"e371717a81036bdfba05c9fc0c2267105bdb4b6aec632abeb2fc2a8e5fa774a4"}
Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.216323 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-85f98b87f9-tlq4r"
Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.225614 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"]
Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.227698 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.230769 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc"
Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.230976 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data"
Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.231097 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-g7ck6"
Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.231660 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts"
Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.243883 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.261165 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-85f98b87f9-tlq4r" podStartSLOduration=3.261143249 podStartE2EDuration="3.261143249s" podCreationTimestamp="2025-10-10 17:49:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 17:49:28.241488299 +0000 UTC m=+4661.749812434" watchObservedRunningTime="2025-10-10 17:49:28.261143249 +0000 UTC m=+4661.769467384"
Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.273682 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-67d9f7fb89-s2cmk" podStartSLOduration=3.273661555 podStartE2EDuration="3.273661555s" podCreationTimestamp="2025-10-10 17:49:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 17:49:28.254969298 +0000 UTC m=+4661.763293423" watchObservedRunningTime="2025-10-10 17:49:28.273661555 +0000 UTC m=+4661.781985690"
Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.322035 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31b3c69e-7294-4166-bacb-98c92f97ab85-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"31b3c69e-7294-4166-bacb-98c92f97ab85\") " pod="openstack/openstack-cell1-galera-0"
Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.322105 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/31b3c69e-7294-4166-bacb-98c92f97ab85-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"31b3c69e-7294-4166-bacb-98c92f97ab85\") " pod="openstack/openstack-cell1-galera-0"
\"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/31b3c69e-7294-4166-bacb-98c92f97ab85-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"31b3c69e-7294-4166-bacb-98c92f97ab85\") " pod="openstack/openstack-cell1-galera-0" Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.322168 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxs2s\" (UniqueName: \"kubernetes.io/projected/31b3c69e-7294-4166-bacb-98c92f97ab85-kube-api-access-sxs2s\") pod \"openstack-cell1-galera-0\" (UID: \"31b3c69e-7294-4166-bacb-98c92f97ab85\") " pod="openstack/openstack-cell1-galera-0" Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.322229 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/31b3c69e-7294-4166-bacb-98c92f97ab85-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"31b3c69e-7294-4166-bacb-98c92f97ab85\") " pod="openstack/openstack-cell1-galera-0" Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.322313 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/31b3c69e-7294-4166-bacb-98c92f97ab85-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"31b3c69e-7294-4166-bacb-98c92f97ab85\") " pod="openstack/openstack-cell1-galera-0" Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.322392 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/31b3c69e-7294-4166-bacb-98c92f97ab85-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"31b3c69e-7294-4166-bacb-98c92f97ab85\") " pod="openstack/openstack-cell1-galera-0" Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.322450 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31b3c69e-7294-4166-bacb-98c92f97ab85-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"31b3c69e-7294-4166-bacb-98c92f97ab85\") " pod="openstack/openstack-cell1-galera-0" Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.322483 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-0975765f-ca3a-42fe-a277-4c2797f881a0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0975765f-ca3a-42fe-a277-4c2797f881a0\") pod \"openstack-cell1-galera-0\" (UID: \"31b3c69e-7294-4166-bacb-98c92f97ab85\") " pod="openstack/openstack-cell1-galera-0" Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.322506 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/31b3c69e-7294-4166-bacb-98c92f97ab85-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"31b3c69e-7294-4166-bacb-98c92f97ab85\") " pod="openstack/openstack-cell1-galera-0" Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.424009 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxs2s\" (UniqueName: \"kubernetes.io/projected/31b3c69e-7294-4166-bacb-98c92f97ab85-kube-api-access-sxs2s\") pod \"openstack-cell1-galera-0\" (UID: \"31b3c69e-7294-4166-bacb-98c92f97ab85\") " pod="openstack/openstack-cell1-galera-0" Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 
17:49:28.424075 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/31b3c69e-7294-4166-bacb-98c92f97ab85-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"31b3c69e-7294-4166-bacb-98c92f97ab85\") " pod="openstack/openstack-cell1-galera-0" Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.424131 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/31b3c69e-7294-4166-bacb-98c92f97ab85-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"31b3c69e-7294-4166-bacb-98c92f97ab85\") " pod="openstack/openstack-cell1-galera-0" Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.424181 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/31b3c69e-7294-4166-bacb-98c92f97ab85-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"31b3c69e-7294-4166-bacb-98c92f97ab85\") " pod="openstack/openstack-cell1-galera-0" Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.424207 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31b3c69e-7294-4166-bacb-98c92f97ab85-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"31b3c69e-7294-4166-bacb-98c92f97ab85\") " pod="openstack/openstack-cell1-galera-0" Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.424235 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-0975765f-ca3a-42fe-a277-4c2797f881a0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0975765f-ca3a-42fe-a277-4c2797f881a0\") pod \"openstack-cell1-galera-0\" (UID: \"31b3c69e-7294-4166-bacb-98c92f97ab85\") " pod="openstack/openstack-cell1-galera-0" Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.424256 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/31b3c69e-7294-4166-bacb-98c92f97ab85-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"31b3c69e-7294-4166-bacb-98c92f97ab85\") " pod="openstack/openstack-cell1-galera-0" Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.424290 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31b3c69e-7294-4166-bacb-98c92f97ab85-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"31b3c69e-7294-4166-bacb-98c92f97ab85\") " pod="openstack/openstack-cell1-galera-0" Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.424325 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/31b3c69e-7294-4166-bacb-98c92f97ab85-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"31b3c69e-7294-4166-bacb-98c92f97ab85\") " pod="openstack/openstack-cell1-galera-0" Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.424878 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/31b3c69e-7294-4166-bacb-98c92f97ab85-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"31b3c69e-7294-4166-bacb-98c92f97ab85\") " pod="openstack/openstack-cell1-galera-0" Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.425527 4799 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/31b3c69e-7294-4166-bacb-98c92f97ab85-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"31b3c69e-7294-4166-bacb-98c92f97ab85\") " pod="openstack/openstack-cell1-galera-0" Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.425642 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31b3c69e-7294-4166-bacb-98c92f97ab85-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"31b3c69e-7294-4166-bacb-98c92f97ab85\") " pod="openstack/openstack-cell1-galera-0" Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.426302 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/31b3c69e-7294-4166-bacb-98c92f97ab85-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"31b3c69e-7294-4166-bacb-98c92f97ab85\") " pod="openstack/openstack-cell1-galera-0" Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.430952 4799 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.430982 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-0975765f-ca3a-42fe-a277-4c2797f881a0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0975765f-ca3a-42fe-a277-4c2797f881a0\") pod \"openstack-cell1-galera-0\" (UID: \"31b3c69e-7294-4166-bacb-98c92f97ab85\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/5d0d10c45b675b202d4c586309a8e5168e344fe9f64c0b1de5be3f12ecfe4940/globalmount\"" pod="openstack/openstack-cell1-galera-0" Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.431467 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31b3c69e-7294-4166-bacb-98c92f97ab85-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"31b3c69e-7294-4166-bacb-98c92f97ab85\") " pod="openstack/openstack-cell1-galera-0" Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.437973 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/31b3c69e-7294-4166-bacb-98c92f97ab85-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"31b3c69e-7294-4166-bacb-98c92f97ab85\") " pod="openstack/openstack-cell1-galera-0" Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.440919 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxs2s\" (UniqueName: \"kubernetes.io/projected/31b3c69e-7294-4166-bacb-98c92f97ab85-kube-api-access-sxs2s\") pod \"openstack-cell1-galera-0\" (UID: \"31b3c69e-7294-4166-bacb-98c92f97ab85\") " pod="openstack/openstack-cell1-galera-0" Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.466980 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-0975765f-ca3a-42fe-a277-4c2797f881a0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0975765f-ca3a-42fe-a277-4c2797f881a0\") pod \"openstack-cell1-galera-0\" (UID: \"31b3c69e-7294-4166-bacb-98c92f97ab85\") " pod="openstack/openstack-cell1-galera-0" Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.482921 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: 
\"kubernetes.io/secret/31b3c69e-7294-4166-bacb-98c92f97ab85-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"31b3c69e-7294-4166-bacb-98c92f97ab85\") " pod="openstack/openstack-cell1-galera-0" Oct 10 17:49:28 crc kubenswrapper[4799]: I1010 17:49:28.573866 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 10 17:49:29 crc kubenswrapper[4799]: I1010 17:49:29.009823 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 10 17:49:29 crc kubenswrapper[4799]: W1010 17:49:29.015835 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod31b3c69e_7294_4166_bacb_98c92f97ab85.slice/crio-7fbd0e38ebc8b644bde982976ba9e9eed4ee9986f86fc7d8fa822b6545760984 WatchSource:0}: Error finding container 7fbd0e38ebc8b644bde982976ba9e9eed4ee9986f86fc7d8fa822b6545760984: Status 404 returned error can't find the container with id 7fbd0e38ebc8b644bde982976ba9e9eed4ee9986f86fc7d8fa822b6545760984 Oct 10 17:49:29 crc kubenswrapper[4799]: I1010 17:49:29.237089 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"3e6ff0b3-5b8c-4b08-8351-a25ee4071299","Type":"ContainerStarted","Data":"384b5f732ff116fe992ce4a660855de18ef11e0160667339a4e0249d6eb3aa1d"} Oct 10 17:49:29 crc kubenswrapper[4799]: I1010 17:49:29.237473 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Oct 10 17:49:29 crc kubenswrapper[4799]: I1010 17:49:29.239446 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"3dae848a-4c75-4ddc-9b6d-b64f127eba9b","Type":"ContainerStarted","Data":"fc360011e7da50fd6ed79378a4e8033991094f00628c1ba5c6685682528e6aa5"} Oct 10 17:49:29 crc kubenswrapper[4799]: I1010 17:49:29.242387 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"31b3c69e-7294-4166-bacb-98c92f97ab85","Type":"ContainerStarted","Data":"7fbd0e38ebc8b644bde982976ba9e9eed4ee9986f86fc7d8fa822b6545760984"} Oct 10 17:49:29 crc kubenswrapper[4799]: I1010 17:49:29.244877 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"33099395-9065-4b94-95bb-70154f26962e","Type":"ContainerStarted","Data":"121bfef76d0172879e6e5c57fcef7df33465b03df258b5acd73f122ce2086d91"} Oct 10 17:49:29 crc kubenswrapper[4799]: I1010 17:49:29.261226 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=2.261204007 podStartE2EDuration="2.261204007s" podCreationTimestamp="2025-10-10 17:49:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 17:49:29.253879138 +0000 UTC m=+4662.762203253" watchObservedRunningTime="2025-10-10 17:49:29.261204007 +0000 UTC m=+4662.769528132" Oct 10 17:49:30 crc kubenswrapper[4799]: I1010 17:49:30.257346 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"31b3c69e-7294-4166-bacb-98c92f97ab85","Type":"ContainerStarted","Data":"3083e8cb579b6339725847fd42dfbe74a49848a26d08307b92081c26d93bd117"} Oct 10 17:49:32 crc kubenswrapper[4799]: I1010 17:49:32.281742 4799 generic.go:334] "Generic (PLEG): container finished" podID="78d6963b-d770-4b29-b6de-a09b36913cc0" 
containerID="2fdc01ddc04989e8e16fe7f1c66f36ab20b3c54af8210b734cb1eb7f7f8da9f3" exitCode=0 Oct 10 17:49:32 crc kubenswrapper[4799]: I1010 17:49:32.281934 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"78d6963b-d770-4b29-b6de-a09b36913cc0","Type":"ContainerDied","Data":"2fdc01ddc04989e8e16fe7f1c66f36ab20b3c54af8210b734cb1eb7f7f8da9f3"} Oct 10 17:49:33 crc kubenswrapper[4799]: I1010 17:49:33.297111 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"78d6963b-d770-4b29-b6de-a09b36913cc0","Type":"ContainerStarted","Data":"0d5dbc0e374dd7c35d71a3f3578e0ee8747d0a7489288f398acfe6ae1ca83f76"} Oct 10 17:49:33 crc kubenswrapper[4799]: I1010 17:49:33.300796 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"31b3c69e-7294-4166-bacb-98c92f97ab85","Type":"ContainerDied","Data":"3083e8cb579b6339725847fd42dfbe74a49848a26d08307b92081c26d93bd117"} Oct 10 17:49:33 crc kubenswrapper[4799]: I1010 17:49:33.300833 4799 generic.go:334] "Generic (PLEG): container finished" podID="31b3c69e-7294-4166-bacb-98c92f97ab85" containerID="3083e8cb579b6339725847fd42dfbe74a49848a26d08307b92081c26d93bd117" exitCode=0 Oct 10 17:49:33 crc kubenswrapper[4799]: I1010 17:49:33.335418 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=8.335385784 podStartE2EDuration="8.335385784s" podCreationTimestamp="2025-10-10 17:49:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 17:49:33.33359953 +0000 UTC m=+4666.841923645" watchObservedRunningTime="2025-10-10 17:49:33.335385784 +0000 UTC m=+4666.843709959" Oct 10 17:49:34 crc kubenswrapper[4799]: I1010 17:49:34.311070 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"31b3c69e-7294-4166-bacb-98c92f97ab85","Type":"ContainerStarted","Data":"661eb5fbea36d49cd20768e78c785c6e6dd6ba85195763022afb08a0169a84d8"} Oct 10 17:49:34 crc kubenswrapper[4799]: I1010 17:49:34.362718 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=7.362687108 podStartE2EDuration="7.362687108s" podCreationTimestamp="2025-10-10 17:49:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 17:49:34.356620919 +0000 UTC m=+4667.864945124" watchObservedRunningTime="2025-10-10 17:49:34.362687108 +0000 UTC m=+4667.871011263" Oct 10 17:49:35 crc kubenswrapper[4799]: I1010 17:49:35.612140 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-85f98b87f9-tlq4r" Oct 10 17:49:35 crc kubenswrapper[4799]: I1010 17:49:35.697391 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-67d9f7fb89-s2cmk" Oct 10 17:49:35 crc kubenswrapper[4799]: I1010 17:49:35.774819 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85f98b87f9-tlq4r"] Oct 10 17:49:36 crc kubenswrapper[4799]: I1010 17:49:36.328579 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-85f98b87f9-tlq4r" podUID="0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6" containerName="dnsmasq-dns" 
containerID="cri-o://e371717a81036bdfba05c9fc0c2267105bdb4b6aec632abeb2fc2a8e5fa774a4" gracePeriod=10 Oct 10 17:49:36 crc kubenswrapper[4799]: I1010 17:49:36.823321 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85f98b87f9-tlq4r" Oct 10 17:49:36 crc kubenswrapper[4799]: I1010 17:49:36.967688 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6-config\") pod \"0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6\" (UID: \"0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6\") " Oct 10 17:49:36 crc kubenswrapper[4799]: I1010 17:49:36.967885 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m9qcg\" (UniqueName: \"kubernetes.io/projected/0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6-kube-api-access-m9qcg\") pod \"0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6\" (UID: \"0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6\") " Oct 10 17:49:36 crc kubenswrapper[4799]: I1010 17:49:36.968002 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6-dns-svc\") pod \"0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6\" (UID: \"0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6\") " Oct 10 17:49:36 crc kubenswrapper[4799]: I1010 17:49:36.983235 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6-kube-api-access-m9qcg" (OuterVolumeSpecName: "kube-api-access-m9qcg") pod "0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6" (UID: "0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6"). InnerVolumeSpecName "kube-api-access-m9qcg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:49:37 crc kubenswrapper[4799]: I1010 17:49:37.022701 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6-config" (OuterVolumeSpecName: "config") pod "0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6" (UID: "0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 17:49:37 crc kubenswrapper[4799]: I1010 17:49:37.039682 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6" (UID: "0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 17:49:37 crc kubenswrapper[4799]: I1010 17:49:37.070004 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m9qcg\" (UniqueName: \"kubernetes.io/projected/0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6-kube-api-access-m9qcg\") on node \"crc\" DevicePath \"\"" Oct 10 17:49:37 crc kubenswrapper[4799]: I1010 17:49:37.070047 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 10 17:49:37 crc kubenswrapper[4799]: I1010 17:49:37.070059 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6-config\") on node \"crc\" DevicePath \"\"" Oct 10 17:49:37 crc kubenswrapper[4799]: I1010 17:49:37.173422 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Oct 10 17:49:37 crc kubenswrapper[4799]: I1010 17:49:37.173475 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Oct 10 17:49:37 crc kubenswrapper[4799]: I1010 17:49:37.219892 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Oct 10 17:49:37 crc kubenswrapper[4799]: I1010 17:49:37.338851 4799 generic.go:334] "Generic (PLEG): container finished" podID="0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6" containerID="e371717a81036bdfba05c9fc0c2267105bdb4b6aec632abeb2fc2a8e5fa774a4" exitCode=0 Oct 10 17:49:37 crc kubenswrapper[4799]: I1010 17:49:37.338928 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85f98b87f9-tlq4r" event={"ID":"0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6","Type":"ContainerDied","Data":"e371717a81036bdfba05c9fc0c2267105bdb4b6aec632abeb2fc2a8e5fa774a4"} Oct 10 17:49:37 crc kubenswrapper[4799]: I1010 17:49:37.338950 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-85f98b87f9-tlq4r" Oct 10 17:49:37 crc kubenswrapper[4799]: I1010 17:49:37.338987 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85f98b87f9-tlq4r" event={"ID":"0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6","Type":"ContainerDied","Data":"3ed7afde72bac53e1a4508b4bfc651f7a8d1f6e2950d7cbee732ae50726de204"} Oct 10 17:49:37 crc kubenswrapper[4799]: I1010 17:49:37.339007 4799 scope.go:117] "RemoveContainer" containerID="e371717a81036bdfba05c9fc0c2267105bdb4b6aec632abeb2fc2a8e5fa774a4" Oct 10 17:49:37 crc kubenswrapper[4799]: I1010 17:49:37.356313 4799 scope.go:117] "RemoveContainer" containerID="9da25e2005fef19c9b7c9e5bc293b67ff80e3a021ab14507ba061058856369fe" Oct 10 17:49:37 crc kubenswrapper[4799]: I1010 17:49:37.374976 4799 scope.go:117] "RemoveContainer" containerID="e371717a81036bdfba05c9fc0c2267105bdb4b6aec632abeb2fc2a8e5fa774a4" Oct 10 17:49:37 crc kubenswrapper[4799]: E1010 17:49:37.377783 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e371717a81036bdfba05c9fc0c2267105bdb4b6aec632abeb2fc2a8e5fa774a4\": container with ID starting with e371717a81036bdfba05c9fc0c2267105bdb4b6aec632abeb2fc2a8e5fa774a4 not found: ID does not exist" containerID="e371717a81036bdfba05c9fc0c2267105bdb4b6aec632abeb2fc2a8e5fa774a4" Oct 10 17:49:37 crc kubenswrapper[4799]: I1010 17:49:37.377913 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e371717a81036bdfba05c9fc0c2267105bdb4b6aec632abeb2fc2a8e5fa774a4"} err="failed to get container status \"e371717a81036bdfba05c9fc0c2267105bdb4b6aec632abeb2fc2a8e5fa774a4\": rpc error: code = NotFound desc = could not find container \"e371717a81036bdfba05c9fc0c2267105bdb4b6aec632abeb2fc2a8e5fa774a4\": container with ID starting with e371717a81036bdfba05c9fc0c2267105bdb4b6aec632abeb2fc2a8e5fa774a4 not found: ID does not exist" Oct 10 17:49:37 crc kubenswrapper[4799]: I1010 17:49:37.378037 4799 scope.go:117] "RemoveContainer" containerID="9da25e2005fef19c9b7c9e5bc293b67ff80e3a021ab14507ba061058856369fe" Oct 10 17:49:37 crc kubenswrapper[4799]: I1010 17:49:37.377867 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85f98b87f9-tlq4r"] Oct 10 17:49:37 crc kubenswrapper[4799]: E1010 17:49:37.378660 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9da25e2005fef19c9b7c9e5bc293b67ff80e3a021ab14507ba061058856369fe\": container with ID starting with 9da25e2005fef19c9b7c9e5bc293b67ff80e3a021ab14507ba061058856369fe not found: ID does not exist" containerID="9da25e2005fef19c9b7c9e5bc293b67ff80e3a021ab14507ba061058856369fe" Oct 10 17:49:37 crc kubenswrapper[4799]: I1010 17:49:37.378711 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9da25e2005fef19c9b7c9e5bc293b67ff80e3a021ab14507ba061058856369fe"} err="failed to get container status \"9da25e2005fef19c9b7c9e5bc293b67ff80e3a021ab14507ba061058856369fe\": rpc error: code = NotFound desc = could not find container \"9da25e2005fef19c9b7c9e5bc293b67ff80e3a021ab14507ba061058856369fe\": container with ID starting with 9da25e2005fef19c9b7c9e5bc293b67ff80e3a021ab14507ba061058856369fe not found: ID does not exist" Oct 10 17:49:37 crc kubenswrapper[4799]: I1010 17:49:37.381670 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-85f98b87f9-tlq4r"] Oct 10 
Oct 10 17:49:37 crc kubenswrapper[4799]: I1010 17:49:37.393411 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0"
Oct 10 17:49:37 crc kubenswrapper[4799]: I1010 17:49:37.420380 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6" path="/var/lib/kubelet/pods/0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6/volumes"
Oct 10 17:49:37 crc kubenswrapper[4799]: I1010 17:49:37.588382 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0"
Oct 10 17:49:38 crc kubenswrapper[4799]: I1010 17:49:38.574944 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0"
Oct 10 17:49:38 crc kubenswrapper[4799]: I1010 17:49:38.576572 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0"
Oct 10 17:49:40 crc kubenswrapper[4799]: I1010 17:49:40.646325 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0"
Oct 10 17:49:40 crc kubenswrapper[4799]: I1010 17:49:40.701032 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0"
Oct 10 17:50:01 crc kubenswrapper[4799]: I1010 17:50:01.586349 4799 generic.go:334] "Generic (PLEG): container finished" podID="33099395-9065-4b94-95bb-70154f26962e" containerID="121bfef76d0172879e6e5c57fcef7df33465b03df258b5acd73f122ce2086d91" exitCode=0
Oct 10 17:50:01 crc kubenswrapper[4799]: I1010 17:50:01.586472 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"33099395-9065-4b94-95bb-70154f26962e","Type":"ContainerDied","Data":"121bfef76d0172879e6e5c57fcef7df33465b03df258b5acd73f122ce2086d91"}
Oct 10 17:50:01 crc kubenswrapper[4799]: I1010 17:50:01.590998 4799 generic.go:334] "Generic (PLEG): container finished" podID="3dae848a-4c75-4ddc-9b6d-b64f127eba9b" containerID="fc360011e7da50fd6ed79378a4e8033991094f00628c1ba5c6685682528e6aa5" exitCode=0
Oct 10 17:50:01 crc kubenswrapper[4799]: I1010 17:50:01.591054 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"3dae848a-4c75-4ddc-9b6d-b64f127eba9b","Type":"ContainerDied","Data":"fc360011e7da50fd6ed79378a4e8033991094f00628c1ba5c6685682528e6aa5"}
Oct 10 17:50:02 crc kubenswrapper[4799]: I1010 17:50:02.602962 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"33099395-9065-4b94-95bb-70154f26962e","Type":"ContainerStarted","Data":"47c035f4b09c4c01a9e7b93c71f9700b3cb6094e1c2c144e370d5701971f3b8c"}
Oct 10 17:50:02 crc kubenswrapper[4799]: I1010 17:50:02.603594 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0"
Oct 10 17:50:02 crc kubenswrapper[4799]: I1010 17:50:02.605377 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"3dae848a-4c75-4ddc-9b6d-b64f127eba9b","Type":"ContainerStarted","Data":"baac5bdc9656b934ccf0dc5984326ede93cf1516e6e05133893103c8e9300f4e"}
Oct 10 17:50:02 crc kubenswrapper[4799]: I1010 17:50:02.605641 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0"
Oct 10 17:50:02 crc kubenswrapper[4799]: I1010 17:50:02.648227 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.648201412 podStartE2EDuration="37.648201412s" podCreationTimestamp="2025-10-10 17:49:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 17:50:02.635578224 +0000 UTC m=+4696.143902359" watchObservedRunningTime="2025-10-10 17:50:02.648201412 +0000 UTC m=+4696.156525527"
Oct 10 17:50:02 crc kubenswrapper[4799]: I1010 17:50:02.665200 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.665173317 podStartE2EDuration="37.665173317s" podCreationTimestamp="2025-10-10 17:49:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 17:50:02.658796691 +0000 UTC m=+4696.167120816" watchObservedRunningTime="2025-10-10 17:50:02.665173317 +0000 UTC m=+4696.173497472"
Oct 10 17:50:15 crc kubenswrapper[4799]: I1010 17:50:15.249125 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 10 17:50:15 crc kubenswrapper[4799]: I1010 17:50:15.250129 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 10 17:50:16 crc kubenswrapper[4799]: I1010 17:50:16.580420 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0"
Oct 10 17:50:16 crc kubenswrapper[4799]: I1010 17:50:16.896985 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0"
Oct 10 17:50:22 crc kubenswrapper[4799]: I1010 17:50:22.414567 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5fdc957c47-h7qx2"]
Oct 10 17:50:22 crc kubenswrapper[4799]: E1010 17:50:22.415597 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6" containerName="dnsmasq-dns"
Oct 10 17:50:22 crc kubenswrapper[4799]: I1010 17:50:22.415620 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6" containerName="dnsmasq-dns"
Oct 10 17:50:22 crc kubenswrapper[4799]: E1010 17:50:22.415663 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6" containerName="init"
Oct 10 17:50:22 crc kubenswrapper[4799]: I1010 17:50:22.415675 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6" containerName="init"
Oct 10 17:50:22 crc kubenswrapper[4799]: I1010 17:50:22.415951 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="0aec4be0-7a9c-4ee0-a2a7-b78df43ae1b6" containerName="dnsmasq-dns"
Oct 10 17:50:22 crc kubenswrapper[4799]: I1010 17:50:22.417371 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5fdc957c47-h7qx2"
Oct 10 17:50:22 crc kubenswrapper[4799]: I1010 17:50:22.424270 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5fdc957c47-h7qx2"]
Oct 10 17:50:22 crc kubenswrapper[4799]: I1010 17:50:22.518566 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfsxb\" (UniqueName: \"kubernetes.io/projected/29388aea-227e-4b3b-96c1-5edb9ffa644d-kube-api-access-tfsxb\") pod \"dnsmasq-dns-5fdc957c47-h7qx2\" (UID: \"29388aea-227e-4b3b-96c1-5edb9ffa644d\") " pod="openstack/dnsmasq-dns-5fdc957c47-h7qx2"
Oct 10 17:50:22 crc kubenswrapper[4799]: I1010 17:50:22.518704 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29388aea-227e-4b3b-96c1-5edb9ffa644d-config\") pod \"dnsmasq-dns-5fdc957c47-h7qx2\" (UID: \"29388aea-227e-4b3b-96c1-5edb9ffa644d\") " pod="openstack/dnsmasq-dns-5fdc957c47-h7qx2"
Oct 10 17:50:22 crc kubenswrapper[4799]: I1010 17:50:22.519054 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/29388aea-227e-4b3b-96c1-5edb9ffa644d-dns-svc\") pod \"dnsmasq-dns-5fdc957c47-h7qx2\" (UID: \"29388aea-227e-4b3b-96c1-5edb9ffa644d\") " pod="openstack/dnsmasq-dns-5fdc957c47-h7qx2"
Oct 10 17:50:22 crc kubenswrapper[4799]: I1010 17:50:22.620106 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfsxb\" (UniqueName: \"kubernetes.io/projected/29388aea-227e-4b3b-96c1-5edb9ffa644d-kube-api-access-tfsxb\") pod \"dnsmasq-dns-5fdc957c47-h7qx2\" (UID: \"29388aea-227e-4b3b-96c1-5edb9ffa644d\") " pod="openstack/dnsmasq-dns-5fdc957c47-h7qx2"
Oct 10 17:50:22 crc kubenswrapper[4799]: I1010 17:50:22.620155 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29388aea-227e-4b3b-96c1-5edb9ffa644d-config\") pod \"dnsmasq-dns-5fdc957c47-h7qx2\" (UID: \"29388aea-227e-4b3b-96c1-5edb9ffa644d\") " pod="openstack/dnsmasq-dns-5fdc957c47-h7qx2"
Oct 10 17:50:22 crc kubenswrapper[4799]: I1010 17:50:22.620224 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/29388aea-227e-4b3b-96c1-5edb9ffa644d-dns-svc\") pod \"dnsmasq-dns-5fdc957c47-h7qx2\" (UID: \"29388aea-227e-4b3b-96c1-5edb9ffa644d\") " pod="openstack/dnsmasq-dns-5fdc957c47-h7qx2"
Oct 10 17:50:22 crc kubenswrapper[4799]: I1010 17:50:22.621032 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/29388aea-227e-4b3b-96c1-5edb9ffa644d-dns-svc\") pod \"dnsmasq-dns-5fdc957c47-h7qx2\" (UID: \"29388aea-227e-4b3b-96c1-5edb9ffa644d\") " pod="openstack/dnsmasq-dns-5fdc957c47-h7qx2"
Oct 10 17:50:22 crc kubenswrapper[4799]: I1010 17:50:22.621274 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29388aea-227e-4b3b-96c1-5edb9ffa644d-config\") pod \"dnsmasq-dns-5fdc957c47-h7qx2\" (UID: \"29388aea-227e-4b3b-96c1-5edb9ffa644d\") " pod="openstack/dnsmasq-dns-5fdc957c47-h7qx2"
Oct 10 17:50:22 crc kubenswrapper[4799]: I1010 17:50:22.639196 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfsxb\" (UniqueName: \"kubernetes.io/projected/29388aea-227e-4b3b-96c1-5edb9ffa644d-kube-api-access-tfsxb\") pod \"dnsmasq-dns-5fdc957c47-h7qx2\" (UID: \"29388aea-227e-4b3b-96c1-5edb9ffa644d\") " pod="openstack/dnsmasq-dns-5fdc957c47-h7qx2"
Oct 10 17:50:22 crc kubenswrapper[4799]: I1010 17:50:22.746888 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5fdc957c47-h7qx2"
Oct 10 17:50:23 crc kubenswrapper[4799]: I1010 17:50:23.227627 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 10 17:50:23 crc kubenswrapper[4799]: I1010 17:50:23.305421 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5fdc957c47-h7qx2"]
Oct 10 17:50:23 crc kubenswrapper[4799]: I1010 17:50:23.803923 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fdc957c47-h7qx2" event={"ID":"29388aea-227e-4b3b-96c1-5edb9ffa644d","Type":"ContainerStarted","Data":"85e2561153d0f09a72626e5c4bbffd69a2b78e8ff6618101ba2bd5344844e5c8"}
Oct 10 17:50:24 crc kubenswrapper[4799]: I1010 17:50:24.117712 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 10 17:50:24 crc kubenswrapper[4799]: I1010 17:50:24.813097 4799 generic.go:334] "Generic (PLEG): container finished" podID="29388aea-227e-4b3b-96c1-5edb9ffa644d" containerID="648ac82bd71112d1e8970237fcd6e7b637cfa55d9729676f2eed8130a04a57bf" exitCode=0
Oct 10 17:50:24 crc kubenswrapper[4799]: I1010 17:50:24.813208 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fdc957c47-h7qx2" event={"ID":"29388aea-227e-4b3b-96c1-5edb9ffa644d","Type":"ContainerDied","Data":"648ac82bd71112d1e8970237fcd6e7b637cfa55d9729676f2eed8130a04a57bf"}
Oct 10 17:50:25 crc kubenswrapper[4799]: I1010 17:50:25.644155 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="3dae848a-4c75-4ddc-9b6d-b64f127eba9b" containerName="rabbitmq" containerID="cri-o://baac5bdc9656b934ccf0dc5984326ede93cf1516e6e05133893103c8e9300f4e" gracePeriod=604798
Oct 10 17:50:25 crc kubenswrapper[4799]: I1010 17:50:25.822555 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fdc957c47-h7qx2" event={"ID":"29388aea-227e-4b3b-96c1-5edb9ffa644d","Type":"ContainerStarted","Data":"3968be08b182b99b94ecebe232fe049b82f8716d786b3ff76e347437be1acb2d"}
Oct 10 17:50:25 crc kubenswrapper[4799]: I1010 17:50:25.823665 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5fdc957c47-h7qx2"
Oct 10 17:50:25 crc kubenswrapper[4799]: I1010 17:50:25.846105 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5fdc957c47-h7qx2" podStartSLOduration=3.846083274 podStartE2EDuration="3.846083274s" podCreationTimestamp="2025-10-10 17:50:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 17:50:25.841883981 +0000 UTC m=+4719.350208156" watchObservedRunningTime="2025-10-10 17:50:25.846083274 +0000 UTC m=+4719.354407399"
Oct 10 17:50:26 crc kubenswrapper[4799]: I1010 17:50:26.329219 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="33099395-9065-4b94-95bb-70154f26962e" containerName="rabbitmq" containerID="cri-o://47c035f4b09c4c01a9e7b93c71f9700b3cb6094e1c2c144e370d5701971f3b8c" gracePeriod=604798
Oct 10 17:50:26 crc kubenswrapper[4799]: I1010 17:50:26.578322 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="3dae848a-4c75-4ddc-9b6d-b64f127eba9b" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.250:5672: connect: connection refused"
Oct 10 17:50:26 crc kubenswrapper[4799]: I1010 17:50:26.929828 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="33099395-9065-4b94-95bb-70154f26962e" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.251:5672: connect: connection refused"
Oct 10 17:50:31 crc kubenswrapper[4799]: I1010 17:50:31.878030 4799 generic.go:334] "Generic (PLEG): container finished" podID="3dae848a-4c75-4ddc-9b6d-b64f127eba9b" containerID="baac5bdc9656b934ccf0dc5984326ede93cf1516e6e05133893103c8e9300f4e" exitCode=0
Oct 10 17:50:31 crc kubenswrapper[4799]: I1010 17:50:31.878100 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"3dae848a-4c75-4ddc-9b6d-b64f127eba9b","Type":"ContainerDied","Data":"baac5bdc9656b934ccf0dc5984326ede93cf1516e6e05133893103c8e9300f4e"}
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.334442 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.390848 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zff5c\" (UniqueName: \"kubernetes.io/projected/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-kube-api-access-zff5c\") pod \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") "
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.390946 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-rabbitmq-erlang-cookie\") pod \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") "
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.391034 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-erlang-cookie-secret\") pod \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") "
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.391085 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-pod-info\") pod \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") "
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.391126 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-rabbitmq-plugins\") pod \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") "
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.391166 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-rabbitmq-confd\") pod \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") "
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.391329 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4a167392-8227-452d-8172-17e1bbf510ef\") pod \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") "
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.391435 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-server-conf\") pod \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") "
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.391485 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-plugins-conf\") pod \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\" (UID: \"3dae848a-4c75-4ddc-9b6d-b64f127eba9b\") "
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.392033 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "3dae848a-4c75-4ddc-9b6d-b64f127eba9b" (UID: "3dae848a-4c75-4ddc-9b6d-b64f127eba9b"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.392385 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "3dae848a-4c75-4ddc-9b6d-b64f127eba9b" (UID: "3dae848a-4c75-4ddc-9b6d-b64f127eba9b"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.392810 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "3dae848a-4c75-4ddc-9b6d-b64f127eba9b" (UID: "3dae848a-4c75-4ddc-9b6d-b64f127eba9b"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.399115 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-kube-api-access-zff5c" (OuterVolumeSpecName: "kube-api-access-zff5c") pod "3dae848a-4c75-4ddc-9b6d-b64f127eba9b" (UID: "3dae848a-4c75-4ddc-9b6d-b64f127eba9b"). InnerVolumeSpecName "kube-api-access-zff5c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.401577 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-pod-info" (OuterVolumeSpecName: "pod-info") pod "3dae848a-4c75-4ddc-9b6d-b64f127eba9b" (UID: "3dae848a-4c75-4ddc-9b6d-b64f127eba9b"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.406662 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "3dae848a-4c75-4ddc-9b6d-b64f127eba9b" (UID: "3dae848a-4c75-4ddc-9b6d-b64f127eba9b"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.408834 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4a167392-8227-452d-8172-17e1bbf510ef" (OuterVolumeSpecName: "persistence") pod "3dae848a-4c75-4ddc-9b6d-b64f127eba9b" (UID: "3dae848a-4c75-4ddc-9b6d-b64f127eba9b"). InnerVolumeSpecName "pvc-4a167392-8227-452d-8172-17e1bbf510ef". PluginName "kubernetes.io/csi", VolumeGidValue ""
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.436370 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-server-conf" (OuterVolumeSpecName: "server-conf") pod "3dae848a-4c75-4ddc-9b6d-b64f127eba9b" (UID: "3dae848a-4c75-4ddc-9b6d-b64f127eba9b"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.493209 4799 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.493240 4799 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-pod-info\") on node \"crc\" DevicePath \"\""
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.493248 4799 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.493273 4799 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-4a167392-8227-452d-8172-17e1bbf510ef\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4a167392-8227-452d-8172-17e1bbf510ef\") on node \"crc\" "
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.493285 4799 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-server-conf\") on node \"crc\" DevicePath \"\""
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.493294 4799 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-plugins-conf\") on node \"crc\" DevicePath \"\""
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.493304 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zff5c\" (UniqueName: \"kubernetes.io/projected/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-kube-api-access-zff5c\") on node \"crc\" DevicePath \"\""
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.493313 4799 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.493403 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "3dae848a-4c75-4ddc-9b6d-b64f127eba9b" (UID: "3dae848a-4c75-4ddc-9b6d-b64f127eba9b"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.516246 4799 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice...
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.516617 4799 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-4a167392-8227-452d-8172-17e1bbf510ef" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4a167392-8227-452d-8172-17e1bbf510ef") on node "crc"
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.595225 4799 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3dae848a-4c75-4ddc-9b6d-b64f127eba9b-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.595260 4799 reconciler_common.go:293] "Volume detached for volume \"pvc-4a167392-8227-452d-8172-17e1bbf510ef\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4a167392-8227-452d-8172-17e1bbf510ef\") on node \"crc\" DevicePath \"\""
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.752039 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5fdc957c47-h7qx2"
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.860882 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67d9f7fb89-s2cmk"]
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.861240 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-67d9f7fb89-s2cmk" podUID="922d8c7c-9feb-408b-a0df-fec585601827" containerName="dnsmasq-dns" containerID="cri-o://5fd705535286f953b024a58d0cef26bea66822226c41df2c3a877a67d9326012" gracePeriod=10
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.913810 4799 generic.go:334] "Generic (PLEG): container finished" podID="33099395-9065-4b94-95bb-70154f26962e" containerID="47c035f4b09c4c01a9e7b93c71f9700b3cb6094e1c2c144e370d5701971f3b8c" exitCode=0
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.913868 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"33099395-9065-4b94-95bb-70154f26962e","Type":"ContainerDied","Data":"47c035f4b09c4c01a9e7b93c71f9700b3cb6094e1c2c144e370d5701971f3b8c"}
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.920129 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"3dae848a-4c75-4ddc-9b6d-b64f127eba9b","Type":"ContainerDied","Data":"ae9c26f40a4dc4424714797620b312bbe7dd78cfe023882101e1f8da35e992ed"}
Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.920189 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 10 17:50:32 crc kubenswrapper[4799]: I1010 17:50:32.920204 4799 scope.go:117] "RemoveContainer" containerID="baac5bdc9656b934ccf0dc5984326ede93cf1516e6e05133893103c8e9300f4e" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.038740 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.042421 4799 scope.go:117] "RemoveContainer" containerID="fc360011e7da50fd6ed79378a4e8033991094f00628c1ba5c6685682528e6aa5" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.050411 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.058582 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.075787 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 10 17:50:33 crc kubenswrapper[4799]: E1010 17:50:33.076165 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dae848a-4c75-4ddc-9b6d-b64f127eba9b" containerName="setup-container" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.076185 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dae848a-4c75-4ddc-9b6d-b64f127eba9b" containerName="setup-container" Oct 10 17:50:33 crc kubenswrapper[4799]: E1010 17:50:33.076199 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33099395-9065-4b94-95bb-70154f26962e" containerName="setup-container" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.076208 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="33099395-9065-4b94-95bb-70154f26962e" containerName="setup-container" Oct 10 17:50:33 crc kubenswrapper[4799]: E1010 17:50:33.076226 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dae848a-4c75-4ddc-9b6d-b64f127eba9b" containerName="rabbitmq" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.076235 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dae848a-4c75-4ddc-9b6d-b64f127eba9b" containerName="rabbitmq" Oct 10 17:50:33 crc kubenswrapper[4799]: E1010 17:50:33.076259 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33099395-9065-4b94-95bb-70154f26962e" containerName="rabbitmq" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.076266 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="33099395-9065-4b94-95bb-70154f26962e" containerName="rabbitmq" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.076452 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="3dae848a-4c75-4ddc-9b6d-b64f127eba9b" containerName="rabbitmq" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.076466 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="33099395-9065-4b94-95bb-70154f26962e" containerName="rabbitmq" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.077514 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.083501 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.083704 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-wr9qh" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.084778 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.084997 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.085126 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.112703 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.209411 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/33099395-9065-4b94-95bb-70154f26962e-rabbitmq-erlang-cookie\") pod \"33099395-9065-4b94-95bb-70154f26962e\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.209656 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/33099395-9065-4b94-95bb-70154f26962e-erlang-cookie-secret\") pod \"33099395-9065-4b94-95bb-70154f26962e\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.209717 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/33099395-9065-4b94-95bb-70154f26962e-server-conf\") pod \"33099395-9065-4b94-95bb-70154f26962e\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.209747 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/33099395-9065-4b94-95bb-70154f26962e-plugins-conf\") pod \"33099395-9065-4b94-95bb-70154f26962e\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.209786 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/33099395-9065-4b94-95bb-70154f26962e-rabbitmq-confd\") pod \"33099395-9065-4b94-95bb-70154f26962e\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.209821 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/33099395-9065-4b94-95bb-70154f26962e-pod-info\") pod \"33099395-9065-4b94-95bb-70154f26962e\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.209849 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/33099395-9065-4b94-95bb-70154f26962e-rabbitmq-plugins\") pod \"33099395-9065-4b94-95bb-70154f26962e\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " Oct 10 
17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.210007 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c54403e4-df22-42f7-8ce9-f2b71f958c3e\") pod \"33099395-9065-4b94-95bb-70154f26962e\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.210037 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4c65n\" (UniqueName: \"kubernetes.io/projected/33099395-9065-4b94-95bb-70154f26962e-kube-api-access-4c65n\") pod \"33099395-9065-4b94-95bb-70154f26962e\" (UID: \"33099395-9065-4b94-95bb-70154f26962e\") " Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.210231 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/38e633f5-1f40-40b7-979b-4c34ec12dcf4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"38e633f5-1f40-40b7-979b-4c34ec12dcf4\") " pod="openstack/rabbitmq-server-0" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.210269 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/38e633f5-1f40-40b7-979b-4c34ec12dcf4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"38e633f5-1f40-40b7-979b-4c34ec12dcf4\") " pod="openstack/rabbitmq-server-0" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.210293 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/38e633f5-1f40-40b7-979b-4c34ec12dcf4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"38e633f5-1f40-40b7-979b-4c34ec12dcf4\") " pod="openstack/rabbitmq-server-0" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.210338 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/38e633f5-1f40-40b7-979b-4c34ec12dcf4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"38e633f5-1f40-40b7-979b-4c34ec12dcf4\") " pod="openstack/rabbitmq-server-0" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.210357 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/38e633f5-1f40-40b7-979b-4c34ec12dcf4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"38e633f5-1f40-40b7-979b-4c34ec12dcf4\") " pod="openstack/rabbitmq-server-0" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.210371 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/38e633f5-1f40-40b7-979b-4c34ec12dcf4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"38e633f5-1f40-40b7-979b-4c34ec12dcf4\") " pod="openstack/rabbitmq-server-0" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.210388 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/38e633f5-1f40-40b7-979b-4c34ec12dcf4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"38e633f5-1f40-40b7-979b-4c34ec12dcf4\") " pod="openstack/rabbitmq-server-0" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.210420 4799 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-4a167392-8227-452d-8172-17e1bbf510ef\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4a167392-8227-452d-8172-17e1bbf510ef\") pod \"rabbitmq-server-0\" (UID: \"38e633f5-1f40-40b7-979b-4c34ec12dcf4\") " pod="openstack/rabbitmq-server-0" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.210436 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6z5hx\" (UniqueName: \"kubernetes.io/projected/38e633f5-1f40-40b7-979b-4c34ec12dcf4-kube-api-access-6z5hx\") pod \"rabbitmq-server-0\" (UID: \"38e633f5-1f40-40b7-979b-4c34ec12dcf4\") " pod="openstack/rabbitmq-server-0" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.210488 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/33099395-9065-4b94-95bb-70154f26962e-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "33099395-9065-4b94-95bb-70154f26962e" (UID: "33099395-9065-4b94-95bb-70154f26962e"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.210730 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/33099395-9065-4b94-95bb-70154f26962e-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "33099395-9065-4b94-95bb-70154f26962e" (UID: "33099395-9065-4b94-95bb-70154f26962e"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.211193 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/33099395-9065-4b94-95bb-70154f26962e-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "33099395-9065-4b94-95bb-70154f26962e" (UID: "33099395-9065-4b94-95bb-70154f26962e"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.215449 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/33099395-9065-4b94-95bb-70154f26962e-pod-info" (OuterVolumeSpecName: "pod-info") pod "33099395-9065-4b94-95bb-70154f26962e" (UID: "33099395-9065-4b94-95bb-70154f26962e"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.215506 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33099395-9065-4b94-95bb-70154f26962e-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "33099395-9065-4b94-95bb-70154f26962e" (UID: "33099395-9065-4b94-95bb-70154f26962e"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.220979 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33099395-9065-4b94-95bb-70154f26962e-kube-api-access-4c65n" (OuterVolumeSpecName: "kube-api-access-4c65n") pod "33099395-9065-4b94-95bb-70154f26962e" (UID: "33099395-9065-4b94-95bb-70154f26962e"). InnerVolumeSpecName "kube-api-access-4c65n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.232611 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c54403e4-df22-42f7-8ce9-f2b71f958c3e" (OuterVolumeSpecName: "persistence") pod "33099395-9065-4b94-95bb-70154f26962e" (UID: "33099395-9065-4b94-95bb-70154f26962e"). InnerVolumeSpecName "pvc-c54403e4-df22-42f7-8ce9-f2b71f958c3e". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.235308 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/33099395-9065-4b94-95bb-70154f26962e-server-conf" (OuterVolumeSpecName: "server-conf") pod "33099395-9065-4b94-95bb-70154f26962e" (UID: "33099395-9065-4b94-95bb-70154f26962e"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.249129 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67d9f7fb89-s2cmk" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.302902 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33099395-9065-4b94-95bb-70154f26962e-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "33099395-9065-4b94-95bb-70154f26962e" (UID: "33099395-9065-4b94-95bb-70154f26962e"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.311616 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/38e633f5-1f40-40b7-979b-4c34ec12dcf4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"38e633f5-1f40-40b7-979b-4c34ec12dcf4\") " pod="openstack/rabbitmq-server-0" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.311693 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/38e633f5-1f40-40b7-979b-4c34ec12dcf4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"38e633f5-1f40-40b7-979b-4c34ec12dcf4\") " pod="openstack/rabbitmq-server-0" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.311716 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/38e633f5-1f40-40b7-979b-4c34ec12dcf4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"38e633f5-1f40-40b7-979b-4c34ec12dcf4\") " pod="openstack/rabbitmq-server-0" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.311730 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/38e633f5-1f40-40b7-979b-4c34ec12dcf4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"38e633f5-1f40-40b7-979b-4c34ec12dcf4\") " pod="openstack/rabbitmq-server-0" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.311749 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/38e633f5-1f40-40b7-979b-4c34ec12dcf4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"38e633f5-1f40-40b7-979b-4c34ec12dcf4\") " pod="openstack/rabbitmq-server-0" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.311796 4799 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"pvc-4a167392-8227-452d-8172-17e1bbf510ef\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4a167392-8227-452d-8172-17e1bbf510ef\") pod \"rabbitmq-server-0\" (UID: \"38e633f5-1f40-40b7-979b-4c34ec12dcf4\") " pod="openstack/rabbitmq-server-0" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.311817 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6z5hx\" (UniqueName: \"kubernetes.io/projected/38e633f5-1f40-40b7-979b-4c34ec12dcf4-kube-api-access-6z5hx\") pod \"rabbitmq-server-0\" (UID: \"38e633f5-1f40-40b7-979b-4c34ec12dcf4\") " pod="openstack/rabbitmq-server-0" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.311846 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/38e633f5-1f40-40b7-979b-4c34ec12dcf4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"38e633f5-1f40-40b7-979b-4c34ec12dcf4\") " pod="openstack/rabbitmq-server-0" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.311874 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/38e633f5-1f40-40b7-979b-4c34ec12dcf4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"38e633f5-1f40-40b7-979b-4c34ec12dcf4\") " pod="openstack/rabbitmq-server-0" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.311918 4799 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/33099395-9065-4b94-95bb-70154f26962e-server-conf\") on node \"crc\" DevicePath \"\"" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.311928 4799 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/33099395-9065-4b94-95bb-70154f26962e-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.311937 4799 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/33099395-9065-4b94-95bb-70154f26962e-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.311945 4799 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/33099395-9065-4b94-95bb-70154f26962e-pod-info\") on node \"crc\" DevicePath \"\"" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.311953 4799 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/33099395-9065-4b94-95bb-70154f26962e-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.311976 4799 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-c54403e4-df22-42f7-8ce9-f2b71f958c3e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c54403e4-df22-42f7-8ce9-f2b71f958c3e\") on node \"crc\" " Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.311987 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4c65n\" (UniqueName: \"kubernetes.io/projected/33099395-9065-4b94-95bb-70154f26962e-kube-api-access-4c65n\") on node \"crc\" DevicePath \"\"" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.311996 4799 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/33099395-9065-4b94-95bb-70154f26962e-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.312005 4799 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/33099395-9065-4b94-95bb-70154f26962e-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.312742 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/38e633f5-1f40-40b7-979b-4c34ec12dcf4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"38e633f5-1f40-40b7-979b-4c34ec12dcf4\") " pod="openstack/rabbitmq-server-0" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.314292 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/38e633f5-1f40-40b7-979b-4c34ec12dcf4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"38e633f5-1f40-40b7-979b-4c34ec12dcf4\") " pod="openstack/rabbitmq-server-0" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.314383 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/38e633f5-1f40-40b7-979b-4c34ec12dcf4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"38e633f5-1f40-40b7-979b-4c34ec12dcf4\") " pod="openstack/rabbitmq-server-0" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.314816 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/38e633f5-1f40-40b7-979b-4c34ec12dcf4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"38e633f5-1f40-40b7-979b-4c34ec12dcf4\") " pod="openstack/rabbitmq-server-0" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.316808 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/38e633f5-1f40-40b7-979b-4c34ec12dcf4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"38e633f5-1f40-40b7-979b-4c34ec12dcf4\") " pod="openstack/rabbitmq-server-0" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.317085 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/38e633f5-1f40-40b7-979b-4c34ec12dcf4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"38e633f5-1f40-40b7-979b-4c34ec12dcf4\") " pod="openstack/rabbitmq-server-0" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.318385 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/38e633f5-1f40-40b7-979b-4c34ec12dcf4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"38e633f5-1f40-40b7-979b-4c34ec12dcf4\") " pod="openstack/rabbitmq-server-0" Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.319466 4799 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.319506 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-4a167392-8227-452d-8172-17e1bbf510ef\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4a167392-8227-452d-8172-17e1bbf510ef\") pod \"rabbitmq-server-0\" (UID: \"38e633f5-1f40-40b7-979b-4c34ec12dcf4\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/82558c7cfd982b1a59463f8c8923da7fd95b3bd579c05021a4870e1ffc5887b8/globalmount\"" pod="openstack/rabbitmq-server-0"
Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.334142 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6z5hx\" (UniqueName: \"kubernetes.io/projected/38e633f5-1f40-40b7-979b-4c34ec12dcf4-kube-api-access-6z5hx\") pod \"rabbitmq-server-0\" (UID: \"38e633f5-1f40-40b7-979b-4c34ec12dcf4\") " pod="openstack/rabbitmq-server-0"
Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.336519 4799 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice...
Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.336638 4799 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-c54403e4-df22-42f7-8ce9-f2b71f958c3e" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c54403e4-df22-42f7-8ce9-f2b71f958c3e") on node "crc"
Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.354919 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-4a167392-8227-452d-8172-17e1bbf510ef\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4a167392-8227-452d-8172-17e1bbf510ef\") pod \"rabbitmq-server-0\" (UID: \"38e633f5-1f40-40b7-979b-4c34ec12dcf4\") " pod="openstack/rabbitmq-server-0"
Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.412270 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3dae848a-4c75-4ddc-9b6d-b64f127eba9b" path="/var/lib/kubelet/pods/3dae848a-4c75-4ddc-9b6d-b64f127eba9b/volumes"
Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.412734 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/922d8c7c-9feb-408b-a0df-fec585601827-config\") pod \"922d8c7c-9feb-408b-a0df-fec585601827\" (UID: \"922d8c7c-9feb-408b-a0df-fec585601827\") "
Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.412810 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-brjtp\" (UniqueName: \"kubernetes.io/projected/922d8c7c-9feb-408b-a0df-fec585601827-kube-api-access-brjtp\") pod \"922d8c7c-9feb-408b-a0df-fec585601827\" (UID: \"922d8c7c-9feb-408b-a0df-fec585601827\") "
Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.412884 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/922d8c7c-9feb-408b-a0df-fec585601827-dns-svc\") pod \"922d8c7c-9feb-408b-a0df-fec585601827\" (UID: \"922d8c7c-9feb-408b-a0df-fec585601827\") "
Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.413438 4799 reconciler_common.go:293] "Volume detached for volume \"pvc-c54403e4-df22-42f7-8ce9-f2b71f958c3e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c54403e4-df22-42f7-8ce9-f2b71f958c3e\") on node \"crc\" DevicePath \"\""
Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.416891 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/922d8c7c-9feb-408b-a0df-fec585601827-kube-api-access-brjtp" (OuterVolumeSpecName: "kube-api-access-brjtp") pod "922d8c7c-9feb-408b-a0df-fec585601827" (UID: "922d8c7c-9feb-408b-a0df-fec585601827"). InnerVolumeSpecName "kube-api-access-brjtp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.439273 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.443928 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/922d8c7c-9feb-408b-a0df-fec585601827-config" (OuterVolumeSpecName: "config") pod "922d8c7c-9feb-408b-a0df-fec585601827" (UID: "922d8c7c-9feb-408b-a0df-fec585601827"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.450664 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/922d8c7c-9feb-408b-a0df-fec585601827-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "922d8c7c-9feb-408b-a0df-fec585601827" (UID: "922d8c7c-9feb-408b-a0df-fec585601827"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.522349 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/922d8c7c-9feb-408b-a0df-fec585601827-config\") on node \"crc\" DevicePath \"\""
Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.522435 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-brjtp\" (UniqueName: \"kubernetes.io/projected/922d8c7c-9feb-408b-a0df-fec585601827-kube-api-access-brjtp\") on node \"crc\" DevicePath \"\""
Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.522466 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/922d8c7c-9feb-408b-a0df-fec585601827-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.887150 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.933656 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"38e633f5-1f40-40b7-979b-4c34ec12dcf4","Type":"ContainerStarted","Data":"1e0a1c04024742157cbfea42bb7bcdb8ff44aa86c8203156205243e7c45bde34"}
Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.937394 4799 generic.go:334] "Generic (PLEG): container finished" podID="922d8c7c-9feb-408b-a0df-fec585601827" containerID="5fd705535286f953b024a58d0cef26bea66822226c41df2c3a877a67d9326012" exitCode=0
Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.937501 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67d9f7fb89-s2cmk" event={"ID":"922d8c7c-9feb-408b-a0df-fec585601827","Type":"ContainerDied","Data":"5fd705535286f953b024a58d0cef26bea66822226c41df2c3a877a67d9326012"}
Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.937530 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67d9f7fb89-s2cmk"
Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.937580 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67d9f7fb89-s2cmk" event={"ID":"922d8c7c-9feb-408b-a0df-fec585601827","Type":"ContainerDied","Data":"a218d59ac8c7586e462510f6b212938f78e17bcfcc0b8e01d439fcad7ef03821"}
Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.937637 4799 scope.go:117] "RemoveContainer" containerID="5fd705535286f953b024a58d0cef26bea66822226c41df2c3a877a67d9326012"
Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.945326 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"33099395-9065-4b94-95bb-70154f26962e","Type":"ContainerDied","Data":"109ee61cc172613d038184c41cca98b6b1c233c891d1a1ea411577b226a5a91b"}
Oct 10 17:50:33 crc kubenswrapper[4799]: I1010 17:50:33.945400 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.060815 4799 scope.go:117] "RemoveContainer" containerID="04cd40e92ddb78ac09bb92bf93a216f39d4afa35b3e6d4078dc30092faa68e05"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.115131 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67d9f7fb89-s2cmk"]
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.125832 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-67d9f7fb89-s2cmk"]
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.128692 4799 scope.go:117] "RemoveContainer" containerID="5fd705535286f953b024a58d0cef26bea66822226c41df2c3a877a67d9326012"
Oct 10 17:50:34 crc kubenswrapper[4799]: E1010 17:50:34.133868 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5fd705535286f953b024a58d0cef26bea66822226c41df2c3a877a67d9326012\": container with ID starting with 5fd705535286f953b024a58d0cef26bea66822226c41df2c3a877a67d9326012 not found: ID does not exist" containerID="5fd705535286f953b024a58d0cef26bea66822226c41df2c3a877a67d9326012"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.134023 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fd705535286f953b024a58d0cef26bea66822226c41df2c3a877a67d9326012"} err="failed to get container status \"5fd705535286f953b024a58d0cef26bea66822226c41df2c3a877a67d9326012\": rpc error: code = NotFound desc = could not find container \"5fd705535286f953b024a58d0cef26bea66822226c41df2c3a877a67d9326012\": container with ID starting with 5fd705535286f953b024a58d0cef26bea66822226c41df2c3a877a67d9326012 not found: ID does not exist"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.134109 4799 scope.go:117] "RemoveContainer" containerID="04cd40e92ddb78ac09bb92bf93a216f39d4afa35b3e6d4078dc30092faa68e05"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.137506 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 10 17:50:34 crc kubenswrapper[4799]: E1010 17:50:34.137787 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04cd40e92ddb78ac09bb92bf93a216f39d4afa35b3e6d4078dc30092faa68e05\": container with ID starting with 04cd40e92ddb78ac09bb92bf93a216f39d4afa35b3e6d4078dc30092faa68e05 not found: ID does not exist" containerID="04cd40e92ddb78ac09bb92bf93a216f39d4afa35b3e6d4078dc30092faa68e05"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.137897 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04cd40e92ddb78ac09bb92bf93a216f39d4afa35b3e6d4078dc30092faa68e05"} err="failed to get container status \"04cd40e92ddb78ac09bb92bf93a216f39d4afa35b3e6d4078dc30092faa68e05\": rpc error: code = NotFound desc = could not find container \"04cd40e92ddb78ac09bb92bf93a216f39d4afa35b3e6d4078dc30092faa68e05\": container with ID starting with 04cd40e92ddb78ac09bb92bf93a216f39d4afa35b3e6d4078dc30092faa68e05 not found: ID does not exist"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.137974 4799 scope.go:117] "RemoveContainer" containerID="47c035f4b09c4c01a9e7b93c71f9700b3cb6094e1c2c144e370d5701971f3b8c"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.144832 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.174792 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 10 17:50:34 crc kubenswrapper[4799]: E1010 17:50:34.175371 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="922d8c7c-9feb-408b-a0df-fec585601827" containerName="dnsmasq-dns"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.175392 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="922d8c7c-9feb-408b-a0df-fec585601827" containerName="dnsmasq-dns"
Oct 10 17:50:34 crc kubenswrapper[4799]: E1010 17:50:34.175428 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="922d8c7c-9feb-408b-a0df-fec585601827" containerName="init"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.175436 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="922d8c7c-9feb-408b-a0df-fec585601827" containerName="init"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.175743 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="922d8c7c-9feb-408b-a0df-fec585601827" containerName="dnsmasq-dns"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.177036 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.182164 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.182392 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.182629 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.182782 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-fp6hb"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.182965 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.191464 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.231180 4799 scope.go:117] "RemoveContainer" containerID="121bfef76d0172879e6e5c57fcef7df33465b03df258b5acd73f122ce2086d91"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.340418 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/217d7ceb-fa68-4e17-bd2d-8cf07d85e871-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"217d7ceb-fa68-4e17-bd2d-8cf07d85e871\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.340504 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/217d7ceb-fa68-4e17-bd2d-8cf07d85e871-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"217d7ceb-fa68-4e17-bd2d-8cf07d85e871\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.340592 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c54403e4-df22-42f7-8ce9-f2b71f958c3e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c54403e4-df22-42f7-8ce9-f2b71f958c3e\") pod \"rabbitmq-cell1-server-0\" (UID: \"217d7ceb-fa68-4e17-bd2d-8cf07d85e871\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.340693 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/217d7ceb-fa68-4e17-bd2d-8cf07d85e871-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"217d7ceb-fa68-4e17-bd2d-8cf07d85e871\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.340779 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/217d7ceb-fa68-4e17-bd2d-8cf07d85e871-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"217d7ceb-fa68-4e17-bd2d-8cf07d85e871\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.340922 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/217d7ceb-fa68-4e17-bd2d-8cf07d85e871-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"217d7ceb-fa68-4e17-bd2d-8cf07d85e871\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.341044 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/217d7ceb-fa68-4e17-bd2d-8cf07d85e871-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"217d7ceb-fa68-4e17-bd2d-8cf07d85e871\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.341237 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/217d7ceb-fa68-4e17-bd2d-8cf07d85e871-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"217d7ceb-fa68-4e17-bd2d-8cf07d85e871\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.341315 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bd7zt\" (UniqueName: \"kubernetes.io/projected/217d7ceb-fa68-4e17-bd2d-8cf07d85e871-kube-api-access-bd7zt\") pod \"rabbitmq-cell1-server-0\" (UID: \"217d7ceb-fa68-4e17-bd2d-8cf07d85e871\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.442891 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/217d7ceb-fa68-4e17-bd2d-8cf07d85e871-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"217d7ceb-fa68-4e17-bd2d-8cf07d85e871\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.443015 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/217d7ceb-fa68-4e17-bd2d-8cf07d85e871-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"217d7ceb-fa68-4e17-bd2d-8cf07d85e871\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.443067 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bd7zt\" (UniqueName: \"kubernetes.io/projected/217d7ceb-fa68-4e17-bd2d-8cf07d85e871-kube-api-access-bd7zt\") pod \"rabbitmq-cell1-server-0\" (UID: \"217d7ceb-fa68-4e17-bd2d-8cf07d85e871\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.443152 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/217d7ceb-fa68-4e17-bd2d-8cf07d85e871-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"217d7ceb-fa68-4e17-bd2d-8cf07d85e871\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.443185 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/217d7ceb-fa68-4e17-bd2d-8cf07d85e871-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"217d7ceb-fa68-4e17-bd2d-8cf07d85e871\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.443230 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c54403e4-df22-42f7-8ce9-f2b71f958c3e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c54403e4-df22-42f7-8ce9-f2b71f958c3e\") pod \"rabbitmq-cell1-server-0\" (UID: \"217d7ceb-fa68-4e17-bd2d-8cf07d85e871\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.443315 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/217d7ceb-fa68-4e17-bd2d-8cf07d85e871-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"217d7ceb-fa68-4e17-bd2d-8cf07d85e871\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.443402 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/217d7ceb-fa68-4e17-bd2d-8cf07d85e871-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"217d7ceb-fa68-4e17-bd2d-8cf07d85e871\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.443449 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/217d7ceb-fa68-4e17-bd2d-8cf07d85e871-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"217d7ceb-fa68-4e17-bd2d-8cf07d85e871\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.444165 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/217d7ceb-fa68-4e17-bd2d-8cf07d85e871-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"217d7ceb-fa68-4e17-bd2d-8cf07d85e871\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.444447 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/217d7ceb-fa68-4e17-bd2d-8cf07d85e871-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"217d7ceb-fa68-4e17-bd2d-8cf07d85e871\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.445233 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/217d7ceb-fa68-4e17-bd2d-8cf07d85e871-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"217d7ceb-fa68-4e17-bd2d-8cf07d85e871\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.445311 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/217d7ceb-fa68-4e17-bd2d-8cf07d85e871-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"217d7ceb-fa68-4e17-bd2d-8cf07d85e871\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.447465 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/217d7ceb-fa68-4e17-bd2d-8cf07d85e871-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"217d7ceb-fa68-4e17-bd2d-8cf07d85e871\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.447823 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/217d7ceb-fa68-4e17-bd2d-8cf07d85e871-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"217d7ceb-fa68-4e17-bd2d-8cf07d85e871\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.449359 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/217d7ceb-fa68-4e17-bd2d-8cf07d85e871-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"217d7ceb-fa68-4e17-bd2d-8cf07d85e871\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.450726 4799 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.450799 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c54403e4-df22-42f7-8ce9-f2b71f958c3e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c54403e4-df22-42f7-8ce9-f2b71f958c3e\") pod \"rabbitmq-cell1-server-0\" (UID: \"217d7ceb-fa68-4e17-bd2d-8cf07d85e871\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/b4349c2768ef6a0a004a45a21e08426036165f9270846e8d1ddb681030198662/globalmount\"" pod="openstack/rabbitmq-cell1-server-0"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.473137 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bd7zt\" (UniqueName: \"kubernetes.io/projected/217d7ceb-fa68-4e17-bd2d-8cf07d85e871-kube-api-access-bd7zt\") pod \"rabbitmq-cell1-server-0\" (UID: \"217d7ceb-fa68-4e17-bd2d-8cf07d85e871\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.489666 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c54403e4-df22-42f7-8ce9-f2b71f958c3e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c54403e4-df22-42f7-8ce9-f2b71f958c3e\") pod \"rabbitmq-cell1-server-0\" (UID: \"217d7ceb-fa68-4e17-bd2d-8cf07d85e871\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.541641 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Oct 10 17:50:34 crc kubenswrapper[4799]: I1010 17:50:34.978684 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 10 17:50:34 crc kubenswrapper[4799]: W1010 17:50:34.992641 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod217d7ceb_fa68_4e17_bd2d_8cf07d85e871.slice/crio-5403e2017537f5c8d0508e2d0b8d728b8e1937e73e3427a1b0085c260469e292 WatchSource:0}: Error finding container 5403e2017537f5c8d0508e2d0b8d728b8e1937e73e3427a1b0085c260469e292: Status 404 returned error can't find the container with id 5403e2017537f5c8d0508e2d0b8d728b8e1937e73e3427a1b0085c260469e292
Oct 10 17:50:35 crc kubenswrapper[4799]: I1010 17:50:35.418851 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33099395-9065-4b94-95bb-70154f26962e" path="/var/lib/kubelet/pods/33099395-9065-4b94-95bb-70154f26962e/volumes"
Oct 10 17:50:35 crc kubenswrapper[4799]: I1010 17:50:35.419877 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="922d8c7c-9feb-408b-a0df-fec585601827" path="/var/lib/kubelet/pods/922d8c7c-9feb-408b-a0df-fec585601827/volumes"
Oct 10 17:50:35 crc kubenswrapper[4799]: I1010 17:50:35.979749 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"38e633f5-1f40-40b7-979b-4c34ec12dcf4","Type":"ContainerStarted","Data":"459706ffe62f77b57cf21e61e37fed432c2595044cb084163e17b219fa238b07"}
Oct 10 17:50:35 crc kubenswrapper[4799]: I1010 17:50:35.981207 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"217d7ceb-fa68-4e17-bd2d-8cf07d85e871","Type":"ContainerStarted","Data":"5403e2017537f5c8d0508e2d0b8d728b8e1937e73e3427a1b0085c260469e292"}
Oct 10 17:50:36 crc kubenswrapper[4799]: I1010 17:50:36.997507 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"217d7ceb-fa68-4e17-bd2d-8cf07d85e871","Type":"ContainerStarted","Data":"3d8588dac429d6e29e1dcf2c7ef8898c0fae752767d7b559af7efc4f2908dabd"}
Oct 10 17:50:44 crc kubenswrapper[4799]: E1010 17:50:44.950361 4799 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/system.slice/rpm-ostreed.service\": RecentStats: unable to find data in memory cache]"
Oct 10 17:50:45 crc kubenswrapper[4799]: I1010 17:50:45.249144 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 10 17:50:45 crc kubenswrapper[4799]: I1010 17:50:45.249230 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 10 17:51:09 crc kubenswrapper[4799]: I1010 17:51:09.355310 4799 generic.go:334] "Generic (PLEG): container finished" podID="38e633f5-1f40-40b7-979b-4c34ec12dcf4" containerID="459706ffe62f77b57cf21e61e37fed432c2595044cb084163e17b219fa238b07" exitCode=0
Oct 10 17:51:09 crc kubenswrapper[4799]: I1010 17:51:09.355431 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"38e633f5-1f40-40b7-979b-4c34ec12dcf4","Type":"ContainerDied","Data":"459706ffe62f77b57cf21e61e37fed432c2595044cb084163e17b219fa238b07"}
Oct 10 17:51:10 crc kubenswrapper[4799]: I1010 17:51:10.369212 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"38e633f5-1f40-40b7-979b-4c34ec12dcf4","Type":"ContainerStarted","Data":"6ec05c76c53f6f6213f3a88543feb2d5368c4707c3faf764f32e78c93e050e88"}
Oct 10 17:51:10 crc kubenswrapper[4799]: I1010 17:51:10.370114 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0"
Oct 10 17:51:10 crc kubenswrapper[4799]: I1010 17:51:10.396051 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.396029849 podStartE2EDuration="37.396029849s" podCreationTimestamp="2025-10-10 17:50:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 17:51:10.394320747 +0000 UTC m=+4763.902644912" watchObservedRunningTime="2025-10-10 17:51:10.396029849 +0000 UTC m=+4763.904353974"
Oct 10 17:51:11 crc kubenswrapper[4799]: I1010 17:51:11.383447 4799 generic.go:334] "Generic (PLEG): container finished" podID="217d7ceb-fa68-4e17-bd2d-8cf07d85e871" containerID="3d8588dac429d6e29e1dcf2c7ef8898c0fae752767d7b559af7efc4f2908dabd" exitCode=0
Oct 10 17:51:11 crc kubenswrapper[4799]: I1010 17:51:11.383554 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"217d7ceb-fa68-4e17-bd2d-8cf07d85e871","Type":"ContainerDied","Data":"3d8588dac429d6e29e1dcf2c7ef8898c0fae752767d7b559af7efc4f2908dabd"}
Oct 10 17:51:12 crc kubenswrapper[4799]: I1010 17:51:12.395902 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"217d7ceb-fa68-4e17-bd2d-8cf07d85e871","Type":"ContainerStarted","Data":"e303660f6efdfbc342b443c9fe4e97bf1e61efd6bd70288d6fbab4aa3f0760e1"}
Oct 10 17:51:12 crc kubenswrapper[4799]: I1010 17:51:12.396377 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0"
Oct 10 17:51:12 crc kubenswrapper[4799]: I1010 17:51:12.421017 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=38.420992151 podStartE2EDuration="38.420992151s" podCreationTimestamp="2025-10-10 17:50:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 17:51:12.414584904 +0000 UTC m=+4765.922909079" watchObservedRunningTime="2025-10-10 17:51:12.420992151 +0000 UTC m=+4765.929316266"
Oct 10 17:51:15 crc kubenswrapper[4799]: I1010 17:51:15.248614 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 10 17:51:15 crc kubenswrapper[4799]: I1010 17:51:15.249057 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 10 17:51:15 crc kubenswrapper[4799]: I1010 17:51:15.249115 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc"
Oct 10 17:51:15 crc kubenswrapper[4799]: I1010 17:51:15.250240 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f501bd44ca99d1332e2b38994323202e43e3dcc1b9ebd31fdd9ca3a13eab9e4d"} pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 10 17:51:15 crc kubenswrapper[4799]: I1010 17:51:15.250332 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" containerID="cri-o://f501bd44ca99d1332e2b38994323202e43e3dcc1b9ebd31fdd9ca3a13eab9e4d" gracePeriod=600
Oct 10 17:51:15 crc kubenswrapper[4799]: I1010 17:51:15.431626 4799 generic.go:334] "Generic (PLEG): container finished" podID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerID="f501bd44ca99d1332e2b38994323202e43e3dcc1b9ebd31fdd9ca3a13eab9e4d" exitCode=0
Oct 10 17:51:15 crc kubenswrapper[4799]: I1010 17:51:15.431699 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerDied","Data":"f501bd44ca99d1332e2b38994323202e43e3dcc1b9ebd31fdd9ca3a13eab9e4d"}
Oct 10 17:51:15 crc kubenswrapper[4799]: I1010 17:51:15.431785 4799 scope.go:117] "RemoveContainer" containerID="fe197396c5be2d0959a18226c3a887cbfc22f79ce601687c7a015173f2073961"
Oct 10 17:51:16 crc kubenswrapper[4799]: I1010 17:51:16.442066 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"5e973f476cb0655a6e33e886e2a59fc6754febf3bf5a4718abcef307858985dd"}
Oct 10 17:51:23 crc kubenswrapper[4799]: I1010 17:51:23.442011 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0"
Oct 10 17:51:24 crc kubenswrapper[4799]: I1010 17:51:24.545961 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0"
Oct 10 17:51:31 crc kubenswrapper[4799]: I1010 17:51:31.240927 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-1-default"]
Oct 10 17:51:31 crc kubenswrapper[4799]: I1010 17:51:31.244135 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1-default" Oct 10 17:51:31 crc kubenswrapper[4799]: I1010 17:51:31.249645 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-qbrln" Oct 10 17:51:31 crc kubenswrapper[4799]: I1010 17:51:31.253434 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1-default"] Oct 10 17:51:31 crc kubenswrapper[4799]: I1010 17:51:31.312059 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ntgb\" (UniqueName: \"kubernetes.io/projected/948a1924-4d74-4a3e-8d3b-3afdf5a3d397-kube-api-access-9ntgb\") pod \"mariadb-client-1-default\" (UID: \"948a1924-4d74-4a3e-8d3b-3afdf5a3d397\") " pod="openstack/mariadb-client-1-default" Oct 10 17:51:31 crc kubenswrapper[4799]: I1010 17:51:31.414266 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ntgb\" (UniqueName: \"kubernetes.io/projected/948a1924-4d74-4a3e-8d3b-3afdf5a3d397-kube-api-access-9ntgb\") pod \"mariadb-client-1-default\" (UID: \"948a1924-4d74-4a3e-8d3b-3afdf5a3d397\") " pod="openstack/mariadb-client-1-default" Oct 10 17:51:31 crc kubenswrapper[4799]: I1010 17:51:31.434741 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ntgb\" (UniqueName: \"kubernetes.io/projected/948a1924-4d74-4a3e-8d3b-3afdf5a3d397-kube-api-access-9ntgb\") pod \"mariadb-client-1-default\" (UID: \"948a1924-4d74-4a3e-8d3b-3afdf5a3d397\") " pod="openstack/mariadb-client-1-default" Oct 10 17:51:31 crc kubenswrapper[4799]: I1010 17:51:31.572782 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default" Oct 10 17:51:32 crc kubenswrapper[4799]: I1010 17:51:32.149191 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1-default"] Oct 10 17:51:32 crc kubenswrapper[4799]: I1010 17:51:32.167259 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 10 17:51:32 crc kubenswrapper[4799]: I1010 17:51:32.600136 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1-default" event={"ID":"948a1924-4d74-4a3e-8d3b-3afdf5a3d397","Type":"ContainerStarted","Data":"ae1cdccd5b8d3b197f39771677a759d698a8e81eca88a17c6c019fcf42227418"} Oct 10 17:51:39 crc kubenswrapper[4799]: I1010 17:51:39.659224 4799 generic.go:334] "Generic (PLEG): container finished" podID="948a1924-4d74-4a3e-8d3b-3afdf5a3d397" containerID="38ef3f0503c9303575f3132fa23f8b11eb39ae03008d3beb8360bc5393d9df8c" exitCode=0 Oct 10 17:51:39 crc kubenswrapper[4799]: I1010 17:51:39.659293 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1-default" event={"ID":"948a1924-4d74-4a3e-8d3b-3afdf5a3d397","Type":"ContainerDied","Data":"38ef3f0503c9303575f3132fa23f8b11eb39ae03008d3beb8360bc5393d9df8c"} Oct 10 17:51:41 crc kubenswrapper[4799]: I1010 17:51:41.079328 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1-default" Oct 10 17:51:41 crc kubenswrapper[4799]: I1010 17:51:41.110340 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-1-default_948a1924-4d74-4a3e-8d3b-3afdf5a3d397/mariadb-client-1-default/0.log" Oct 10 17:51:41 crc kubenswrapper[4799]: I1010 17:51:41.142244 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-1-default"] Oct 10 17:51:41 crc kubenswrapper[4799]: I1010 17:51:41.149481 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-1-default"] Oct 10 17:51:41 crc kubenswrapper[4799]: I1010 17:51:41.178695 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9ntgb\" (UniqueName: \"kubernetes.io/projected/948a1924-4d74-4a3e-8d3b-3afdf5a3d397-kube-api-access-9ntgb\") pod \"948a1924-4d74-4a3e-8d3b-3afdf5a3d397\" (UID: \"948a1924-4d74-4a3e-8d3b-3afdf5a3d397\") " Oct 10 17:51:41 crc kubenswrapper[4799]: I1010 17:51:41.185134 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/948a1924-4d74-4a3e-8d3b-3afdf5a3d397-kube-api-access-9ntgb" (OuterVolumeSpecName: "kube-api-access-9ntgb") pod "948a1924-4d74-4a3e-8d3b-3afdf5a3d397" (UID: "948a1924-4d74-4a3e-8d3b-3afdf5a3d397"). InnerVolumeSpecName "kube-api-access-9ntgb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:51:41 crc kubenswrapper[4799]: I1010 17:51:41.280353 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9ntgb\" (UniqueName: \"kubernetes.io/projected/948a1924-4d74-4a3e-8d3b-3afdf5a3d397-kube-api-access-9ntgb\") on node \"crc\" DevicePath \"\"" Oct 10 17:51:41 crc kubenswrapper[4799]: I1010 17:51:41.412582 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="948a1924-4d74-4a3e-8d3b-3afdf5a3d397" path="/var/lib/kubelet/pods/948a1924-4d74-4a3e-8d3b-3afdf5a3d397/volumes" Oct 10 17:51:41 crc kubenswrapper[4799]: I1010 17:51:41.601119 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-2-default"] Oct 10 17:51:41 crc kubenswrapper[4799]: E1010 17:51:41.601413 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="948a1924-4d74-4a3e-8d3b-3afdf5a3d397" containerName="mariadb-client-1-default" Oct 10 17:51:41 crc kubenswrapper[4799]: I1010 17:51:41.601431 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="948a1924-4d74-4a3e-8d3b-3afdf5a3d397" containerName="mariadb-client-1-default" Oct 10 17:51:41 crc kubenswrapper[4799]: I1010 17:51:41.601583 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="948a1924-4d74-4a3e-8d3b-3afdf5a3d397" containerName="mariadb-client-1-default" Oct 10 17:51:41 crc kubenswrapper[4799]: I1010 17:51:41.602076 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2-default" Oct 10 17:51:41 crc kubenswrapper[4799]: I1010 17:51:41.620598 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2-default"] Oct 10 17:51:41 crc kubenswrapper[4799]: I1010 17:51:41.678662 4799 scope.go:117] "RemoveContainer" containerID="38ef3f0503c9303575f3132fa23f8b11eb39ae03008d3beb8360bc5393d9df8c" Oct 10 17:51:41 crc kubenswrapper[4799]: I1010 17:51:41.678903 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1-default" Oct 10 17:51:41 crc kubenswrapper[4799]: I1010 17:51:41.687457 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lgxx8\" (UniqueName: \"kubernetes.io/projected/7e59b6bf-da23-43d3-b25a-f46a7e8fcd2d-kube-api-access-lgxx8\") pod \"mariadb-client-2-default\" (UID: \"7e59b6bf-da23-43d3-b25a-f46a7e8fcd2d\") " pod="openstack/mariadb-client-2-default" Oct 10 17:51:41 crc kubenswrapper[4799]: I1010 17:51:41.788543 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgxx8\" (UniqueName: \"kubernetes.io/projected/7e59b6bf-da23-43d3-b25a-f46a7e8fcd2d-kube-api-access-lgxx8\") pod \"mariadb-client-2-default\" (UID: \"7e59b6bf-da23-43d3-b25a-f46a7e8fcd2d\") " pod="openstack/mariadb-client-2-default" Oct 10 17:51:41 crc kubenswrapper[4799]: I1010 17:51:41.810559 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lgxx8\" (UniqueName: \"kubernetes.io/projected/7e59b6bf-da23-43d3-b25a-f46a7e8fcd2d-kube-api-access-lgxx8\") pod \"mariadb-client-2-default\" (UID: \"7e59b6bf-da23-43d3-b25a-f46a7e8fcd2d\") " pod="openstack/mariadb-client-2-default" Oct 10 17:51:41 crc kubenswrapper[4799]: I1010 17:51:41.928620 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2-default" Oct 10 17:51:42 crc kubenswrapper[4799]: I1010 17:51:42.483507 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2-default"] Oct 10 17:51:42 crc kubenswrapper[4799]: I1010 17:51:42.689259 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"7e59b6bf-da23-43d3-b25a-f46a7e8fcd2d","Type":"ContainerStarted","Data":"7fac4e6a2b094555f55ec6eeae21210e7fddc41f8bed23337d2b0f80130417e5"} Oct 10 17:51:43 crc kubenswrapper[4799]: I1010 17:51:43.704657 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"7e59b6bf-da23-43d3-b25a-f46a7e8fcd2d","Type":"ContainerStarted","Data":"5c115e91559c0508908141e853c04b457972c39bb070cfc65da0432619bfe115"} Oct 10 17:51:43 crc kubenswrapper[4799]: I1010 17:51:43.724987 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-client-2-default" podStartSLOduration=2.7249632139999997 podStartE2EDuration="2.724963214s" podCreationTimestamp="2025-10-10 17:51:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 17:51:43.719895851 +0000 UTC m=+4797.228220006" watchObservedRunningTime="2025-10-10 17:51:43.724963214 +0000 UTC m=+4797.233287360" Oct 10 17:51:44 crc kubenswrapper[4799]: I1010 17:51:44.715565 4799 generic.go:334] "Generic (PLEG): container finished" podID="7e59b6bf-da23-43d3-b25a-f46a7e8fcd2d" containerID="5c115e91559c0508908141e853c04b457972c39bb070cfc65da0432619bfe115" exitCode=0 Oct 10 17:51:44 crc kubenswrapper[4799]: I1010 17:51:44.715814 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"7e59b6bf-da23-43d3-b25a-f46a7e8fcd2d","Type":"ContainerDied","Data":"5c115e91559c0508908141e853c04b457972c39bb070cfc65da0432619bfe115"} Oct 10 17:51:46 crc kubenswrapper[4799]: I1010 17:51:46.176717 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2-default" Oct 10 17:51:46 crc kubenswrapper[4799]: I1010 17:51:46.217054 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-2-default"] Oct 10 17:51:46 crc kubenswrapper[4799]: I1010 17:51:46.224041 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-2-default"] Oct 10 17:51:46 crc kubenswrapper[4799]: I1010 17:51:46.273267 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lgxx8\" (UniqueName: \"kubernetes.io/projected/7e59b6bf-da23-43d3-b25a-f46a7e8fcd2d-kube-api-access-lgxx8\") pod \"7e59b6bf-da23-43d3-b25a-f46a7e8fcd2d\" (UID: \"7e59b6bf-da23-43d3-b25a-f46a7e8fcd2d\") " Oct 10 17:51:46 crc kubenswrapper[4799]: I1010 17:51:46.287156 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e59b6bf-da23-43d3-b25a-f46a7e8fcd2d-kube-api-access-lgxx8" (OuterVolumeSpecName: "kube-api-access-lgxx8") pod "7e59b6bf-da23-43d3-b25a-f46a7e8fcd2d" (UID: "7e59b6bf-da23-43d3-b25a-f46a7e8fcd2d"). InnerVolumeSpecName "kube-api-access-lgxx8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:51:46 crc kubenswrapper[4799]: I1010 17:51:46.375355 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lgxx8\" (UniqueName: \"kubernetes.io/projected/7e59b6bf-da23-43d3-b25a-f46a7e8fcd2d-kube-api-access-lgxx8\") on node \"crc\" DevicePath \"\"" Oct 10 17:51:46 crc kubenswrapper[4799]: I1010 17:51:46.627906 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-1"] Oct 10 17:51:46 crc kubenswrapper[4799]: E1010 17:51:46.628438 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e59b6bf-da23-43d3-b25a-f46a7e8fcd2d" containerName="mariadb-client-2-default" Oct 10 17:51:46 crc kubenswrapper[4799]: I1010 17:51:46.628459 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e59b6bf-da23-43d3-b25a-f46a7e8fcd2d" containerName="mariadb-client-2-default" Oct 10 17:51:46 crc kubenswrapper[4799]: I1010 17:51:46.628665 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e59b6bf-da23-43d3-b25a-f46a7e8fcd2d" containerName="mariadb-client-2-default" Oct 10 17:51:46 crc kubenswrapper[4799]: I1010 17:51:46.629458 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1" Oct 10 17:51:46 crc kubenswrapper[4799]: I1010 17:51:46.633860 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1"] Oct 10 17:51:46 crc kubenswrapper[4799]: I1010 17:51:46.736968 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7fac4e6a2b094555f55ec6eeae21210e7fddc41f8bed23337d2b0f80130417e5" Oct 10 17:51:46 crc kubenswrapper[4799]: I1010 17:51:46.737059 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2-default" Oct 10 17:51:46 crc kubenswrapper[4799]: I1010 17:51:46.783493 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7g45\" (UniqueName: \"kubernetes.io/projected/7c13c03f-3041-4a3d-9e2a-fc3b2f51ead0-kube-api-access-q7g45\") pod \"mariadb-client-1\" (UID: \"7c13c03f-3041-4a3d-9e2a-fc3b2f51ead0\") " pod="openstack/mariadb-client-1" Oct 10 17:51:46 crc kubenswrapper[4799]: I1010 17:51:46.885257 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7g45\" (UniqueName: \"kubernetes.io/projected/7c13c03f-3041-4a3d-9e2a-fc3b2f51ead0-kube-api-access-q7g45\") pod \"mariadb-client-1\" (UID: \"7c13c03f-3041-4a3d-9e2a-fc3b2f51ead0\") " pod="openstack/mariadb-client-1" Oct 10 17:51:46 crc kubenswrapper[4799]: I1010 17:51:46.904738 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7g45\" (UniqueName: \"kubernetes.io/projected/7c13c03f-3041-4a3d-9e2a-fc3b2f51ead0-kube-api-access-q7g45\") pod \"mariadb-client-1\" (UID: \"7c13c03f-3041-4a3d-9e2a-fc3b2f51ead0\") " pod="openstack/mariadb-client-1" Oct 10 17:51:46 crc kubenswrapper[4799]: I1010 17:51:46.958270 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1" Oct 10 17:51:47 crc kubenswrapper[4799]: I1010 17:51:47.442674 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e59b6bf-da23-43d3-b25a-f46a7e8fcd2d" path="/var/lib/kubelet/pods/7e59b6bf-da23-43d3-b25a-f46a7e8fcd2d/volumes" Oct 10 17:51:47 crc kubenswrapper[4799]: I1010 17:51:47.522260 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1"] Oct 10 17:51:47 crc kubenswrapper[4799]: W1010 17:51:47.527951 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7c13c03f_3041_4a3d_9e2a_fc3b2f51ead0.slice/crio-176aed5086064d6f66022cff146fb6ba38ee3e558f49c46cacb994db517a4003 WatchSource:0}: Error finding container 176aed5086064d6f66022cff146fb6ba38ee3e558f49c46cacb994db517a4003: Status 404 returned error can't find the container with id 176aed5086064d6f66022cff146fb6ba38ee3e558f49c46cacb994db517a4003 Oct 10 17:51:47 crc kubenswrapper[4799]: I1010 17:51:47.746937 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1" event={"ID":"7c13c03f-3041-4a3d-9e2a-fc3b2f51ead0","Type":"ContainerStarted","Data":"176aed5086064d6f66022cff146fb6ba38ee3e558f49c46cacb994db517a4003"} Oct 10 17:51:48 crc kubenswrapper[4799]: I1010 17:51:48.756557 4799 generic.go:334] "Generic (PLEG): container finished" podID="7c13c03f-3041-4a3d-9e2a-fc3b2f51ead0" containerID="452cc43f0187a182a982e862153c9c763e801a6d427ed70a3987333d5105b966" exitCode=0 Oct 10 17:51:48 crc kubenswrapper[4799]: I1010 17:51:48.756611 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1" event={"ID":"7c13c03f-3041-4a3d-9e2a-fc3b2f51ead0","Type":"ContainerDied","Data":"452cc43f0187a182a982e862153c9c763e801a6d427ed70a3987333d5105b966"} Oct 10 17:51:50 crc kubenswrapper[4799]: I1010 17:51:50.201342 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1" Oct 10 17:51:50 crc kubenswrapper[4799]: I1010 17:51:50.219300 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-1_7c13c03f-3041-4a3d-9e2a-fc3b2f51ead0/mariadb-client-1/0.log" Oct 10 17:51:50 crc kubenswrapper[4799]: I1010 17:51:50.248656 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-1"] Oct 10 17:51:50 crc kubenswrapper[4799]: I1010 17:51:50.254765 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-1"] Oct 10 17:51:50 crc kubenswrapper[4799]: I1010 17:51:50.283641 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q7g45\" (UniqueName: \"kubernetes.io/projected/7c13c03f-3041-4a3d-9e2a-fc3b2f51ead0-kube-api-access-q7g45\") pod \"7c13c03f-3041-4a3d-9e2a-fc3b2f51ead0\" (UID: \"7c13c03f-3041-4a3d-9e2a-fc3b2f51ead0\") " Oct 10 17:51:50 crc kubenswrapper[4799]: I1010 17:51:50.288823 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c13c03f-3041-4a3d-9e2a-fc3b2f51ead0-kube-api-access-q7g45" (OuterVolumeSpecName: "kube-api-access-q7g45") pod "7c13c03f-3041-4a3d-9e2a-fc3b2f51ead0" (UID: "7c13c03f-3041-4a3d-9e2a-fc3b2f51ead0"). InnerVolumeSpecName "kube-api-access-q7g45". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:51:50 crc kubenswrapper[4799]: I1010 17:51:50.385014 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q7g45\" (UniqueName: \"kubernetes.io/projected/7c13c03f-3041-4a3d-9e2a-fc3b2f51ead0-kube-api-access-q7g45\") on node \"crc\" DevicePath \"\"" Oct 10 17:51:50 crc kubenswrapper[4799]: I1010 17:51:50.776095 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="176aed5086064d6f66022cff146fb6ba38ee3e558f49c46cacb994db517a4003" Oct 10 17:51:50 crc kubenswrapper[4799]: I1010 17:51:50.776158 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1" Oct 10 17:51:50 crc kubenswrapper[4799]: I1010 17:51:50.869672 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-4-default"] Oct 10 17:51:50 crc kubenswrapper[4799]: E1010 17:51:50.870152 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c13c03f-3041-4a3d-9e2a-fc3b2f51ead0" containerName="mariadb-client-1" Oct 10 17:51:50 crc kubenswrapper[4799]: I1010 17:51:50.870174 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c13c03f-3041-4a3d-9e2a-fc3b2f51ead0" containerName="mariadb-client-1" Oct 10 17:51:50 crc kubenswrapper[4799]: I1010 17:51:50.870461 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c13c03f-3041-4a3d-9e2a-fc3b2f51ead0" containerName="mariadb-client-1" Oct 10 17:51:50 crc kubenswrapper[4799]: I1010 17:51:50.871708 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-4-default" Oct 10 17:51:50 crc kubenswrapper[4799]: I1010 17:51:50.873741 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-4-default"] Oct 10 17:51:50 crc kubenswrapper[4799]: I1010 17:51:50.876486 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-qbrln" Oct 10 17:51:51 crc kubenswrapper[4799]: I1010 17:51:50.999823 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rngc\" (UniqueName: \"kubernetes.io/projected/56d6dc90-ebc0-44c2-bb71-76e82852f30e-kube-api-access-9rngc\") pod \"mariadb-client-4-default\" (UID: \"56d6dc90-ebc0-44c2-bb71-76e82852f30e\") " pod="openstack/mariadb-client-4-default" Oct 10 17:51:51 crc kubenswrapper[4799]: I1010 17:51:51.102846 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rngc\" (UniqueName: \"kubernetes.io/projected/56d6dc90-ebc0-44c2-bb71-76e82852f30e-kube-api-access-9rngc\") pod \"mariadb-client-4-default\" (UID: \"56d6dc90-ebc0-44c2-bb71-76e82852f30e\") " pod="openstack/mariadb-client-4-default" Oct 10 17:51:51 crc kubenswrapper[4799]: I1010 17:51:51.135046 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rngc\" (UniqueName: \"kubernetes.io/projected/56d6dc90-ebc0-44c2-bb71-76e82852f30e-kube-api-access-9rngc\") pod \"mariadb-client-4-default\" (UID: \"56d6dc90-ebc0-44c2-bb71-76e82852f30e\") " pod="openstack/mariadb-client-4-default" Oct 10 17:51:51 crc kubenswrapper[4799]: I1010 17:51:51.216474 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default" Oct 10 17:51:51 crc kubenswrapper[4799]: I1010 17:51:51.414314 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c13c03f-3041-4a3d-9e2a-fc3b2f51ead0" path="/var/lib/kubelet/pods/7c13c03f-3041-4a3d-9e2a-fc3b2f51ead0/volumes" Oct 10 17:51:51 crc kubenswrapper[4799]: I1010 17:51:51.795784 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-4-default"] Oct 10 17:51:52 crc kubenswrapper[4799]: I1010 17:51:52.799882 4799 generic.go:334] "Generic (PLEG): container finished" podID="56d6dc90-ebc0-44c2-bb71-76e82852f30e" containerID="6b2c7e819dc758dbe0fef2da04fc6912e8b3b458c9628669a7e707412b87cc27" exitCode=0 Oct 10 17:51:52 crc kubenswrapper[4799]: I1010 17:51:52.799949 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-4-default" event={"ID":"56d6dc90-ebc0-44c2-bb71-76e82852f30e","Type":"ContainerDied","Data":"6b2c7e819dc758dbe0fef2da04fc6912e8b3b458c9628669a7e707412b87cc27"} Oct 10 17:51:52 crc kubenswrapper[4799]: I1010 17:51:52.800224 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-4-default" event={"ID":"56d6dc90-ebc0-44c2-bb71-76e82852f30e","Type":"ContainerStarted","Data":"bb38e8627195e5eb8cd74bd79d6a3e0ad268f26514f874afbbfaff9a91c0051f"} Oct 10 17:51:54 crc kubenswrapper[4799]: I1010 17:51:54.218578 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-4-default" Oct 10 17:51:54 crc kubenswrapper[4799]: I1010 17:51:54.245448 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-4-default_56d6dc90-ebc0-44c2-bb71-76e82852f30e/mariadb-client-4-default/0.log" Oct 10 17:51:54 crc kubenswrapper[4799]: I1010 17:51:54.286946 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-4-default"] Oct 10 17:51:54 crc kubenswrapper[4799]: I1010 17:51:54.298246 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-4-default"] Oct 10 17:51:54 crc kubenswrapper[4799]: I1010 17:51:54.354590 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9rngc\" (UniqueName: \"kubernetes.io/projected/56d6dc90-ebc0-44c2-bb71-76e82852f30e-kube-api-access-9rngc\") pod \"56d6dc90-ebc0-44c2-bb71-76e82852f30e\" (UID: \"56d6dc90-ebc0-44c2-bb71-76e82852f30e\") " Oct 10 17:51:54 crc kubenswrapper[4799]: I1010 17:51:54.362821 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56d6dc90-ebc0-44c2-bb71-76e82852f30e-kube-api-access-9rngc" (OuterVolumeSpecName: "kube-api-access-9rngc") pod "56d6dc90-ebc0-44c2-bb71-76e82852f30e" (UID: "56d6dc90-ebc0-44c2-bb71-76e82852f30e"). InnerVolumeSpecName "kube-api-access-9rngc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:51:54 crc kubenswrapper[4799]: I1010 17:51:54.456558 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9rngc\" (UniqueName: \"kubernetes.io/projected/56d6dc90-ebc0-44c2-bb71-76e82852f30e-kube-api-access-9rngc\") on node \"crc\" DevicePath \"\"" Oct 10 17:51:54 crc kubenswrapper[4799]: I1010 17:51:54.823920 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bb38e8627195e5eb8cd74bd79d6a3e0ad268f26514f874afbbfaff9a91c0051f" Oct 10 17:51:54 crc kubenswrapper[4799]: I1010 17:51:54.824027 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default" Oct 10 17:51:55 crc kubenswrapper[4799]: I1010 17:51:55.424880 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56d6dc90-ebc0-44c2-bb71-76e82852f30e" path="/var/lib/kubelet/pods/56d6dc90-ebc0-44c2-bb71-76e82852f30e/volumes" Oct 10 17:51:58 crc kubenswrapper[4799]: I1010 17:51:58.927136 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-5-default"] Oct 10 17:51:58 crc kubenswrapper[4799]: E1010 17:51:58.927632 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56d6dc90-ebc0-44c2-bb71-76e82852f30e" containerName="mariadb-client-4-default" Oct 10 17:51:58 crc kubenswrapper[4799]: I1010 17:51:58.927654 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="56d6dc90-ebc0-44c2-bb71-76e82852f30e" containerName="mariadb-client-4-default" Oct 10 17:51:58 crc kubenswrapper[4799]: I1010 17:51:58.927975 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="56d6dc90-ebc0-44c2-bb71-76e82852f30e" containerName="mariadb-client-4-default" Oct 10 17:51:58 crc kubenswrapper[4799]: I1010 17:51:58.928744 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-5-default" Oct 10 17:51:58 crc kubenswrapper[4799]: I1010 17:51:58.930905 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-qbrln" Oct 10 17:51:58 crc kubenswrapper[4799]: I1010 17:51:58.947413 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-5-default"] Oct 10 17:51:59 crc kubenswrapper[4799]: I1010 17:51:59.031050 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzffj\" (UniqueName: \"kubernetes.io/projected/e772ae88-e5c8-4666-b5eb-2a304afaaeec-kube-api-access-vzffj\") pod \"mariadb-client-5-default\" (UID: \"e772ae88-e5c8-4666-b5eb-2a304afaaeec\") " pod="openstack/mariadb-client-5-default" Oct 10 17:51:59 crc kubenswrapper[4799]: I1010 17:51:59.132443 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzffj\" (UniqueName: \"kubernetes.io/projected/e772ae88-e5c8-4666-b5eb-2a304afaaeec-kube-api-access-vzffj\") pod \"mariadb-client-5-default\" (UID: \"e772ae88-e5c8-4666-b5eb-2a304afaaeec\") " pod="openstack/mariadb-client-5-default" Oct 10 17:51:59 crc kubenswrapper[4799]: I1010 17:51:59.150169 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vzffj\" (UniqueName: \"kubernetes.io/projected/e772ae88-e5c8-4666-b5eb-2a304afaaeec-kube-api-access-vzffj\") pod \"mariadb-client-5-default\" (UID: \"e772ae88-e5c8-4666-b5eb-2a304afaaeec\") " pod="openstack/mariadb-client-5-default" Oct 10 17:51:59 crc kubenswrapper[4799]: I1010 17:51:59.254123 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-5-default" Oct 10 17:51:59 crc kubenswrapper[4799]: I1010 17:51:59.659337 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-5-default"] Oct 10 17:51:59 crc kubenswrapper[4799]: I1010 17:51:59.881506 4799 generic.go:334] "Generic (PLEG): container finished" podID="e772ae88-e5c8-4666-b5eb-2a304afaaeec" containerID="fcd37e9e5f5c3a0076dd4f7da5786deecbe5ea2318b5297e3fe2e8911aa51b4c" exitCode=0 Oct 10 17:51:59 crc kubenswrapper[4799]: I1010 17:51:59.881568 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-5-default" event={"ID":"e772ae88-e5c8-4666-b5eb-2a304afaaeec","Type":"ContainerDied","Data":"fcd37e9e5f5c3a0076dd4f7da5786deecbe5ea2318b5297e3fe2e8911aa51b4c"} Oct 10 17:51:59 crc kubenswrapper[4799]: I1010 17:51:59.881607 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-5-default" event={"ID":"e772ae88-e5c8-4666-b5eb-2a304afaaeec","Type":"ContainerStarted","Data":"ae6c650ef5d3826f02ed18a44858c7ffc342c32e5260daf1f896dbd06ed61b4c"} Oct 10 17:52:01 crc kubenswrapper[4799]: I1010 17:52:01.368467 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-5-default" Oct 10 17:52:01 crc kubenswrapper[4799]: I1010 17:52:01.397963 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-5-default_e772ae88-e5c8-4666-b5eb-2a304afaaeec/mariadb-client-5-default/0.log" Oct 10 17:52:01 crc kubenswrapper[4799]: I1010 17:52:01.435274 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-5-default"] Oct 10 17:52:01 crc kubenswrapper[4799]: I1010 17:52:01.442278 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-5-default"] Oct 10 17:52:01 crc kubenswrapper[4799]: I1010 17:52:01.472342 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vzffj\" (UniqueName: \"kubernetes.io/projected/e772ae88-e5c8-4666-b5eb-2a304afaaeec-kube-api-access-vzffj\") pod \"e772ae88-e5c8-4666-b5eb-2a304afaaeec\" (UID: \"e772ae88-e5c8-4666-b5eb-2a304afaaeec\") " Oct 10 17:52:01 crc kubenswrapper[4799]: I1010 17:52:01.480057 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e772ae88-e5c8-4666-b5eb-2a304afaaeec-kube-api-access-vzffj" (OuterVolumeSpecName: "kube-api-access-vzffj") pod "e772ae88-e5c8-4666-b5eb-2a304afaaeec" (UID: "e772ae88-e5c8-4666-b5eb-2a304afaaeec"). InnerVolumeSpecName "kube-api-access-vzffj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:52:01 crc kubenswrapper[4799]: I1010 17:52:01.574536 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vzffj\" (UniqueName: \"kubernetes.io/projected/e772ae88-e5c8-4666-b5eb-2a304afaaeec-kube-api-access-vzffj\") on node \"crc\" DevicePath \"\"" Oct 10 17:52:01 crc kubenswrapper[4799]: I1010 17:52:01.581375 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-6-default"] Oct 10 17:52:01 crc kubenswrapper[4799]: E1010 17:52:01.581877 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e772ae88-e5c8-4666-b5eb-2a304afaaeec" containerName="mariadb-client-5-default" Oct 10 17:52:01 crc kubenswrapper[4799]: I1010 17:52:01.581900 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="e772ae88-e5c8-4666-b5eb-2a304afaaeec" containerName="mariadb-client-5-default" Oct 10 17:52:01 crc kubenswrapper[4799]: I1010 17:52:01.582127 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="e772ae88-e5c8-4666-b5eb-2a304afaaeec" containerName="mariadb-client-5-default" Oct 10 17:52:01 crc kubenswrapper[4799]: I1010 17:52:01.582766 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-6-default" Oct 10 17:52:01 crc kubenswrapper[4799]: I1010 17:52:01.587087 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-6-default"] Oct 10 17:52:01 crc kubenswrapper[4799]: I1010 17:52:01.777230 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nngqw\" (UniqueName: \"kubernetes.io/projected/796f1d96-f686-4d14-a4e0-bdd90b00fd46-kube-api-access-nngqw\") pod \"mariadb-client-6-default\" (UID: \"796f1d96-f686-4d14-a4e0-bdd90b00fd46\") " pod="openstack/mariadb-client-6-default" Oct 10 17:52:01 crc kubenswrapper[4799]: I1010 17:52:01.879059 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nngqw\" (UniqueName: \"kubernetes.io/projected/796f1d96-f686-4d14-a4e0-bdd90b00fd46-kube-api-access-nngqw\") pod \"mariadb-client-6-default\" (UID: \"796f1d96-f686-4d14-a4e0-bdd90b00fd46\") " pod="openstack/mariadb-client-6-default" Oct 10 17:52:01 crc kubenswrapper[4799]: I1010 17:52:01.906630 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ae6c650ef5d3826f02ed18a44858c7ffc342c32e5260daf1f896dbd06ed61b4c" Oct 10 17:52:01 crc kubenswrapper[4799]: I1010 17:52:01.906731 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-5-default" Oct 10 17:52:01 crc kubenswrapper[4799]: I1010 17:52:01.914907 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nngqw\" (UniqueName: \"kubernetes.io/projected/796f1d96-f686-4d14-a4e0-bdd90b00fd46-kube-api-access-nngqw\") pod \"mariadb-client-6-default\" (UID: \"796f1d96-f686-4d14-a4e0-bdd90b00fd46\") " pod="openstack/mariadb-client-6-default" Oct 10 17:52:02 crc kubenswrapper[4799]: I1010 17:52:02.202859 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-6-default" Oct 10 17:52:02 crc kubenswrapper[4799]: I1010 17:52:02.794929 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-6-default"] Oct 10 17:52:03 crc kubenswrapper[4799]: W1010 17:52:03.190114 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod796f1d96_f686_4d14_a4e0_bdd90b00fd46.slice/crio-1d54df2216a8ee36482e6c7c5f1e0cc2369b0832c982a1e186528b93164b4bca WatchSource:0}: Error finding container 1d54df2216a8ee36482e6c7c5f1e0cc2369b0832c982a1e186528b93164b4bca: Status 404 returned error can't find the container with id 1d54df2216a8ee36482e6c7c5f1e0cc2369b0832c982a1e186528b93164b4bca Oct 10 17:52:03 crc kubenswrapper[4799]: I1010 17:52:03.416222 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e772ae88-e5c8-4666-b5eb-2a304afaaeec" path="/var/lib/kubelet/pods/e772ae88-e5c8-4666-b5eb-2a304afaaeec/volumes" Oct 10 17:52:03 crc kubenswrapper[4799]: I1010 17:52:03.927920 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"796f1d96-f686-4d14-a4e0-bdd90b00fd46","Type":"ContainerStarted","Data":"7924e9df587c89bcdee10662ecc97f40ba18deddacbdd2094e286596282b4cb4"} Oct 10 17:52:03 crc kubenswrapper[4799]: I1010 17:52:03.927979 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"796f1d96-f686-4d14-a4e0-bdd90b00fd46","Type":"ContainerStarted","Data":"1d54df2216a8ee36482e6c7c5f1e0cc2369b0832c982a1e186528b93164b4bca"} Oct 10 17:52:03 crc kubenswrapper[4799]: I1010 17:52:03.948634 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-client-6-default" podStartSLOduration=2.948564908 podStartE2EDuration="2.948564908s" podCreationTimestamp="2025-10-10 17:52:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 17:52:03.945748609 +0000 UTC m=+4817.454072844" watchObservedRunningTime="2025-10-10 17:52:03.948564908 +0000 UTC m=+4817.456889103" Oct 10 17:52:04 crc kubenswrapper[4799]: I1010 17:52:04.044872 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-6-default_796f1d96-f686-4d14-a4e0-bdd90b00fd46/mariadb-client-6-default/0.log" Oct 10 17:52:04 crc kubenswrapper[4799]: I1010 17:52:04.938966 4799 generic.go:334] "Generic (PLEG): container finished" podID="796f1d96-f686-4d14-a4e0-bdd90b00fd46" containerID="7924e9df587c89bcdee10662ecc97f40ba18deddacbdd2094e286596282b4cb4" exitCode=0 Oct 10 17:52:04 crc kubenswrapper[4799]: I1010 17:52:04.939054 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"796f1d96-f686-4d14-a4e0-bdd90b00fd46","Type":"ContainerDied","Data":"7924e9df587c89bcdee10662ecc97f40ba18deddacbdd2094e286596282b4cb4"} Oct 10 17:52:06 crc kubenswrapper[4799]: I1010 17:52:06.430184 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-6-default" Oct 10 17:52:06 crc kubenswrapper[4799]: I1010 17:52:06.461231 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-6-default"] Oct 10 17:52:06 crc kubenswrapper[4799]: I1010 17:52:06.465839 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-6-default"] Oct 10 17:52:06 crc kubenswrapper[4799]: I1010 17:52:06.556172 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nngqw\" (UniqueName: \"kubernetes.io/projected/796f1d96-f686-4d14-a4e0-bdd90b00fd46-kube-api-access-nngqw\") pod \"796f1d96-f686-4d14-a4e0-bdd90b00fd46\" (UID: \"796f1d96-f686-4d14-a4e0-bdd90b00fd46\") " Oct 10 17:52:06 crc kubenswrapper[4799]: I1010 17:52:06.561419 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/796f1d96-f686-4d14-a4e0-bdd90b00fd46-kube-api-access-nngqw" (OuterVolumeSpecName: "kube-api-access-nngqw") pod "796f1d96-f686-4d14-a4e0-bdd90b00fd46" (UID: "796f1d96-f686-4d14-a4e0-bdd90b00fd46"). InnerVolumeSpecName "kube-api-access-nngqw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:52:06 crc kubenswrapper[4799]: I1010 17:52:06.656386 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-7-default"] Oct 10 17:52:06 crc kubenswrapper[4799]: E1010 17:52:06.656823 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="796f1d96-f686-4d14-a4e0-bdd90b00fd46" containerName="mariadb-client-6-default" Oct 10 17:52:06 crc kubenswrapper[4799]: I1010 17:52:06.656843 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="796f1d96-f686-4d14-a4e0-bdd90b00fd46" containerName="mariadb-client-6-default" Oct 10 17:52:06 crc kubenswrapper[4799]: I1010 17:52:06.657056 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="796f1d96-f686-4d14-a4e0-bdd90b00fd46" containerName="mariadb-client-6-default" Oct 10 17:52:06 crc kubenswrapper[4799]: I1010 17:52:06.657695 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-7-default" Oct 10 17:52:06 crc kubenswrapper[4799]: I1010 17:52:06.658352 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nngqw\" (UniqueName: \"kubernetes.io/projected/796f1d96-f686-4d14-a4e0-bdd90b00fd46-kube-api-access-nngqw\") on node \"crc\" DevicePath \"\"" Oct 10 17:52:06 crc kubenswrapper[4799]: I1010 17:52:06.666305 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-7-default"] Oct 10 17:52:06 crc kubenswrapper[4799]: I1010 17:52:06.759213 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zp992\" (UniqueName: \"kubernetes.io/projected/c196da0b-780b-4f0f-ab3f-31a581cc4b8a-kube-api-access-zp992\") pod \"mariadb-client-7-default\" (UID: \"c196da0b-780b-4f0f-ab3f-31a581cc4b8a\") " pod="openstack/mariadb-client-7-default" Oct 10 17:52:06 crc kubenswrapper[4799]: I1010 17:52:06.860058 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zp992\" (UniqueName: \"kubernetes.io/projected/c196da0b-780b-4f0f-ab3f-31a581cc4b8a-kube-api-access-zp992\") pod \"mariadb-client-7-default\" (UID: \"c196da0b-780b-4f0f-ab3f-31a581cc4b8a\") " pod="openstack/mariadb-client-7-default" Oct 10 17:52:06 crc kubenswrapper[4799]: I1010 17:52:06.877151 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zp992\" (UniqueName: \"kubernetes.io/projected/c196da0b-780b-4f0f-ab3f-31a581cc4b8a-kube-api-access-zp992\") pod \"mariadb-client-7-default\" (UID: \"c196da0b-780b-4f0f-ab3f-31a581cc4b8a\") " pod="openstack/mariadb-client-7-default" Oct 10 17:52:06 crc kubenswrapper[4799]: I1010 17:52:06.956702 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1d54df2216a8ee36482e6c7c5f1e0cc2369b0832c982a1e186528b93164b4bca" Oct 10 17:52:06 crc kubenswrapper[4799]: I1010 17:52:06.956789 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-6-default" Oct 10 17:52:07 crc kubenswrapper[4799]: I1010 17:52:07.016617 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-7-default" Oct 10 17:52:07 crc kubenswrapper[4799]: I1010 17:52:07.323326 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-7-default"] Oct 10 17:52:07 crc kubenswrapper[4799]: W1010 17:52:07.328517 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc196da0b_780b_4f0f_ab3f_31a581cc4b8a.slice/crio-b6efde35cddd83ce54b75dfaea41c3ba0f63ea167b67d054617148eadef2a234 WatchSource:0}: Error finding container b6efde35cddd83ce54b75dfaea41c3ba0f63ea167b67d054617148eadef2a234: Status 404 returned error can't find the container with id b6efde35cddd83ce54b75dfaea41c3ba0f63ea167b67d054617148eadef2a234 Oct 10 17:52:07 crc kubenswrapper[4799]: I1010 17:52:07.426990 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="796f1d96-f686-4d14-a4e0-bdd90b00fd46" path="/var/lib/kubelet/pods/796f1d96-f686-4d14-a4e0-bdd90b00fd46/volumes" Oct 10 17:52:07 crc kubenswrapper[4799]: I1010 17:52:07.968500 4799 generic.go:334] "Generic (PLEG): container finished" podID="c196da0b-780b-4f0f-ab3f-31a581cc4b8a" containerID="742c50e0df627f07d2fd5874c5818e252c7ecc6151ae70498b7548552ee8c0ca" exitCode=0 Oct 10 17:52:07 crc kubenswrapper[4799]: I1010 17:52:07.968747 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-7-default" event={"ID":"c196da0b-780b-4f0f-ab3f-31a581cc4b8a","Type":"ContainerDied","Data":"742c50e0df627f07d2fd5874c5818e252c7ecc6151ae70498b7548552ee8c0ca"} Oct 10 17:52:07 crc kubenswrapper[4799]: I1010 17:52:07.970731 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-7-default" event={"ID":"c196da0b-780b-4f0f-ab3f-31a581cc4b8a","Type":"ContainerStarted","Data":"b6efde35cddd83ce54b75dfaea41c3ba0f63ea167b67d054617148eadef2a234"} Oct 10 17:52:09 crc kubenswrapper[4799]: I1010 17:52:09.451650 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-7-default" Oct 10 17:52:09 crc kubenswrapper[4799]: I1010 17:52:09.472779 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-7-default_c196da0b-780b-4f0f-ab3f-31a581cc4b8a/mariadb-client-7-default/0.log" Oct 10 17:52:09 crc kubenswrapper[4799]: I1010 17:52:09.496603 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-7-default"] Oct 10 17:52:09 crc kubenswrapper[4799]: I1010 17:52:09.500997 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-7-default"] Oct 10 17:52:09 crc kubenswrapper[4799]: I1010 17:52:09.611403 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zp992\" (UniqueName: \"kubernetes.io/projected/c196da0b-780b-4f0f-ab3f-31a581cc4b8a-kube-api-access-zp992\") pod \"c196da0b-780b-4f0f-ab3f-31a581cc4b8a\" (UID: \"c196da0b-780b-4f0f-ab3f-31a581cc4b8a\") " Oct 10 17:52:09 crc kubenswrapper[4799]: I1010 17:52:09.620098 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c196da0b-780b-4f0f-ab3f-31a581cc4b8a-kube-api-access-zp992" (OuterVolumeSpecName: "kube-api-access-zp992") pod "c196da0b-780b-4f0f-ab3f-31a581cc4b8a" (UID: "c196da0b-780b-4f0f-ab3f-31a581cc4b8a"). InnerVolumeSpecName "kube-api-access-zp992". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:52:09 crc kubenswrapper[4799]: I1010 17:52:09.681684 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-2"] Oct 10 17:52:09 crc kubenswrapper[4799]: E1010 17:52:09.682247 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c196da0b-780b-4f0f-ab3f-31a581cc4b8a" containerName="mariadb-client-7-default" Oct 10 17:52:09 crc kubenswrapper[4799]: I1010 17:52:09.682273 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="c196da0b-780b-4f0f-ab3f-31a581cc4b8a" containerName="mariadb-client-7-default" Oct 10 17:52:09 crc kubenswrapper[4799]: I1010 17:52:09.682650 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="c196da0b-780b-4f0f-ab3f-31a581cc4b8a" containerName="mariadb-client-7-default" Oct 10 17:52:09 crc kubenswrapper[4799]: I1010 17:52:09.683538 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Oct 10 17:52:09 crc kubenswrapper[4799]: I1010 17:52:09.714692 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zp992\" (UniqueName: \"kubernetes.io/projected/c196da0b-780b-4f0f-ab3f-31a581cc4b8a-kube-api-access-zp992\") on node \"crc\" DevicePath \"\"" Oct 10 17:52:09 crc kubenswrapper[4799]: I1010 17:52:09.715365 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2"] Oct 10 17:52:09 crc kubenswrapper[4799]: I1010 17:52:09.817266 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wh89z\" (UniqueName: \"kubernetes.io/projected/3f6dbd06-7e74-451b-a870-2746eea0648e-kube-api-access-wh89z\") pod \"mariadb-client-2\" (UID: \"3f6dbd06-7e74-451b-a870-2746eea0648e\") " pod="openstack/mariadb-client-2" Oct 10 17:52:09 crc kubenswrapper[4799]: I1010 17:52:09.920563 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wh89z\" (UniqueName: \"kubernetes.io/projected/3f6dbd06-7e74-451b-a870-2746eea0648e-kube-api-access-wh89z\") pod \"mariadb-client-2\" (UID: \"3f6dbd06-7e74-451b-a870-2746eea0648e\") " pod="openstack/mariadb-client-2" Oct 10 17:52:09 crc kubenswrapper[4799]: I1010 17:52:09.952661 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wh89z\" (UniqueName: \"kubernetes.io/projected/3f6dbd06-7e74-451b-a870-2746eea0648e-kube-api-access-wh89z\") pod \"mariadb-client-2\" (UID: \"3f6dbd06-7e74-451b-a870-2746eea0648e\") " pod="openstack/mariadb-client-2" Oct 10 17:52:09 crc kubenswrapper[4799]: I1010 17:52:09.990548 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b6efde35cddd83ce54b75dfaea41c3ba0f63ea167b67d054617148eadef2a234" Oct 10 17:52:09 crc kubenswrapper[4799]: I1010 17:52:09.990640 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-7-default" Oct 10 17:52:10 crc kubenswrapper[4799]: I1010 17:52:10.015092 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2"
Oct 10 17:52:10 crc kubenswrapper[4799]: I1010 17:52:10.552950 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2"]
Oct 10 17:52:10 crc kubenswrapper[4799]: W1010 17:52:10.566860 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3f6dbd06_7e74_451b_a870_2746eea0648e.slice/crio-e34cff450dff9f516daecd3b26b74acfc8c4eec270c3939c15944934df4cf6fd WatchSource:0}: Error finding container e34cff450dff9f516daecd3b26b74acfc8c4eec270c3939c15944934df4cf6fd: Status 404 returned error can't find the container with id e34cff450dff9f516daecd3b26b74acfc8c4eec270c3939c15944934df4cf6fd
Oct 10 17:52:11 crc kubenswrapper[4799]: I1010 17:52:11.001982 4799 generic.go:334] "Generic (PLEG): container finished" podID="3f6dbd06-7e74-451b-a870-2746eea0648e" containerID="3ba5e4017371256e188302c77eaf932b996d0c0f3a6d773220c94fa9ead606b2" exitCode=0
Oct 10 17:52:11 crc kubenswrapper[4799]: I1010 17:52:11.002032 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2" event={"ID":"3f6dbd06-7e74-451b-a870-2746eea0648e","Type":"ContainerDied","Data":"3ba5e4017371256e188302c77eaf932b996d0c0f3a6d773220c94fa9ead606b2"}
Oct 10 17:52:11 crc kubenswrapper[4799]: I1010 17:52:11.002280 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2" event={"ID":"3f6dbd06-7e74-451b-a870-2746eea0648e","Type":"ContainerStarted","Data":"e34cff450dff9f516daecd3b26b74acfc8c4eec270c3939c15944934df4cf6fd"}
Oct 10 17:52:11 crc kubenswrapper[4799]: I1010 17:52:11.419262 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c196da0b-780b-4f0f-ab3f-31a581cc4b8a" path="/var/lib/kubelet/pods/c196da0b-780b-4f0f-ab3f-31a581cc4b8a/volumes"
Oct 10 17:52:12 crc kubenswrapper[4799]: I1010 17:52:12.459163 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2"
Oct 10 17:52:12 crc kubenswrapper[4799]: I1010 17:52:12.478174 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-2_3f6dbd06-7e74-451b-a870-2746eea0648e/mariadb-client-2/0.log"
Oct 10 17:52:12 crc kubenswrapper[4799]: I1010 17:52:12.505154 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-2"]
Oct 10 17:52:12 crc kubenswrapper[4799]: I1010 17:52:12.510427 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-2"]
Oct 10 17:52:12 crc kubenswrapper[4799]: I1010 17:52:12.568992 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wh89z\" (UniqueName: \"kubernetes.io/projected/3f6dbd06-7e74-451b-a870-2746eea0648e-kube-api-access-wh89z\") pod \"3f6dbd06-7e74-451b-a870-2746eea0648e\" (UID: \"3f6dbd06-7e74-451b-a870-2746eea0648e\") "
Oct 10 17:52:12 crc kubenswrapper[4799]: I1010 17:52:12.577090 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f6dbd06-7e74-451b-a870-2746eea0648e-kube-api-access-wh89z" (OuterVolumeSpecName: "kube-api-access-wh89z") pod "3f6dbd06-7e74-451b-a870-2746eea0648e" (UID: "3f6dbd06-7e74-451b-a870-2746eea0648e"). InnerVolumeSpecName "kube-api-access-wh89z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 17:52:12 crc kubenswrapper[4799]: I1010 17:52:12.670934 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wh89z\" (UniqueName: \"kubernetes.io/projected/3f6dbd06-7e74-451b-a870-2746eea0648e-kube-api-access-wh89z\") on node \"crc\" DevicePath \"\""
Oct 10 17:52:13 crc kubenswrapper[4799]: I1010 17:52:13.022597 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e34cff450dff9f516daecd3b26b74acfc8c4eec270c3939c15944934df4cf6fd"
Oct 10 17:52:13 crc kubenswrapper[4799]: I1010 17:52:13.022650 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2"
Oct 10 17:52:13 crc kubenswrapper[4799]: I1010 17:52:13.418650 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f6dbd06-7e74-451b-a870-2746eea0648e" path="/var/lib/kubelet/pods/3f6dbd06-7e74-451b-a870-2746eea0648e/volumes"
Oct 10 17:53:06 crc kubenswrapper[4799]: I1010 17:53:06.492303 4799 scope.go:117] "RemoveContainer" containerID="d605965916e424e497f95452325dc647b56db03925ac05ee18fcba63f42e4f5c"
Oct 10 17:53:15 crc kubenswrapper[4799]: I1010 17:53:15.248554 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 10 17:53:15 crc kubenswrapper[4799]: I1010 17:53:15.249374 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 10 17:53:45 crc kubenswrapper[4799]: I1010 17:53:45.249386 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 10 17:53:45 crc kubenswrapper[4799]: I1010 17:53:45.250364 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 10 17:54:07 crc kubenswrapper[4799]: I1010 17:54:07.252060 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-k628q"]
Oct 10 17:54:07 crc kubenswrapper[4799]: E1010 17:54:07.253346 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f6dbd06-7e74-451b-a870-2746eea0648e" containerName="mariadb-client-2"
Oct 10 17:54:07 crc kubenswrapper[4799]: I1010 17:54:07.253376 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f6dbd06-7e74-451b-a870-2746eea0648e" containerName="mariadb-client-2"
Oct 10 17:54:07 crc kubenswrapper[4799]: I1010 17:54:07.255533 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f6dbd06-7e74-451b-a870-2746eea0648e" containerName="mariadb-client-2"
Oct 10 17:54:07 crc kubenswrapper[4799]: I1010 17:54:07.268618 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k628q"
Oct 10 17:54:07 crc kubenswrapper[4799]: I1010 17:54:07.287333 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-k628q"]
Oct 10 17:54:07 crc kubenswrapper[4799]: I1010 17:54:07.395607 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd9c78bf-6a6f-49fd-910c-c1fded8e3175-utilities\") pod \"redhat-marketplace-k628q\" (UID: \"cd9c78bf-6a6f-49fd-910c-c1fded8e3175\") " pod="openshift-marketplace/redhat-marketplace-k628q"
Oct 10 17:54:07 crc kubenswrapper[4799]: I1010 17:54:07.395720 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd9c78bf-6a6f-49fd-910c-c1fded8e3175-catalog-content\") pod \"redhat-marketplace-k628q\" (UID: \"cd9c78bf-6a6f-49fd-910c-c1fded8e3175\") " pod="openshift-marketplace/redhat-marketplace-k628q"
Oct 10 17:54:07 crc kubenswrapper[4799]: I1010 17:54:07.395967 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gscvd\" (UniqueName: \"kubernetes.io/projected/cd9c78bf-6a6f-49fd-910c-c1fded8e3175-kube-api-access-gscvd\") pod \"redhat-marketplace-k628q\" (UID: \"cd9c78bf-6a6f-49fd-910c-c1fded8e3175\") " pod="openshift-marketplace/redhat-marketplace-k628q"
Oct 10 17:54:07 crc kubenswrapper[4799]: I1010 17:54:07.497473 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gscvd\" (UniqueName: \"kubernetes.io/projected/cd9c78bf-6a6f-49fd-910c-c1fded8e3175-kube-api-access-gscvd\") pod \"redhat-marketplace-k628q\" (UID: \"cd9c78bf-6a6f-49fd-910c-c1fded8e3175\") " pod="openshift-marketplace/redhat-marketplace-k628q"
Oct 10 17:54:07 crc kubenswrapper[4799]: I1010 17:54:07.497999 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd9c78bf-6a6f-49fd-910c-c1fded8e3175-utilities\") pod \"redhat-marketplace-k628q\" (UID: \"cd9c78bf-6a6f-49fd-910c-c1fded8e3175\") " pod="openshift-marketplace/redhat-marketplace-k628q"
Oct 10 17:54:07 crc kubenswrapper[4799]: I1010 17:54:07.498404 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd9c78bf-6a6f-49fd-910c-c1fded8e3175-catalog-content\") pod \"redhat-marketplace-k628q\" (UID: \"cd9c78bf-6a6f-49fd-910c-c1fded8e3175\") " pod="openshift-marketplace/redhat-marketplace-k628q"
Oct 10 17:54:07 crc kubenswrapper[4799]: I1010 17:54:07.498578 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd9c78bf-6a6f-49fd-910c-c1fded8e3175-utilities\") pod \"redhat-marketplace-k628q\" (UID: \"cd9c78bf-6a6f-49fd-910c-c1fded8e3175\") " pod="openshift-marketplace/redhat-marketplace-k628q"
Oct 10 17:54:07 crc kubenswrapper[4799]: I1010 17:54:07.499150 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd9c78bf-6a6f-49fd-910c-c1fded8e3175-catalog-content\") pod \"redhat-marketplace-k628q\" (UID: \"cd9c78bf-6a6f-49fd-910c-c1fded8e3175\") " pod="openshift-marketplace/redhat-marketplace-k628q"
Oct 10 17:54:07 crc kubenswrapper[4799]: I1010 17:54:07.521194 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gscvd\" (UniqueName: \"kubernetes.io/projected/cd9c78bf-6a6f-49fd-910c-c1fded8e3175-kube-api-access-gscvd\") pod \"redhat-marketplace-k628q\" (UID: \"cd9c78bf-6a6f-49fd-910c-c1fded8e3175\") " pod="openshift-marketplace/redhat-marketplace-k628q"
Oct 10 17:54:07 crc kubenswrapper[4799]: I1010 17:54:07.606613 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k628q"
Oct 10 17:54:08 crc kubenswrapper[4799]: I1010 17:54:08.055859 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-k628q"]
Oct 10 17:54:08 crc kubenswrapper[4799]: I1010 17:54:08.196492 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k628q" event={"ID":"cd9c78bf-6a6f-49fd-910c-c1fded8e3175","Type":"ContainerStarted","Data":"41e6bc797157d1e5885c66cbe043e6729b1074aeaaf378701b6c6119e2ec32a9"}
Oct 10 17:54:09 crc kubenswrapper[4799]: I1010 17:54:09.205467 4799 generic.go:334] "Generic (PLEG): container finished" podID="cd9c78bf-6a6f-49fd-910c-c1fded8e3175" containerID="4d62c2063b7b275679eae9543cca27b474cd80fb63dd3238212e70d812e07d40" exitCode=0
Oct 10 17:54:09 crc kubenswrapper[4799]: I1010 17:54:09.205555 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k628q" event={"ID":"cd9c78bf-6a6f-49fd-910c-c1fded8e3175","Type":"ContainerDied","Data":"4d62c2063b7b275679eae9543cca27b474cd80fb63dd3238212e70d812e07d40"}
Oct 10 17:54:10 crc kubenswrapper[4799]: I1010 17:54:10.214173 4799 generic.go:334] "Generic (PLEG): container finished" podID="cd9c78bf-6a6f-49fd-910c-c1fded8e3175" containerID="7b072678ff8a702d4bb2895601d7870edadf91046ddd44818a8c5b702c161556" exitCode=0
Oct 10 17:54:10 crc kubenswrapper[4799]: I1010 17:54:10.214470 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k628q" event={"ID":"cd9c78bf-6a6f-49fd-910c-c1fded8e3175","Type":"ContainerDied","Data":"7b072678ff8a702d4bb2895601d7870edadf91046ddd44818a8c5b702c161556"}
Oct 10 17:54:11 crc kubenswrapper[4799]: I1010 17:54:11.229717 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k628q" event={"ID":"cd9c78bf-6a6f-49fd-910c-c1fded8e3175","Type":"ContainerStarted","Data":"f3f2bbc238c4622c632ecf4dbca060875b6c22b64a2ec7689358e51550b6c559"}
Oct 10 17:54:11 crc kubenswrapper[4799]: I1010 17:54:11.281981 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-k628q" podStartSLOduration=2.846924632 podStartE2EDuration="4.281956763s" podCreationTimestamp="2025-10-10 17:54:07 +0000 UTC" firstStartedPulling="2025-10-10 17:54:09.207530822 +0000 UTC m=+4942.715854947" lastFinishedPulling="2025-10-10 17:54:10.642562923 +0000 UTC m=+4944.150887078" observedRunningTime="2025-10-10 17:54:11.248333251 +0000 UTC m=+4944.756657396" watchObservedRunningTime="2025-10-10 17:54:11.281956763 +0000 UTC m=+4944.790280878"
Oct 10 17:54:15 crc kubenswrapper[4799]: I1010 17:54:15.248626 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 10 17:54:15 crc kubenswrapper[4799]: I1010 17:54:15.249377 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 10 17:54:15 crc kubenswrapper[4799]: I1010 17:54:15.249444 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc"
Oct 10 17:54:15 crc kubenswrapper[4799]: I1010 17:54:15.250542 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5e973f476cb0655a6e33e886e2a59fc6754febf3bf5a4718abcef307858985dd"} pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 10 17:54:15 crc kubenswrapper[4799]: I1010 17:54:15.250666 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" containerID="cri-o://5e973f476cb0655a6e33e886e2a59fc6754febf3bf5a4718abcef307858985dd" gracePeriod=600
Oct 10 17:54:15 crc kubenswrapper[4799]: E1010 17:54:15.554396 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb"
Oct 10 17:54:16 crc kubenswrapper[4799]: I1010 17:54:16.274378 4799 generic.go:334] "Generic (PLEG): container finished" podID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerID="5e973f476cb0655a6e33e886e2a59fc6754febf3bf5a4718abcef307858985dd" exitCode=0
Oct 10 17:54:16 crc kubenswrapper[4799]: I1010 17:54:16.274453 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerDied","Data":"5e973f476cb0655a6e33e886e2a59fc6754febf3bf5a4718abcef307858985dd"}
Oct 10 17:54:16 crc kubenswrapper[4799]: I1010 17:54:16.274521 4799 scope.go:117] "RemoveContainer" containerID="f501bd44ca99d1332e2b38994323202e43e3dcc1b9ebd31fdd9ca3a13eab9e4d"
Oct 10 17:54:16 crc kubenswrapper[4799]: I1010 17:54:16.275384 4799 scope.go:117] "RemoveContainer" containerID="5e973f476cb0655a6e33e886e2a59fc6754febf3bf5a4718abcef307858985dd"
Oct 10 17:54:16 crc kubenswrapper[4799]: E1010 17:54:16.275781 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb"
Oct 10 17:54:17 crc kubenswrapper[4799]: I1010 17:54:17.607161 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-k628q"
Oct 10 17:54:17 crc kubenswrapper[4799]: I1010 17:54:17.607750 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-k628q"
Oct 10 17:54:17 crc kubenswrapper[4799]: I1010 17:54:17.683323 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-k628q"
Oct 10 17:54:18 crc kubenswrapper[4799]: I1010 17:54:18.393133 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-k628q"
Oct 10 17:54:18 crc kubenswrapper[4799]: I1010 17:54:18.481097 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-k628q"]
Oct 10 17:54:20 crc kubenswrapper[4799]: I1010 17:54:20.328830 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-k628q" podUID="cd9c78bf-6a6f-49fd-910c-c1fded8e3175" containerName="registry-server" containerID="cri-o://f3f2bbc238c4622c632ecf4dbca060875b6c22b64a2ec7689358e51550b6c559" gracePeriod=2
Oct 10 17:54:20 crc kubenswrapper[4799]: I1010 17:54:20.830354 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k628q"
Oct 10 17:54:20 crc kubenswrapper[4799]: I1010 17:54:20.929319 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd9c78bf-6a6f-49fd-910c-c1fded8e3175-catalog-content\") pod \"cd9c78bf-6a6f-49fd-910c-c1fded8e3175\" (UID: \"cd9c78bf-6a6f-49fd-910c-c1fded8e3175\") "
Oct 10 17:54:20 crc kubenswrapper[4799]: I1010 17:54:20.929659 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gscvd\" (UniqueName: \"kubernetes.io/projected/cd9c78bf-6a6f-49fd-910c-c1fded8e3175-kube-api-access-gscvd\") pod \"cd9c78bf-6a6f-49fd-910c-c1fded8e3175\" (UID: \"cd9c78bf-6a6f-49fd-910c-c1fded8e3175\") "
Oct 10 17:54:20 crc kubenswrapper[4799]: I1010 17:54:20.929725 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd9c78bf-6a6f-49fd-910c-c1fded8e3175-utilities\") pod \"cd9c78bf-6a6f-49fd-910c-c1fded8e3175\" (UID: \"cd9c78bf-6a6f-49fd-910c-c1fded8e3175\") "
Oct 10 17:54:20 crc kubenswrapper[4799]: I1010 17:54:20.930570 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd9c78bf-6a6f-49fd-910c-c1fded8e3175-utilities" (OuterVolumeSpecName: "utilities") pod "cd9c78bf-6a6f-49fd-910c-c1fded8e3175" (UID: "cd9c78bf-6a6f-49fd-910c-c1fded8e3175"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 17:54:20 crc kubenswrapper[4799]: I1010 17:54:20.940373 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd9c78bf-6a6f-49fd-910c-c1fded8e3175-kube-api-access-gscvd" (OuterVolumeSpecName: "kube-api-access-gscvd") pod "cd9c78bf-6a6f-49fd-910c-c1fded8e3175" (UID: "cd9c78bf-6a6f-49fd-910c-c1fded8e3175"). InnerVolumeSpecName "kube-api-access-gscvd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 17:54:20 crc kubenswrapper[4799]: I1010 17:54:20.944092 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd9c78bf-6a6f-49fd-910c-c1fded8e3175-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cd9c78bf-6a6f-49fd-910c-c1fded8e3175" (UID: "cd9c78bf-6a6f-49fd-910c-c1fded8e3175"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 17:54:21 crc kubenswrapper[4799]: I1010 17:54:21.031456 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd9c78bf-6a6f-49fd-910c-c1fded8e3175-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 10 17:54:21 crc kubenswrapper[4799]: I1010 17:54:21.031495 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gscvd\" (UniqueName: \"kubernetes.io/projected/cd9c78bf-6a6f-49fd-910c-c1fded8e3175-kube-api-access-gscvd\") on node \"crc\" DevicePath \"\""
Oct 10 17:54:21 crc kubenswrapper[4799]: I1010 17:54:21.031508 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd9c78bf-6a6f-49fd-910c-c1fded8e3175-utilities\") on node \"crc\" DevicePath \"\""
Oct 10 17:54:21 crc kubenswrapper[4799]: I1010 17:54:21.366217 4799 generic.go:334] "Generic (PLEG): container finished" podID="cd9c78bf-6a6f-49fd-910c-c1fded8e3175" containerID="f3f2bbc238c4622c632ecf4dbca060875b6c22b64a2ec7689358e51550b6c559" exitCode=0
Oct 10 17:54:21 crc kubenswrapper[4799]: I1010 17:54:21.366272 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k628q" event={"ID":"cd9c78bf-6a6f-49fd-910c-c1fded8e3175","Type":"ContainerDied","Data":"f3f2bbc238c4622c632ecf4dbca060875b6c22b64a2ec7689358e51550b6c559"}
Oct 10 17:54:21 crc kubenswrapper[4799]: I1010 17:54:21.366303 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k628q" event={"ID":"cd9c78bf-6a6f-49fd-910c-c1fded8e3175","Type":"ContainerDied","Data":"41e6bc797157d1e5885c66cbe043e6729b1074aeaaf378701b6c6119e2ec32a9"}
Oct 10 17:54:21 crc kubenswrapper[4799]: I1010 17:54:21.366325 4799 scope.go:117] "RemoveContainer" containerID="f3f2bbc238c4622c632ecf4dbca060875b6c22b64a2ec7689358e51550b6c559"
Oct 10 17:54:21 crc kubenswrapper[4799]: I1010 17:54:21.366487 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k628q"
Oct 10 17:54:21 crc kubenswrapper[4799]: I1010 17:54:21.390096 4799 scope.go:117] "RemoveContainer" containerID="7b072678ff8a702d4bb2895601d7870edadf91046ddd44818a8c5b702c161556"
Oct 10 17:54:21 crc kubenswrapper[4799]: I1010 17:54:21.421531 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-k628q"]
Oct 10 17:54:21 crc kubenswrapper[4799]: I1010 17:54:21.433376 4799 scope.go:117] "RemoveContainer" containerID="4d62c2063b7b275679eae9543cca27b474cd80fb63dd3238212e70d812e07d40"
Oct 10 17:54:21 crc kubenswrapper[4799]: I1010 17:54:21.441490 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-k628q"]
Oct 10 17:54:21 crc kubenswrapper[4799]: I1010 17:54:21.475696 4799 scope.go:117] "RemoveContainer" containerID="f3f2bbc238c4622c632ecf4dbca060875b6c22b64a2ec7689358e51550b6c559"
Oct 10 17:54:21 crc kubenswrapper[4799]: E1010 17:54:21.476386 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f3f2bbc238c4622c632ecf4dbca060875b6c22b64a2ec7689358e51550b6c559\": container with ID starting with f3f2bbc238c4622c632ecf4dbca060875b6c22b64a2ec7689358e51550b6c559 not found: ID does not exist" containerID="f3f2bbc238c4622c632ecf4dbca060875b6c22b64a2ec7689358e51550b6c559"
Oct 10 17:54:21 crc kubenswrapper[4799]: I1010 17:54:21.476474 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3f2bbc238c4622c632ecf4dbca060875b6c22b64a2ec7689358e51550b6c559"} err="failed to get container status \"f3f2bbc238c4622c632ecf4dbca060875b6c22b64a2ec7689358e51550b6c559\": rpc error: code = NotFound desc = could not find container \"f3f2bbc238c4622c632ecf4dbca060875b6c22b64a2ec7689358e51550b6c559\": container with ID starting with f3f2bbc238c4622c632ecf4dbca060875b6c22b64a2ec7689358e51550b6c559 not found: ID does not exist"
Oct 10 17:54:21 crc kubenswrapper[4799]: I1010 17:54:21.476509 4799 scope.go:117] "RemoveContainer" containerID="7b072678ff8a702d4bb2895601d7870edadf91046ddd44818a8c5b702c161556"
Oct 10 17:54:21 crc kubenswrapper[4799]: E1010 17:54:21.476938 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b072678ff8a702d4bb2895601d7870edadf91046ddd44818a8c5b702c161556\": container with ID starting with 7b072678ff8a702d4bb2895601d7870edadf91046ddd44818a8c5b702c161556 not found: ID does not exist" containerID="7b072678ff8a702d4bb2895601d7870edadf91046ddd44818a8c5b702c161556"
Oct 10 17:54:21 crc kubenswrapper[4799]: I1010 17:54:21.476979 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b072678ff8a702d4bb2895601d7870edadf91046ddd44818a8c5b702c161556"} err="failed to get container status \"7b072678ff8a702d4bb2895601d7870edadf91046ddd44818a8c5b702c161556\": rpc error: code = NotFound desc = could not find container \"7b072678ff8a702d4bb2895601d7870edadf91046ddd44818a8c5b702c161556\": container with ID starting with 7b072678ff8a702d4bb2895601d7870edadf91046ddd44818a8c5b702c161556 not found: ID does not exist"
Oct 10 17:54:21 crc kubenswrapper[4799]: I1010 17:54:21.477004 4799 scope.go:117] "RemoveContainer" containerID="4d62c2063b7b275679eae9543cca27b474cd80fb63dd3238212e70d812e07d40"
Oct 10 17:54:21 crc kubenswrapper[4799]: E1010 17:54:21.477296 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d62c2063b7b275679eae9543cca27b474cd80fb63dd3238212e70d812e07d40\": container with ID starting with 4d62c2063b7b275679eae9543cca27b474cd80fb63dd3238212e70d812e07d40 not found: ID does not exist" containerID="4d62c2063b7b275679eae9543cca27b474cd80fb63dd3238212e70d812e07d40"
Oct 10 17:54:21 crc kubenswrapper[4799]: I1010 17:54:21.477333 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d62c2063b7b275679eae9543cca27b474cd80fb63dd3238212e70d812e07d40"} err="failed to get container status \"4d62c2063b7b275679eae9543cca27b474cd80fb63dd3238212e70d812e07d40\": rpc error: code = NotFound desc = could not find container \"4d62c2063b7b275679eae9543cca27b474cd80fb63dd3238212e70d812e07d40\": container with ID starting with 4d62c2063b7b275679eae9543cca27b474cd80fb63dd3238212e70d812e07d40 not found: ID does not exist"
Oct 10 17:54:23 crc kubenswrapper[4799]: I1010 17:54:23.418815 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd9c78bf-6a6f-49fd-910c-c1fded8e3175" path="/var/lib/kubelet/pods/cd9c78bf-6a6f-49fd-910c-c1fded8e3175/volumes"
Oct 10 17:54:27 crc kubenswrapper[4799]: I1010 17:54:27.416547 4799 scope.go:117] "RemoveContainer" containerID="5e973f476cb0655a6e33e886e2a59fc6754febf3bf5a4718abcef307858985dd"
Oct 10 17:54:27 crc kubenswrapper[4799]: E1010 17:54:27.417947 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb"
Oct 10 17:54:41 crc kubenswrapper[4799]: I1010 17:54:41.402863 4799 scope.go:117] "RemoveContainer" containerID="5e973f476cb0655a6e33e886e2a59fc6754febf3bf5a4718abcef307858985dd"
Oct 10 17:54:41 crc kubenswrapper[4799]: E1010 17:54:41.403841 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb"
Oct 10 17:54:52 crc kubenswrapper[4799]: I1010 17:54:52.401956 4799 scope.go:117] "RemoveContainer" containerID="5e973f476cb0655a6e33e886e2a59fc6754febf3bf5a4718abcef307858985dd"
Oct 10 17:54:52 crc kubenswrapper[4799]: E1010 17:54:52.402673 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb"
Oct 10 17:55:04 crc kubenswrapper[4799]: I1010 17:55:04.402409 4799 scope.go:117] "RemoveContainer" containerID="5e973f476cb0655a6e33e886e2a59fc6754febf3bf5a4718abcef307858985dd"
Oct 10 17:55:04 crc kubenswrapper[4799]: E1010 17:55:04.403184 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb"
Oct 10 17:55:17 crc kubenswrapper[4799]: I1010 17:55:17.412168 4799 scope.go:117] "RemoveContainer" containerID="5e973f476cb0655a6e33e886e2a59fc6754febf3bf5a4718abcef307858985dd"
Oct 10 17:55:17 crc kubenswrapper[4799]: E1010 17:55:17.413532 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb"
Oct 10 17:55:32 crc kubenswrapper[4799]: I1010 17:55:32.402586 4799 scope.go:117] "RemoveContainer" containerID="5e973f476cb0655a6e33e886e2a59fc6754febf3bf5a4718abcef307858985dd"
Oct 10 17:55:32 crc kubenswrapper[4799]: E1010 17:55:32.403470 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb"
Oct 10 17:55:43 crc kubenswrapper[4799]: I1010 17:55:43.404534 4799 scope.go:117] "RemoveContainer" containerID="5e973f476cb0655a6e33e886e2a59fc6754febf3bf5a4718abcef307858985dd"
Oct 10 17:55:43 crc kubenswrapper[4799]: E1010 17:55:43.405439 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb"
Oct 10 17:55:55 crc kubenswrapper[4799]: I1010 17:55:55.402818 4799 scope.go:117] "RemoveContainer" containerID="5e973f476cb0655a6e33e886e2a59fc6754febf3bf5a4718abcef307858985dd"
Oct 10 17:55:55 crc kubenswrapper[4799]: E1010 17:55:55.403458 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb"
Oct 10 17:56:06 crc kubenswrapper[4799]: I1010 17:56:06.402702 4799 scope.go:117] "RemoveContainer" containerID="5e973f476cb0655a6e33e886e2a59fc6754febf3bf5a4718abcef307858985dd"
Oct 10 17:56:06 crc kubenswrapper[4799]: E1010 17:56:06.403447 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb"
Oct 10 17:56:18 crc kubenswrapper[4799]: I1010 17:56:18.402583 4799 scope.go:117] "RemoveContainer" containerID="5e973f476cb0655a6e33e886e2a59fc6754febf3bf5a4718abcef307858985dd"
Oct 10 17:56:18 crc kubenswrapper[4799]: E1010 17:56:18.403803 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb"
Oct 10 17:56:23 crc kubenswrapper[4799]: I1010 17:56:23.564287 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-copy-data"]
Oct 10 17:56:23 crc kubenswrapper[4799]: E1010 17:56:23.565597 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd9c78bf-6a6f-49fd-910c-c1fded8e3175" containerName="extract-content"
Oct 10 17:56:23 crc kubenswrapper[4799]: I1010 17:56:23.565625 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd9c78bf-6a6f-49fd-910c-c1fded8e3175" containerName="extract-content"
Oct 10 17:56:23 crc kubenswrapper[4799]: E1010 17:56:23.565651 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd9c78bf-6a6f-49fd-910c-c1fded8e3175" containerName="extract-utilities"
Oct 10 17:56:23 crc kubenswrapper[4799]: I1010 17:56:23.565665 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd9c78bf-6a6f-49fd-910c-c1fded8e3175" containerName="extract-utilities"
Oct 10 17:56:23 crc kubenswrapper[4799]: E1010 17:56:23.565706 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd9c78bf-6a6f-49fd-910c-c1fded8e3175" containerName="registry-server"
Oct 10 17:56:23 crc kubenswrapper[4799]: I1010 17:56:23.565720 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd9c78bf-6a6f-49fd-910c-c1fded8e3175" containerName="registry-server"
Oct 10 17:56:23 crc kubenswrapper[4799]: I1010 17:56:23.566112 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd9c78bf-6a6f-49fd-910c-c1fded8e3175" containerName="registry-server"
Oct 10 17:56:23 crc kubenswrapper[4799]: I1010 17:56:23.567207 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data"
Oct 10 17:56:23 crc kubenswrapper[4799]: I1010 17:56:23.571961 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-qbrln"
Oct 10 17:56:23 crc kubenswrapper[4799]: I1010 17:56:23.593561 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"]
Oct 10 17:56:23 crc kubenswrapper[4799]: I1010 17:56:23.748648 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xc64\" (UniqueName: \"kubernetes.io/projected/dac860f8-b331-42c2-8706-1db45e4285fe-kube-api-access-2xc64\") pod \"mariadb-copy-data\" (UID: \"dac860f8-b331-42c2-8706-1db45e4285fe\") " pod="openstack/mariadb-copy-data"
Oct 10 17:56:23 crc kubenswrapper[4799]: I1010 17:56:23.749206 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-f1e2244a-acf8-447d-bca8-5ca755b37f8c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f1e2244a-acf8-447d-bca8-5ca755b37f8c\") pod \"mariadb-copy-data\" (UID: \"dac860f8-b331-42c2-8706-1db45e4285fe\") " pod="openstack/mariadb-copy-data"
Oct 10 17:56:23 crc kubenswrapper[4799]: I1010 17:56:23.850902 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-f1e2244a-acf8-447d-bca8-5ca755b37f8c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f1e2244a-acf8-447d-bca8-5ca755b37f8c\") pod \"mariadb-copy-data\" (UID: \"dac860f8-b331-42c2-8706-1db45e4285fe\") " pod="openstack/mariadb-copy-data"
Oct 10 17:56:23 crc kubenswrapper[4799]: I1010 17:56:23.851055 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xc64\" (UniqueName: \"kubernetes.io/projected/dac860f8-b331-42c2-8706-1db45e4285fe-kube-api-access-2xc64\") pod \"mariadb-copy-data\" (UID: \"dac860f8-b331-42c2-8706-1db45e4285fe\") " pod="openstack/mariadb-copy-data"
Oct 10 17:56:23 crc kubenswrapper[4799]: I1010 17:56:23.855425 4799 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Oct 10 17:56:23 crc kubenswrapper[4799]: I1010 17:56:23.855486 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-f1e2244a-acf8-447d-bca8-5ca755b37f8c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f1e2244a-acf8-447d-bca8-5ca755b37f8c\") pod \"mariadb-copy-data\" (UID: \"dac860f8-b331-42c2-8706-1db45e4285fe\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/8051a179630e2016779deb6b2caf60549bb04e3128a1368cb4dc5e4914c17ba6/globalmount\"" pod="openstack/mariadb-copy-data"
Oct 10 17:56:23 crc kubenswrapper[4799]: I1010 17:56:23.893262 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2xc64\" (UniqueName: \"kubernetes.io/projected/dac860f8-b331-42c2-8706-1db45e4285fe-kube-api-access-2xc64\") pod \"mariadb-copy-data\" (UID: \"dac860f8-b331-42c2-8706-1db45e4285fe\") " pod="openstack/mariadb-copy-data"
Oct 10 17:56:23 crc kubenswrapper[4799]: I1010 17:56:23.900882 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-f1e2244a-acf8-447d-bca8-5ca755b37f8c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f1e2244a-acf8-447d-bca8-5ca755b37f8c\") pod \"mariadb-copy-data\" (UID: \"dac860f8-b331-42c2-8706-1db45e4285fe\") " pod="openstack/mariadb-copy-data"
Oct 10 17:56:23 crc kubenswrapper[4799]: I1010 17:56:23.912608 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data"
Oct 10 17:56:24 crc kubenswrapper[4799]: I1010 17:56:24.523448 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"]
Oct 10 17:56:24 crc kubenswrapper[4799]: I1010 17:56:24.702295 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"dac860f8-b331-42c2-8706-1db45e4285fe","Type":"ContainerStarted","Data":"9b172d09bfd9bb0ad75cec313f803b7cc704ba2026eaeee50d15b87410163d8a"}
Oct 10 17:56:25 crc kubenswrapper[4799]: I1010 17:56:25.727565 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"dac860f8-b331-42c2-8706-1db45e4285fe","Type":"ContainerStarted","Data":"52cf81e369b3402a9e64209e361a8b4a6788083f3959698665b23f522f103326"}
Oct 10 17:56:25 crc kubenswrapper[4799]: I1010 17:56:25.755137 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-copy-data" podStartSLOduration=3.755108593 podStartE2EDuration="3.755108593s" podCreationTimestamp="2025-10-10 17:56:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 17:56:25.745065938 +0000 UTC m=+5079.253390093" watchObservedRunningTime="2025-10-10 17:56:25.755108593 +0000 UTC m=+5079.263432738"
Oct 10 17:56:27 crc kubenswrapper[4799]: I1010 17:56:27.806572 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"]
Oct 10 17:56:27 crc kubenswrapper[4799]: I1010 17:56:27.809845 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Oct 10 17:56:27 crc kubenswrapper[4799]: I1010 17:56:27.815959 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Oct 10 17:56:27 crc kubenswrapper[4799]: I1010 17:56:27.928403 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lngxm\" (UniqueName: \"kubernetes.io/projected/a8cf33bb-dfc3-403d-9f03-dc57e1dce016-kube-api-access-lngxm\") pod \"mariadb-client\" (UID: \"a8cf33bb-dfc3-403d-9f03-dc57e1dce016\") " pod="openstack/mariadb-client"
Oct 10 17:56:28 crc kubenswrapper[4799]: I1010 17:56:28.030334 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lngxm\" (UniqueName: \"kubernetes.io/projected/a8cf33bb-dfc3-403d-9f03-dc57e1dce016-kube-api-access-lngxm\") pod \"mariadb-client\" (UID: \"a8cf33bb-dfc3-403d-9f03-dc57e1dce016\") " pod="openstack/mariadb-client"
Oct 10 17:56:28 crc kubenswrapper[4799]: I1010 17:56:28.066941 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lngxm\" (UniqueName: \"kubernetes.io/projected/a8cf33bb-dfc3-403d-9f03-dc57e1dce016-kube-api-access-lngxm\") pod \"mariadb-client\" (UID: \"a8cf33bb-dfc3-403d-9f03-dc57e1dce016\") " pod="openstack/mariadb-client"
Oct 10 17:56:28 crc kubenswrapper[4799]: I1010 17:56:28.145237 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Oct 10 17:56:28 crc kubenswrapper[4799]: I1010 17:56:28.458629 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Oct 10 17:56:28 crc kubenswrapper[4799]: I1010 17:56:28.763707 4799 generic.go:334] "Generic (PLEG): container finished" podID="a8cf33bb-dfc3-403d-9f03-dc57e1dce016" containerID="e544fbe3bed8ac93a11df7acc87701ee36765a1579159a1d93725d8806f6c31b" exitCode=0
Oct 10 17:56:28 crc kubenswrapper[4799]: I1010 17:56:28.763828 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"a8cf33bb-dfc3-403d-9f03-dc57e1dce016","Type":"ContainerDied","Data":"e544fbe3bed8ac93a11df7acc87701ee36765a1579159a1d93725d8806f6c31b"}
Oct 10 17:56:28 crc kubenswrapper[4799]: I1010 17:56:28.764257 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"a8cf33bb-dfc3-403d-9f03-dc57e1dce016","Type":"ContainerStarted","Data":"f491041ebc7fa79758e5fdc89691bb37cf975c65b5479b8f828ba5829eee34f9"}
Oct 10 17:56:30 crc kubenswrapper[4799]: I1010 17:56:30.208783 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Oct 10 17:56:30 crc kubenswrapper[4799]: I1010 17:56:30.236663 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_a8cf33bb-dfc3-403d-9f03-dc57e1dce016/mariadb-client/0.log"
Oct 10 17:56:30 crc kubenswrapper[4799]: I1010 17:56:30.276145 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"]
Oct 10 17:56:30 crc kubenswrapper[4799]: I1010 17:56:30.282838 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"]
Oct 10 17:56:30 crc kubenswrapper[4799]: I1010 17:56:30.371658 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lngxm\" (UniqueName: \"kubernetes.io/projected/a8cf33bb-dfc3-403d-9f03-dc57e1dce016-kube-api-access-lngxm\") pod \"a8cf33bb-dfc3-403d-9f03-dc57e1dce016\" (UID: \"a8cf33bb-dfc3-403d-9f03-dc57e1dce016\") "
Oct 10 17:56:30 crc kubenswrapper[4799]: I1010 17:56:30.382709 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8cf33bb-dfc3-403d-9f03-dc57e1dce016-kube-api-access-lngxm" (OuterVolumeSpecName: "kube-api-access-lngxm") pod "a8cf33bb-dfc3-403d-9f03-dc57e1dce016" (UID: "a8cf33bb-dfc3-403d-9f03-dc57e1dce016"). InnerVolumeSpecName "kube-api-access-lngxm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 17:56:30 crc kubenswrapper[4799]: I1010 17:56:30.402791 4799 scope.go:117] "RemoveContainer" containerID="5e973f476cb0655a6e33e886e2a59fc6754febf3bf5a4718abcef307858985dd"
Oct 10 17:56:30 crc kubenswrapper[4799]: E1010 17:56:30.403173 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb"
Oct 10 17:56:30 crc kubenswrapper[4799]: I1010 17:56:30.426451 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"]
Oct 10 17:56:30 crc kubenswrapper[4799]: E1010 17:56:30.430994 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8cf33bb-dfc3-403d-9f03-dc57e1dce016" containerName="mariadb-client"
Oct 10 17:56:30 crc kubenswrapper[4799]: I1010 17:56:30.431029 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8cf33bb-dfc3-403d-9f03-dc57e1dce016" containerName="mariadb-client"
Oct 10 17:56:30 crc kubenswrapper[4799]: I1010 17:56:30.431438 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8cf33bb-dfc3-403d-9f03-dc57e1dce016" containerName="mariadb-client"
Oct 10 17:56:30 crc kubenswrapper[4799]: I1010 17:56:30.432565 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Oct 10 17:56:30 crc kubenswrapper[4799]: I1010 17:56:30.445280 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Oct 10 17:56:30 crc kubenswrapper[4799]: I1010 17:56:30.477095 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lngxm\" (UniqueName: \"kubernetes.io/projected/a8cf33bb-dfc3-403d-9f03-dc57e1dce016-kube-api-access-lngxm\") on node \"crc\" DevicePath \"\""
Oct 10 17:56:30 crc kubenswrapper[4799]: I1010 17:56:30.578812 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2bjh\" (UniqueName: \"kubernetes.io/projected/ccd7e9c0-d1f7-4951-8028-6c700edaaf9d-kube-api-access-x2bjh\") pod \"mariadb-client\" (UID: \"ccd7e9c0-d1f7-4951-8028-6c700edaaf9d\") " pod="openstack/mariadb-client"
Oct 10 17:56:30 crc kubenswrapper[4799]: I1010 17:56:30.680567 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2bjh\" (UniqueName: \"kubernetes.io/projected/ccd7e9c0-d1f7-4951-8028-6c700edaaf9d-kube-api-access-x2bjh\") pod \"mariadb-client\" (UID: \"ccd7e9c0-d1f7-4951-8028-6c700edaaf9d\") " pod="openstack/mariadb-client"
Oct 10 17:56:30 crc kubenswrapper[4799]: I1010 17:56:30.705408 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2bjh\" (UniqueName: \"kubernetes.io/projected/ccd7e9c0-d1f7-4951-8028-6c700edaaf9d-kube-api-access-x2bjh\") pod \"mariadb-client\" (UID: \"ccd7e9c0-d1f7-4951-8028-6c700edaaf9d\") " pod="openstack/mariadb-client"
Oct 10 17:56:30 crc kubenswrapper[4799]: I1010 17:56:30.766124 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Oct 10 17:56:30 crc kubenswrapper[4799]: I1010 17:56:30.784595 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f491041ebc7fa79758e5fdc89691bb37cf975c65b5479b8f828ba5829eee34f9"
Oct 10 17:56:30 crc kubenswrapper[4799]: I1010 17:56:30.784692 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Oct 10 17:56:30 crc kubenswrapper[4799]: I1010 17:56:30.840910 4799 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/mariadb-client" oldPodUID="a8cf33bb-dfc3-403d-9f03-dc57e1dce016" podUID="ccd7e9c0-d1f7-4951-8028-6c700edaaf9d"
Oct 10 17:56:31 crc kubenswrapper[4799]: I1010 17:56:31.128501 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Oct 10 17:56:31 crc kubenswrapper[4799]: I1010 17:56:31.422562 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8cf33bb-dfc3-403d-9f03-dc57e1dce016" path="/var/lib/kubelet/pods/a8cf33bb-dfc3-403d-9f03-dc57e1dce016/volumes"
Oct 10 17:56:31 crc kubenswrapper[4799]: I1010 17:56:31.795294 4799 generic.go:334] "Generic (PLEG): container finished" podID="ccd7e9c0-d1f7-4951-8028-6c700edaaf9d" containerID="0bd22a1d08aa22f019a2be577f65fffbcdb2b11bda2c1b1e7da45cc7575c15f2" exitCode=0
Oct 10 17:56:31 crc kubenswrapper[4799]: I1010 17:56:31.795377 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"ccd7e9c0-d1f7-4951-8028-6c700edaaf9d","Type":"ContainerDied","Data":"0bd22a1d08aa22f019a2be577f65fffbcdb2b11bda2c1b1e7da45cc7575c15f2"}
Oct 10 17:56:31 crc kubenswrapper[4799]: I1010 17:56:31.795453 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"ccd7e9c0-d1f7-4951-8028-6c700edaaf9d","Type":"ContainerStarted","Data":"7b7a60e0f91ec70528acf69a8114c431f39313278273f82f3dcd9ba383afbe9f"}
Oct 10 17:56:33 crc kubenswrapper[4799]: I1010 17:56:33.163942 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Oct 10 17:56:33 crc kubenswrapper[4799]: I1010 17:56:33.187339 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_ccd7e9c0-d1f7-4951-8028-6c700edaaf9d/mariadb-client/0.log"
Oct 10 17:56:33 crc kubenswrapper[4799]: I1010 17:56:33.219276 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"]
Oct 10 17:56:33 crc kubenswrapper[4799]: I1010 17:56:33.228273 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"]
Oct 10 17:56:33 crc kubenswrapper[4799]: I1010 17:56:33.324638 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2bjh\" (UniqueName: \"kubernetes.io/projected/ccd7e9c0-d1f7-4951-8028-6c700edaaf9d-kube-api-access-x2bjh\") pod \"ccd7e9c0-d1f7-4951-8028-6c700edaaf9d\" (UID: \"ccd7e9c0-d1f7-4951-8028-6c700edaaf9d\") "
Oct 10 17:56:33 crc kubenswrapper[4799]: I1010 17:56:33.335188 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ccd7e9c0-d1f7-4951-8028-6c700edaaf9d-kube-api-access-x2bjh" (OuterVolumeSpecName: "kube-api-access-x2bjh") pod "ccd7e9c0-d1f7-4951-8028-6c700edaaf9d" (UID: "ccd7e9c0-d1f7-4951-8028-6c700edaaf9d"). InnerVolumeSpecName "kube-api-access-x2bjh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 17:56:33 crc kubenswrapper[4799]: I1010 17:56:33.416580 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ccd7e9c0-d1f7-4951-8028-6c700edaaf9d" path="/var/lib/kubelet/pods/ccd7e9c0-d1f7-4951-8028-6c700edaaf9d/volumes"
Oct 10 17:56:33 crc kubenswrapper[4799]: I1010 17:56:33.426688 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2bjh\" (UniqueName: \"kubernetes.io/projected/ccd7e9c0-d1f7-4951-8028-6c700edaaf9d-kube-api-access-x2bjh\") on node \"crc\" DevicePath \"\""
Oct 10 17:56:33 crc kubenswrapper[4799]: I1010 17:56:33.834000 4799 scope.go:117] "RemoveContainer" containerID="0bd22a1d08aa22f019a2be577f65fffbcdb2b11bda2c1b1e7da45cc7575c15f2"
Oct 10 17:56:33 crc kubenswrapper[4799]: I1010 17:56:33.834211 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Oct 10 17:56:44 crc kubenswrapper[4799]: I1010 17:56:44.402453 4799 scope.go:117] "RemoveContainer" containerID="5e973f476cb0655a6e33e886e2a59fc6754febf3bf5a4718abcef307858985dd"
Oct 10 17:56:44 crc kubenswrapper[4799]: E1010 17:56:44.403589 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb"
Oct 10 17:56:57 crc kubenswrapper[4799]: I1010 17:56:57.407396 4799 scope.go:117] "RemoveContainer" containerID="5e973f476cb0655a6e33e886e2a59fc6754febf3bf5a4718abcef307858985dd"
Oct 10 17:56:57 crc kubenswrapper[4799]: E1010 17:56:57.408540 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb"
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.627607 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"]
Oct 10 17:57:08 crc kubenswrapper[4799]: E1010 17:57:08.628741 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccd7e9c0-d1f7-4951-8028-6c700edaaf9d" containerName="mariadb-client"
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.628789 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccd7e9c0-d1f7-4951-8028-6c700edaaf9d" containerName="mariadb-client"
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.629169 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccd7e9c0-d1f7-4951-8028-6c700edaaf9d" containerName="mariadb-client"
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.630464 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.642440 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-g4w6d"
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.642692 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config"
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.642902 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts"
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.658453 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-66d08801-beac-4321-81f4-1c3025cf1959\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-66d08801-beac-4321-81f4-1c3025cf1959\") pod \"ovsdbserver-nb-0\" (UID: \"2a529948-04ef-4796-9237-9e8e30fe5f5c\") " pod="openstack/ovsdbserver-nb-0"
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.658591 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a529948-04ef-4796-9237-9e8e30fe5f5c-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"2a529948-04ef-4796-9237-9e8e30fe5f5c\") " pod="openstack/ovsdbserver-nb-0"
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.658669 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2a529948-04ef-4796-9237-9e8e30fe5f5c-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"2a529948-04ef-4796-9237-9e8e30fe5f5c\") " pod="openstack/ovsdbserver-nb-0"
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.658746 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vtf8\" (UniqueName: \"kubernetes.io/projected/2a529948-04ef-4796-9237-9e8e30fe5f5c-kube-api-access-6vtf8\") pod \"ovsdbserver-nb-0\" (UID: \"2a529948-04ef-4796-9237-9e8e30fe5f5c\") " pod="openstack/ovsdbserver-nb-0"
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.658826 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2a529948-04ef-4796-9237-9e8e30fe5f5c-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"2a529948-04ef-4796-9237-9e8e30fe5f5c\") " pod="openstack/ovsdbserver-nb-0"
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.658882 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2a529948-04ef-4796-9237-9e8e30fe5f5c-config\") pod \"ovsdbserver-nb-0\" (UID: \"2a529948-04ef-4796-9237-9e8e30fe5f5c\") " pod="openstack/ovsdbserver-nb-0"
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.667028 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-1"]
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.670325 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-1"
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.682299 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-2"]
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.684341 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-2"
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.699875 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.712797 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"]
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.718354 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"]
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.760020 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxdxj\" (UniqueName: \"kubernetes.io/projected/25609809-9782-4b4b-9e3c-005b491e60a3-kube-api-access-gxdxj\") pod \"ovsdbserver-nb-1\" (UID: \"25609809-9782-4b4b-9e3c-005b491e60a3\") " pod="openstack/ovsdbserver-nb-1"
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.760084 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vtf8\" (UniqueName: \"kubernetes.io/projected/2a529948-04ef-4796-9237-9e8e30fe5f5c-kube-api-access-6vtf8\") pod \"ovsdbserver-nb-0\" (UID: \"2a529948-04ef-4796-9237-9e8e30fe5f5c\") " pod="openstack/ovsdbserver-nb-0"
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.760127 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2a529948-04ef-4796-9237-9e8e30fe5f5c-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"2a529948-04ef-4796-9237-9e8e30fe5f5c\") " pod="openstack/ovsdbserver-nb-0"
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.760166 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2a529948-04ef-4796-9237-9e8e30fe5f5c-config\") pod \"ovsdbserver-nb-0\" (UID: \"2a529948-04ef-4796-9237-9e8e30fe5f5c\") " pod="openstack/ovsdbserver-nb-0"
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.760279 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/25609809-9782-4b4b-9e3c-005b491e60a3-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"25609809-9782-4b4b-9e3c-005b491e60a3\") " pod="openstack/ovsdbserver-nb-1"
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.760301 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/25609809-9782-4b4b-9e3c-005b491e60a3-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"25609809-9782-4b4b-9e3c-005b491e60a3\") " pod="openstack/ovsdbserver-nb-1"
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.760344 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25609809-9782-4b4b-9e3c-005b491e60a3-config\") pod \"ovsdbserver-nb-1\" (UID: \"25609809-9782-4b4b-9e3c-005b491e60a3\") " pod="openstack/ovsdbserver-nb-1"
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.760375 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-66d08801-beac-4321-81f4-1c3025cf1959\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-66d08801-beac-4321-81f4-1c3025cf1959\") pod \"ovsdbserver-nb-0\" (UID: \"2a529948-04ef-4796-9237-9e8e30fe5f5c\") " pod="openstack/ovsdbserver-nb-0"
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.760401 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a529948-04ef-4796-9237-9e8e30fe5f5c-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"2a529948-04ef-4796-9237-9e8e30fe5f5c\") " pod="openstack/ovsdbserver-nb-0"
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.760436 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-f373a466-8cf5-4e99-a5df-5d531d597a51\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f373a466-8cf5-4e99-a5df-5d531d597a51\") pod \"ovsdbserver-nb-1\" (UID: \"25609809-9782-4b4b-9e3c-005b491e60a3\") " pod="openstack/ovsdbserver-nb-1"
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.760473 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25609809-9782-4b4b-9e3c-005b491e60a3-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"25609809-9782-4b4b-9e3c-005b491e60a3\") " pod="openstack/ovsdbserver-nb-1"
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.760525 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2a529948-04ef-4796-9237-9e8e30fe5f5c-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"2a529948-04ef-4796-9237-9e8e30fe5f5c\") " pod="openstack/ovsdbserver-nb-0"
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.761749 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2a529948-04ef-4796-9237-9e8e30fe5f5c-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"2a529948-04ef-4796-9237-9e8e30fe5f5c\") " pod="openstack/ovsdbserver-nb-0"
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.762572 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2a529948-04ef-4796-9237-9e8e30fe5f5c-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"2a529948-04ef-4796-9237-9e8e30fe5f5c\") " pod="openstack/ovsdbserver-nb-0"
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.762573 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2a529948-04ef-4796-9237-9e8e30fe5f5c-config\") pod \"ovsdbserver-nb-0\" (UID: \"2a529948-04ef-4796-9237-9e8e30fe5f5c\") " pod="openstack/ovsdbserver-nb-0"
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.765569 4799 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.765610 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-66d08801-beac-4321-81f4-1c3025cf1959\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-66d08801-beac-4321-81f4-1c3025cf1959\") pod \"ovsdbserver-nb-0\" (UID: \"2a529948-04ef-4796-9237-9e8e30fe5f5c\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/a331528399f50692457d34fbe271cf8f1429611498f7ddc1519002ef5dfbf38b/globalmount\"" pod="openstack/ovsdbserver-nb-0" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.774284 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a529948-04ef-4796-9237-9e8e30fe5f5c-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"2a529948-04ef-4796-9237-9e8e30fe5f5c\") " pod="openstack/ovsdbserver-nb-0" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.780131 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vtf8\" (UniqueName: \"kubernetes.io/projected/2a529948-04ef-4796-9237-9e8e30fe5f5c-kube-api-access-6vtf8\") pod \"ovsdbserver-nb-0\" (UID: \"2a529948-04ef-4796-9237-9e8e30fe5f5c\") " pod="openstack/ovsdbserver-nb-0" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.810905 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.816944 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.824015 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-bsldm" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.826218 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.827927 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.853215 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.861738 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/061a94d3-1f07-41b9-b64d-4f2470084fe7-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"061a94d3-1f07-41b9-b64d-4f2470084fe7\") " pod="openstack/ovsdbserver-nb-2" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.861846 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-151e912f-2519-4ceb-af0b-64fc907849cc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-151e912f-2519-4ceb-af0b-64fc907849cc\") pod \"ovsdbserver-nb-2\" (UID: \"061a94d3-1f07-41b9-b64d-4f2470084fe7\") " pod="openstack/ovsdbserver-nb-2" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.861864 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f45ts\" (UniqueName: \"kubernetes.io/projected/061a94d3-1f07-41b9-b64d-4f2470084fe7-kube-api-access-f45ts\") pod \"ovsdbserver-nb-2\" (UID: \"061a94d3-1f07-41b9-b64d-4f2470084fe7\") " pod="openstack/ovsdbserver-nb-2" Oct 10 17:57:08 crc 
kubenswrapper[4799]: I1010 17:57:08.861899 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/25609809-9782-4b4b-9e3c-005b491e60a3-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"25609809-9782-4b4b-9e3c-005b491e60a3\") " pod="openstack/ovsdbserver-nb-1" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.861916 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/25609809-9782-4b4b-9e3c-005b491e60a3-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"25609809-9782-4b4b-9e3c-005b491e60a3\") " pod="openstack/ovsdbserver-nb-1" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.861949 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/061a94d3-1f07-41b9-b64d-4f2470084fe7-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"061a94d3-1f07-41b9-b64d-4f2470084fe7\") " pod="openstack/ovsdbserver-nb-2" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.861964 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25609809-9782-4b4b-9e3c-005b491e60a3-config\") pod \"ovsdbserver-nb-1\" (UID: \"25609809-9782-4b4b-9e3c-005b491e60a3\") " pod="openstack/ovsdbserver-nb-1" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.861995 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/061a94d3-1f07-41b9-b64d-4f2470084fe7-config\") pod \"ovsdbserver-nb-2\" (UID: \"061a94d3-1f07-41b9-b64d-4f2470084fe7\") " pod="openstack/ovsdbserver-nb-2" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.862015 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/061a94d3-1f07-41b9-b64d-4f2470084fe7-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"061a94d3-1f07-41b9-b64d-4f2470084fe7\") " pod="openstack/ovsdbserver-nb-2" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.862034 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-f373a466-8cf5-4e99-a5df-5d531d597a51\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f373a466-8cf5-4e99-a5df-5d531d597a51\") pod \"ovsdbserver-nb-1\" (UID: \"25609809-9782-4b4b-9e3c-005b491e60a3\") " pod="openstack/ovsdbserver-nb-1" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.862060 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25609809-9782-4b4b-9e3c-005b491e60a3-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"25609809-9782-4b4b-9e3c-005b491e60a3\") " pod="openstack/ovsdbserver-nb-1" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.862088 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxdxj\" (UniqueName: \"kubernetes.io/projected/25609809-9782-4b4b-9e3c-005b491e60a3-kube-api-access-gxdxj\") pod \"ovsdbserver-nb-1\" (UID: \"25609809-9782-4b4b-9e3c-005b491e60a3\") " pod="openstack/ovsdbserver-nb-1" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.863373 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/25609809-9782-4b4b-9e3c-005b491e60a3-scripts\") pod 
\"ovsdbserver-nb-1\" (UID: \"25609809-9782-4b4b-9e3c-005b491e60a3\") " pod="openstack/ovsdbserver-nb-1" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.863806 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/25609809-9782-4b4b-9e3c-005b491e60a3-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"25609809-9782-4b4b-9e3c-005b491e60a3\") " pod="openstack/ovsdbserver-nb-1" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.864391 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25609809-9782-4b4b-9e3c-005b491e60a3-config\") pod \"ovsdbserver-nb-1\" (UID: \"25609809-9782-4b4b-9e3c-005b491e60a3\") " pod="openstack/ovsdbserver-nb-1" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.867029 4799 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.867297 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-f373a466-8cf5-4e99-a5df-5d531d597a51\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f373a466-8cf5-4e99-a5df-5d531d597a51\") pod \"ovsdbserver-nb-1\" (UID: \"25609809-9782-4b4b-9e3c-005b491e60a3\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/a8a744fccbe0a21982323983e40ab5bb5ecc29fad6d46a3e0aa8e41c3106ba39/globalmount\"" pod="openstack/ovsdbserver-nb-1" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.867358 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-66d08801-beac-4321-81f4-1c3025cf1959\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-66d08801-beac-4321-81f4-1c3025cf1959\") pod \"ovsdbserver-nb-0\" (UID: \"2a529948-04ef-4796-9237-9e8e30fe5f5c\") " pod="openstack/ovsdbserver-nb-0" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.868856 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25609809-9782-4b4b-9e3c-005b491e60a3-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"25609809-9782-4b4b-9e3c-005b491e60a3\") " pod="openstack/ovsdbserver-nb-1" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.871646 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-2"] Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.873542 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-2" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.879642 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-1"] Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.885281 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxdxj\" (UniqueName: \"kubernetes.io/projected/25609809-9782-4b4b-9e3c-005b491e60a3-kube-api-access-gxdxj\") pod \"ovsdbserver-nb-1\" (UID: \"25609809-9782-4b4b-9e3c-005b491e60a3\") " pod="openstack/ovsdbserver-nb-1" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.886259 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-1" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.891745 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"] Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.901044 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"] Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.910904 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-f373a466-8cf5-4e99-a5df-5d531d597a51\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f373a466-8cf5-4e99-a5df-5d531d597a51\") pod \"ovsdbserver-nb-1\" (UID: \"25609809-9782-4b4b-9e3c-005b491e60a3\") " pod="openstack/ovsdbserver-nb-1" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.963512 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/061a94d3-1f07-41b9-b64d-4f2470084fe7-config\") pod \"ovsdbserver-nb-2\" (UID: \"061a94d3-1f07-41b9-b64d-4f2470084fe7\") " pod="openstack/ovsdbserver-nb-2" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.963592 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/061a94d3-1f07-41b9-b64d-4f2470084fe7-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"061a94d3-1f07-41b9-b64d-4f2470084fe7\") " pod="openstack/ovsdbserver-nb-2" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.963713 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ec95c7a-ba6b-45d9-bcde-9b26b0068c83-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"0ec95c7a-ba6b-45d9-bcde-9b26b0068c83\") " pod="openstack/ovsdbserver-sb-0" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.963895 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0ec95c7a-ba6b-45d9-bcde-9b26b0068c83-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"0ec95c7a-ba6b-45d9-bcde-9b26b0068c83\") " pod="openstack/ovsdbserver-sb-0" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.964139 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ec95c7a-ba6b-45d9-bcde-9b26b0068c83-config\") pod \"ovsdbserver-sb-0\" (UID: \"0ec95c7a-ba6b-45d9-bcde-9b26b0068c83\") " pod="openstack/ovsdbserver-sb-0" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.964263 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/061a94d3-1f07-41b9-b64d-4f2470084fe7-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"061a94d3-1f07-41b9-b64d-4f2470084fe7\") " pod="openstack/ovsdbserver-nb-2" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.964329 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-151e912f-2519-4ceb-af0b-64fc907849cc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-151e912f-2519-4ceb-af0b-64fc907849cc\") pod \"ovsdbserver-nb-2\" (UID: \"061a94d3-1f07-41b9-b64d-4f2470084fe7\") " pod="openstack/ovsdbserver-nb-2" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.964422 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f45ts\" 
(UniqueName: \"kubernetes.io/projected/061a94d3-1f07-41b9-b64d-4f2470084fe7-kube-api-access-f45ts\") pod \"ovsdbserver-nb-2\" (UID: \"061a94d3-1f07-41b9-b64d-4f2470084fe7\") " pod="openstack/ovsdbserver-nb-2" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.964577 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/061a94d3-1f07-41b9-b64d-4f2470084fe7-config\") pod \"ovsdbserver-nb-2\" (UID: \"061a94d3-1f07-41b9-b64d-4f2470084fe7\") " pod="openstack/ovsdbserver-nb-2" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.965350 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.965870 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/061a94d3-1f07-41b9-b64d-4f2470084fe7-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"061a94d3-1f07-41b9-b64d-4f2470084fe7\") " pod="openstack/ovsdbserver-nb-2" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.966457 4799 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.966498 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-151e912f-2519-4ceb-af0b-64fc907849cc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-151e912f-2519-4ceb-af0b-64fc907849cc\") pod \"ovsdbserver-nb-2\" (UID: \"061a94d3-1f07-41b9-b64d-4f2470084fe7\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/8fdf2a4d345e279917d5ad603bfbfe61e8497d4b68bd43db7516718df335ee99/globalmount\"" pod="openstack/ovsdbserver-nb-2" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.967047 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0ec95c7a-ba6b-45d9-bcde-9b26b0068c83-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"0ec95c7a-ba6b-45d9-bcde-9b26b0068c83\") " pod="openstack/ovsdbserver-sb-0" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.967106 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-28aef492-ea10-44e3-b5ad-3733e47543ef\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-28aef492-ea10-44e3-b5ad-3733e47543ef\") pod \"ovsdbserver-sb-0\" (UID: \"0ec95c7a-ba6b-45d9-bcde-9b26b0068c83\") " pod="openstack/ovsdbserver-sb-0" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.967243 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/061a94d3-1f07-41b9-b64d-4f2470084fe7-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"061a94d3-1f07-41b9-b64d-4f2470084fe7\") " pod="openstack/ovsdbserver-nb-2" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.967283 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cwq56\" (UniqueName: \"kubernetes.io/projected/0ec95c7a-ba6b-45d9-bcde-9b26b0068c83-kube-api-access-cwq56\") pod \"ovsdbserver-sb-0\" (UID: \"0ec95c7a-ba6b-45d9-bcde-9b26b0068c83\") " pod="openstack/ovsdbserver-sb-0" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.969609 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" 
(UniqueName: \"kubernetes.io/empty-dir/061a94d3-1f07-41b9-b64d-4f2470084fe7-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"061a94d3-1f07-41b9-b64d-4f2470084fe7\") " pod="openstack/ovsdbserver-nb-2" Oct 10 17:57:08 crc kubenswrapper[4799]: I1010 17:57:08.971488 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/061a94d3-1f07-41b9-b64d-4f2470084fe7-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"061a94d3-1f07-41b9-b64d-4f2470084fe7\") " pod="openstack/ovsdbserver-nb-2" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.003458 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-1" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.068545 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ec95c7a-ba6b-45d9-bcde-9b26b0068c83-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"0ec95c7a-ba6b-45d9-bcde-9b26b0068c83\") " pod="openstack/ovsdbserver-sb-0" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.068609 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a08b3348-e784-42f6-bae6-f2f05b77af51-config\") pod \"ovsdbserver-sb-2\" (UID: \"a08b3348-e784-42f6-bae6-f2f05b77af51\") " pod="openstack/ovsdbserver-sb-2" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.068655 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0ec95c7a-ba6b-45d9-bcde-9b26b0068c83-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"0ec95c7a-ba6b-45d9-bcde-9b26b0068c83\") " pod="openstack/ovsdbserver-sb-0" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.068694 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ec95c7a-ba6b-45d9-bcde-9b26b0068c83-config\") pod \"ovsdbserver-sb-0\" (UID: \"0ec95c7a-ba6b-45d9-bcde-9b26b0068c83\") " pod="openstack/ovsdbserver-sb-0" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.068719 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a08b3348-e784-42f6-bae6-f2f05b77af51-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"a08b3348-e784-42f6-bae6-f2f05b77af51\") " pod="openstack/ovsdbserver-sb-2" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.068745 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/215d8c12-5f45-450c-949b-fce862e0290a-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"215d8c12-5f45-450c-949b-fce862e0290a\") " pod="openstack/ovsdbserver-sb-1" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.068813 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/215d8c12-5f45-450c-949b-fce862e0290a-config\") pod \"ovsdbserver-sb-1\" (UID: \"215d8c12-5f45-450c-949b-fce862e0290a\") " pod="openstack/ovsdbserver-sb-1" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.068844 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/215d8c12-5f45-450c-949b-fce862e0290a-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"215d8c12-5f45-450c-949b-fce862e0290a\") " pod="openstack/ovsdbserver-sb-1" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.068865 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0ec95c7a-ba6b-45d9-bcde-9b26b0068c83-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"0ec95c7a-ba6b-45d9-bcde-9b26b0068c83\") " pod="openstack/ovsdbserver-sb-0" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.068888 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-28aef492-ea10-44e3-b5ad-3733e47543ef\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-28aef492-ea10-44e3-b5ad-3733e47543ef\") pod \"ovsdbserver-sb-0\" (UID: \"0ec95c7a-ba6b-45d9-bcde-9b26b0068c83\") " pod="openstack/ovsdbserver-sb-0" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.068928 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/215d8c12-5f45-450c-949b-fce862e0290a-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"215d8c12-5f45-450c-949b-fce862e0290a\") " pod="openstack/ovsdbserver-sb-1" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.068963 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-de7f9246-33ba-4c5e-a60e-fc65c5b8edeb\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-de7f9246-33ba-4c5e-a60e-fc65c5b8edeb\") pod \"ovsdbserver-sb-2\" (UID: \"a08b3348-e784-42f6-bae6-f2f05b77af51\") " pod="openstack/ovsdbserver-sb-2" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.068987 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a08b3348-e784-42f6-bae6-f2f05b77af51-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"a08b3348-e784-42f6-bae6-f2f05b77af51\") " pod="openstack/ovsdbserver-sb-2" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.069009 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87fzg\" (UniqueName: \"kubernetes.io/projected/215d8c12-5f45-450c-949b-fce862e0290a-kube-api-access-87fzg\") pod \"ovsdbserver-sb-1\" (UID: \"215d8c12-5f45-450c-949b-fce862e0290a\") " pod="openstack/ovsdbserver-sb-1" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.069031 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpkmx\" (UniqueName: \"kubernetes.io/projected/a08b3348-e784-42f6-bae6-f2f05b77af51-kube-api-access-zpkmx\") pod \"ovsdbserver-sb-2\" (UID: \"a08b3348-e784-42f6-bae6-f2f05b77af51\") " pod="openstack/ovsdbserver-sb-2" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.069052 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cwq56\" (UniqueName: \"kubernetes.io/projected/0ec95c7a-ba6b-45d9-bcde-9b26b0068c83-kube-api-access-cwq56\") pod \"ovsdbserver-sb-0\" (UID: \"0ec95c7a-ba6b-45d9-bcde-9b26b0068c83\") " pod="openstack/ovsdbserver-sb-0" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.069094 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-165f3d61-9b59-4733-9cd4-2f7212e7a759\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-165f3d61-9b59-4733-9cd4-2f7212e7a759\") pod \"ovsdbserver-sb-1\" (UID: \"215d8c12-5f45-450c-949b-fce862e0290a\") " pod="openstack/ovsdbserver-sb-1" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.069129 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a08b3348-e784-42f6-bae6-f2f05b77af51-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"a08b3348-e784-42f6-bae6-f2f05b77af51\") " pod="openstack/ovsdbserver-sb-2" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.069363 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0ec95c7a-ba6b-45d9-bcde-9b26b0068c83-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"0ec95c7a-ba6b-45d9-bcde-9b26b0068c83\") " pod="openstack/ovsdbserver-sb-0" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.070281 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0ec95c7a-ba6b-45d9-bcde-9b26b0068c83-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"0ec95c7a-ba6b-45d9-bcde-9b26b0068c83\") " pod="openstack/ovsdbserver-sb-0" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.070281 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ec95c7a-ba6b-45d9-bcde-9b26b0068c83-config\") pod \"ovsdbserver-sb-0\" (UID: \"0ec95c7a-ba6b-45d9-bcde-9b26b0068c83\") " pod="openstack/ovsdbserver-sb-0" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.071168 4799 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.071203 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-28aef492-ea10-44e3-b5ad-3733e47543ef\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-28aef492-ea10-44e3-b5ad-3733e47543ef\") pod \"ovsdbserver-sb-0\" (UID: \"0ec95c7a-ba6b-45d9-bcde-9b26b0068c83\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/fd178232e9206296c4ad960bc09ca5cf503d10f7fe2922e62ed4a2f7bfe72834/globalmount\"" pod="openstack/ovsdbserver-sb-0" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.170246 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/215d8c12-5f45-450c-949b-fce862e0290a-config\") pod \"ovsdbserver-sb-1\" (UID: \"215d8c12-5f45-450c-949b-fce862e0290a\") " pod="openstack/ovsdbserver-sb-1" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.170320 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/215d8c12-5f45-450c-949b-fce862e0290a-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"215d8c12-5f45-450c-949b-fce862e0290a\") " pod="openstack/ovsdbserver-sb-1" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.170382 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/215d8c12-5f45-450c-949b-fce862e0290a-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"215d8c12-5f45-450c-949b-fce862e0290a\") " pod="openstack/ovsdbserver-sb-1" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.170421 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-de7f9246-33ba-4c5e-a60e-fc65c5b8edeb\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-de7f9246-33ba-4c5e-a60e-fc65c5b8edeb\") pod \"ovsdbserver-sb-2\" (UID: \"a08b3348-e784-42f6-bae6-f2f05b77af51\") " pod="openstack/ovsdbserver-sb-2" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.170449 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a08b3348-e784-42f6-bae6-f2f05b77af51-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"a08b3348-e784-42f6-bae6-f2f05b77af51\") " pod="openstack/ovsdbserver-sb-2" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.170477 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-87fzg\" (UniqueName: \"kubernetes.io/projected/215d8c12-5f45-450c-949b-fce862e0290a-kube-api-access-87fzg\") pod \"ovsdbserver-sb-1\" (UID: \"215d8c12-5f45-450c-949b-fce862e0290a\") " pod="openstack/ovsdbserver-sb-1" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.170507 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpkmx\" (UniqueName: \"kubernetes.io/projected/a08b3348-e784-42f6-bae6-f2f05b77af51-kube-api-access-zpkmx\") pod \"ovsdbserver-sb-2\" (UID: \"a08b3348-e784-42f6-bae6-f2f05b77af51\") " pod="openstack/ovsdbserver-sb-2" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.170560 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-165f3d61-9b59-4733-9cd4-2f7212e7a759\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-165f3d61-9b59-4733-9cd4-2f7212e7a759\") pod \"ovsdbserver-sb-1\" (UID: \"215d8c12-5f45-450c-949b-fce862e0290a\") " 
pod="openstack/ovsdbserver-sb-1" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.170598 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a08b3348-e784-42f6-bae6-f2f05b77af51-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"a08b3348-e784-42f6-bae6-f2f05b77af51\") " pod="openstack/ovsdbserver-sb-2" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.170641 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a08b3348-e784-42f6-bae6-f2f05b77af51-config\") pod \"ovsdbserver-sb-2\" (UID: \"a08b3348-e784-42f6-bae6-f2f05b77af51\") " pod="openstack/ovsdbserver-sb-2" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.170701 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a08b3348-e784-42f6-bae6-f2f05b77af51-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"a08b3348-e784-42f6-bae6-f2f05b77af51\") " pod="openstack/ovsdbserver-sb-2" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.170735 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/215d8c12-5f45-450c-949b-fce862e0290a-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"215d8c12-5f45-450c-949b-fce862e0290a\") " pod="openstack/ovsdbserver-sb-1" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.171334 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/215d8c12-5f45-450c-949b-fce862e0290a-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"215d8c12-5f45-450c-949b-fce862e0290a\") " pod="openstack/ovsdbserver-sb-1" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.174510 4799 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.174548 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-de7f9246-33ba-4c5e-a60e-fc65c5b8edeb\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-de7f9246-33ba-4c5e-a60e-fc65c5b8edeb\") pod \"ovsdbserver-sb-2\" (UID: \"a08b3348-e784-42f6-bae6-f2f05b77af51\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/285629d8d8342f9e79a269e7b78f1ab2d8c855987258bf4aec4bd63ae4ef8cca/globalmount\"" pod="openstack/ovsdbserver-sb-2" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.175302 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a08b3348-e784-42f6-bae6-f2f05b77af51-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"a08b3348-e784-42f6-bae6-f2f05b77af51\") " pod="openstack/ovsdbserver-sb-2" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.175342 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a08b3348-e784-42f6-bae6-f2f05b77af51-config\") pod \"ovsdbserver-sb-2\" (UID: \"a08b3348-e784-42f6-bae6-f2f05b77af51\") " pod="openstack/ovsdbserver-sb-2" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.176029 4799 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.176113 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-165f3d61-9b59-4733-9cd4-2f7212e7a759\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-165f3d61-9b59-4733-9cd4-2f7212e7a759\") pod \"ovsdbserver-sb-1\" (UID: \"215d8c12-5f45-450c-949b-fce862e0290a\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/66e012909c71d8ff50631a08658a7ae4cd22d3c024e342fae04ad9a98d8461bf/globalmount\"" pod="openstack/ovsdbserver-sb-1" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.176492 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/215d8c12-5f45-450c-949b-fce862e0290a-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"215d8c12-5f45-450c-949b-fce862e0290a\") " pod="openstack/ovsdbserver-sb-1" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.177110 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a08b3348-e784-42f6-bae6-f2f05b77af51-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"a08b3348-e784-42f6-bae6-f2f05b77af51\") " pod="openstack/ovsdbserver-sb-2" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.178240 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/215d8c12-5f45-450c-949b-fce862e0290a-config\") pod \"ovsdbserver-sb-1\" (UID: \"215d8c12-5f45-450c-949b-fce862e0290a\") " pod="openstack/ovsdbserver-sb-1" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.386101 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ec95c7a-ba6b-45d9-bcde-9b26b0068c83-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"0ec95c7a-ba6b-45d9-bcde-9b26b0068c83\") " pod="openstack/ovsdbserver-sb-0" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.386099 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f45ts\" (UniqueName: \"kubernetes.io/projected/061a94d3-1f07-41b9-b64d-4f2470084fe7-kube-api-access-f45ts\") pod \"ovsdbserver-nb-2\" (UID: \"061a94d3-1f07-41b9-b64d-4f2470084fe7\") " pod="openstack/ovsdbserver-nb-2" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.387367 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/215d8c12-5f45-450c-949b-fce862e0290a-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"215d8c12-5f45-450c-949b-fce862e0290a\") " pod="openstack/ovsdbserver-sb-1" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.388088 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpkmx\" (UniqueName: \"kubernetes.io/projected/a08b3348-e784-42f6-bae6-f2f05b77af51-kube-api-access-zpkmx\") pod \"ovsdbserver-sb-2\" (UID: \"a08b3348-e784-42f6-bae6-f2f05b77af51\") " pod="openstack/ovsdbserver-sb-2" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.388529 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a08b3348-e784-42f6-bae6-f2f05b77af51-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"a08b3348-e784-42f6-bae6-f2f05b77af51\") " pod="openstack/ovsdbserver-sb-2" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.388642 4799 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-cwq56\" (UniqueName: \"kubernetes.io/projected/0ec95c7a-ba6b-45d9-bcde-9b26b0068c83-kube-api-access-cwq56\") pod \"ovsdbserver-sb-0\" (UID: \"0ec95c7a-ba6b-45d9-bcde-9b26b0068c83\") " pod="openstack/ovsdbserver-sb-0" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.389038 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87fzg\" (UniqueName: \"kubernetes.io/projected/215d8c12-5f45-450c-949b-fce862e0290a-kube-api-access-87fzg\") pod \"ovsdbserver-sb-1\" (UID: \"215d8c12-5f45-450c-949b-fce862e0290a\") " pod="openstack/ovsdbserver-sb-1" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.512697 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-28aef492-ea10-44e3-b5ad-3733e47543ef\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-28aef492-ea10-44e3-b5ad-3733e47543ef\") pod \"ovsdbserver-sb-0\" (UID: \"0ec95c7a-ba6b-45d9-bcde-9b26b0068c83\") " pod="openstack/ovsdbserver-sb-0" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.527126 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.626288 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-de7f9246-33ba-4c5e-a60e-fc65c5b8edeb\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-de7f9246-33ba-4c5e-a60e-fc65c5b8edeb\") pod \"ovsdbserver-sb-2\" (UID: \"a08b3348-e784-42f6-bae6-f2f05b77af51\") " pod="openstack/ovsdbserver-sb-2" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.631533 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-151e912f-2519-4ceb-af0b-64fc907849cc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-151e912f-2519-4ceb-af0b-64fc907849cc\") pod \"ovsdbserver-nb-2\" (UID: \"061a94d3-1f07-41b9-b64d-4f2470084fe7\") " pod="openstack/ovsdbserver-nb-2" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.634791 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-165f3d61-9b59-4733-9cd4-2f7212e7a759\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-165f3d61-9b59-4733-9cd4-2f7212e7a759\") pod \"ovsdbserver-sb-1\" (UID: \"215d8c12-5f45-450c-949b-fce862e0290a\") " pod="openstack/ovsdbserver-sb-1" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.674546 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.845242 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-2" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.849340 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-1" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.913917 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-2" Oct 10 17:57:09 crc kubenswrapper[4799]: I1010 17:57:09.978534 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"] Oct 10 17:57:10 crc kubenswrapper[4799]: I1010 17:57:10.125948 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 10 17:57:10 crc kubenswrapper[4799]: I1010 17:57:10.230425 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"25609809-9782-4b4b-9e3c-005b491e60a3","Type":"ContainerStarted","Data":"a4664cf7041aed4732052d4f224142a42aebc2c26fd42715c39e799a71f415ae"} Oct 10 17:57:10 crc kubenswrapper[4799]: I1010 17:57:10.230468 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"25609809-9782-4b4b-9e3c-005b491e60a3","Type":"ContainerStarted","Data":"4e179940c3b57a5d6b2180a540891a6912fe95def8dff4b7abbbaf5fec43d116"} Oct 10 17:57:10 crc kubenswrapper[4799]: I1010 17:57:10.244021 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"2a529948-04ef-4796-9237-9e8e30fe5f5c","Type":"ContainerStarted","Data":"6cb8ac335285f5a090ea07ae421360e6eeed26818358e65b09928fe5c656af71"} Oct 10 17:57:10 crc kubenswrapper[4799]: I1010 17:57:10.244068 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"2a529948-04ef-4796-9237-9e8e30fe5f5c","Type":"ContainerStarted","Data":"a1073802d5422487ed06e72c0cafd277fe447f85f55d1ee835b91c8d79cf0855"} Oct 10 17:57:10 crc kubenswrapper[4799]: I1010 17:57:10.244082 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"2a529948-04ef-4796-9237-9e8e30fe5f5c","Type":"ContainerStarted","Data":"65b88e5355666e6e3df2ae46a3636baaea8add049ce11da8fff19e060e10063f"} Oct 10 17:57:10 crc kubenswrapper[4799]: I1010 17:57:10.248237 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"0ec95c7a-ba6b-45d9-bcde-9b26b0068c83","Type":"ContainerStarted","Data":"b0e86a39e52a4ea40bf0c927c720bfa092461564887b4e220f35588e706e622d"} Oct 10 17:57:10 crc kubenswrapper[4799]: I1010 17:57:10.280444 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=3.280423109 podStartE2EDuration="3.280423109s" podCreationTimestamp="2025-10-10 17:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 17:57:10.268747585 +0000 UTC m=+5123.777071710" watchObservedRunningTime="2025-10-10 17:57:10.280423109 +0000 UTC m=+5123.788747224" Oct 10 17:57:10 crc kubenswrapper[4799]: I1010 17:57:10.365686 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"] Oct 10 17:57:10 crc kubenswrapper[4799]: W1010 17:57:10.380970 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod215d8c12_5f45_450c_949b_fce862e0290a.slice/crio-4f9bbc0146b0d0dd2c75876b3d620a14434a5290476571e9d237a0e154954a60 WatchSource:0}: Error finding container 4f9bbc0146b0d0dd2c75876b3d620a14434a5290476571e9d237a0e154954a60: Status 404 returned error can't find the container with id 4f9bbc0146b0d0dd2c75876b3d620a14434a5290476571e9d237a0e154954a60 Oct 10 17:57:10 crc kubenswrapper[4799]: I1010 17:57:10.478655 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/ovsdbserver-sb-2"] Oct 10 17:57:10 crc kubenswrapper[4799]: I1010 17:57:10.565191 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"] Oct 10 17:57:10 crc kubenswrapper[4799]: W1010 17:57:10.576011 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod061a94d3_1f07_41b9_b64d_4f2470084fe7.slice/crio-4fcfb6e6d3c4dc960719ff5499a7115c51716ca4fdc9ebc182129578dd43b546 WatchSource:0}: Error finding container 4fcfb6e6d3c4dc960719ff5499a7115c51716ca4fdc9ebc182129578dd43b546: Status 404 returned error can't find the container with id 4fcfb6e6d3c4dc960719ff5499a7115c51716ca4fdc9ebc182129578dd43b546 Oct 10 17:57:11 crc kubenswrapper[4799]: I1010 17:57:11.258383 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"0ec95c7a-ba6b-45d9-bcde-9b26b0068c83","Type":"ContainerStarted","Data":"7749feeaf1dbeb426b1e5451acd9e1f838e6463196779489bdbcfa66929aa7b6"} Oct 10 17:57:11 crc kubenswrapper[4799]: I1010 17:57:11.258686 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"0ec95c7a-ba6b-45d9-bcde-9b26b0068c83","Type":"ContainerStarted","Data":"7abd40f59f73186f674421520924cacfb011779006ffc0c5faad98ba30015835"} Oct 10 17:57:11 crc kubenswrapper[4799]: I1010 17:57:11.260345 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"a08b3348-e784-42f6-bae6-f2f05b77af51","Type":"ContainerStarted","Data":"fc6cd024664eba7205ab25f72922edebdc3655e44cd294b80dcfc97ff749c4bc"} Oct 10 17:57:11 crc kubenswrapper[4799]: I1010 17:57:11.260402 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"a08b3348-e784-42f6-bae6-f2f05b77af51","Type":"ContainerStarted","Data":"73dc338c4de857489030cce86e4a13ea4afaa3100f2c4759eb7d931a1a0e7c9b"} Oct 10 17:57:11 crc kubenswrapper[4799]: I1010 17:57:11.260417 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"a08b3348-e784-42f6-bae6-f2f05b77af51","Type":"ContainerStarted","Data":"b66372c50d55922cc622b33d9b7e043a3df6e02d5c2a23f1dcbc0903a637293b"} Oct 10 17:57:11 crc kubenswrapper[4799]: I1010 17:57:11.262662 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"061a94d3-1f07-41b9-b64d-4f2470084fe7","Type":"ContainerStarted","Data":"91f1d4fcc39de25fec646439f3c415eee746de59dcef753192b0e69c00646b41"} Oct 10 17:57:11 crc kubenswrapper[4799]: I1010 17:57:11.262741 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"061a94d3-1f07-41b9-b64d-4f2470084fe7","Type":"ContainerStarted","Data":"7248f9eb92d4c111240babb33d4dfd63afa45c18c701bba9fc77ef950ecd8fd3"} Oct 10 17:57:11 crc kubenswrapper[4799]: I1010 17:57:11.262805 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"061a94d3-1f07-41b9-b64d-4f2470084fe7","Type":"ContainerStarted","Data":"4fcfb6e6d3c4dc960719ff5499a7115c51716ca4fdc9ebc182129578dd43b546"} Oct 10 17:57:11 crc kubenswrapper[4799]: I1010 17:57:11.264925 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"25609809-9782-4b4b-9e3c-005b491e60a3","Type":"ContainerStarted","Data":"df131c66e853c3fe1dc5610083f64de107fb138b8af81b955a4a9ee2d5b019ab"} Oct 10 17:57:11 crc kubenswrapper[4799]: I1010 17:57:11.266552 4799 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"215d8c12-5f45-450c-949b-fce862e0290a","Type":"ContainerStarted","Data":"ddc39dd11667f2c2e83fb5aef75376b7de12e445bc593a8c873e1f765ff6f3a2"} Oct 10 17:57:11 crc kubenswrapper[4799]: I1010 17:57:11.266583 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"215d8c12-5f45-450c-949b-fce862e0290a","Type":"ContainerStarted","Data":"0acd5d834bb7266934f6c2d66aa70224a35feb1d6b9c44fd7bec6e542ad51fd6"} Oct 10 17:57:11 crc kubenswrapper[4799]: I1010 17:57:11.266600 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"215d8c12-5f45-450c-949b-fce862e0290a","Type":"ContainerStarted","Data":"4f9bbc0146b0d0dd2c75876b3d620a14434a5290476571e9d237a0e154954a60"} Oct 10 17:57:11 crc kubenswrapper[4799]: I1010 17:57:11.282445 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=4.282411981 podStartE2EDuration="4.282411981s" podCreationTimestamp="2025-10-10 17:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 17:57:11.277737777 +0000 UTC m=+5124.786061922" watchObservedRunningTime="2025-10-10 17:57:11.282411981 +0000 UTC m=+5124.790736136" Oct 10 17:57:11 crc kubenswrapper[4799]: I1010 17:57:11.305551 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-2" podStartSLOduration=4.305517155 podStartE2EDuration="4.305517155s" podCreationTimestamp="2025-10-10 17:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 17:57:11.297917639 +0000 UTC m=+5124.806241844" watchObservedRunningTime="2025-10-10 17:57:11.305517155 +0000 UTC m=+5124.813841270" Oct 10 17:57:11 crc kubenswrapper[4799]: I1010 17:57:11.333347 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-1" podStartSLOduration=4.333311922 podStartE2EDuration="4.333311922s" podCreationTimestamp="2025-10-10 17:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 17:57:11.323584895 +0000 UTC m=+5124.831909100" watchObservedRunningTime="2025-10-10 17:57:11.333311922 +0000 UTC m=+5124.841636077" Oct 10 17:57:11 crc kubenswrapper[4799]: I1010 17:57:11.362721 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-1" podStartSLOduration=4.362691308 podStartE2EDuration="4.362691308s" podCreationTimestamp="2025-10-10 17:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 17:57:11.341304517 +0000 UTC m=+5124.849628652" watchObservedRunningTime="2025-10-10 17:57:11.362691308 +0000 UTC m=+5124.871015423" Oct 10 17:57:11 crc kubenswrapper[4799]: I1010 17:57:11.365241 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-2" podStartSLOduration=4.36523097 podStartE2EDuration="4.36523097s" podCreationTimestamp="2025-10-10 17:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 17:57:11.361173771 +0000 UTC m=+5124.869497886" watchObservedRunningTime="2025-10-10 
17:57:11.36523097 +0000 UTC m=+5124.873555085" Oct 10 17:57:11 crc kubenswrapper[4799]: I1010 17:57:11.402181 4799 scope.go:117] "RemoveContainer" containerID="5e973f476cb0655a6e33e886e2a59fc6754febf3bf5a4718abcef307858985dd" Oct 10 17:57:11 crc kubenswrapper[4799]: E1010 17:57:11.402402 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:57:11 crc kubenswrapper[4799]: I1010 17:57:11.966270 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Oct 10 17:57:12 crc kubenswrapper[4799]: I1010 17:57:12.004040 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-1" Oct 10 17:57:12 crc kubenswrapper[4799]: I1010 17:57:12.527856 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Oct 10 17:57:12 crc kubenswrapper[4799]: I1010 17:57:12.846869 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-2" Oct 10 17:57:12 crc kubenswrapper[4799]: I1010 17:57:12.850141 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-1" Oct 10 17:57:12 crc kubenswrapper[4799]: I1010 17:57:12.914682 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-2" Oct 10 17:57:13 crc kubenswrapper[4799]: I1010 17:57:13.966069 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Oct 10 17:57:14 crc kubenswrapper[4799]: I1010 17:57:14.004221 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-1" Oct 10 17:57:14 crc kubenswrapper[4799]: I1010 17:57:14.527953 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Oct 10 17:57:14 crc kubenswrapper[4799]: I1010 17:57:14.846389 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-2" Oct 10 17:57:14 crc kubenswrapper[4799]: I1010 17:57:14.849741 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-1" Oct 10 17:57:14 crc kubenswrapper[4799]: I1010 17:57:14.914219 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-2" Oct 10 17:57:15 crc kubenswrapper[4799]: I1010 17:57:15.039834 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Oct 10 17:57:15 crc kubenswrapper[4799]: I1010 17:57:15.087402 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-1" Oct 10 17:57:15 crc kubenswrapper[4799]: I1010 17:57:15.126658 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Oct 10 17:57:15 crc kubenswrapper[4799]: I1010 17:57:15.366608 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-1" Oct 10 17:57:15 crc kubenswrapper[4799]: I1010 17:57:15.425487 4799 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/dnsmasq-dns-56d7fcb4c-5gcwt"] Oct 10 17:57:15 crc kubenswrapper[4799]: I1010 17:57:15.427078 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56d7fcb4c-5gcwt" Oct 10 17:57:15 crc kubenswrapper[4799]: I1010 17:57:15.430910 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Oct 10 17:57:15 crc kubenswrapper[4799]: I1010 17:57:15.451302 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56d7fcb4c-5gcwt"] Oct 10 17:57:15 crc kubenswrapper[4799]: I1010 17:57:15.561633 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Oct 10 17:57:15 crc kubenswrapper[4799]: I1010 17:57:15.597043 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d8f77d8-d83b-4a7d-9400-a0c15aba0664-config\") pod \"dnsmasq-dns-56d7fcb4c-5gcwt\" (UID: \"5d8f77d8-d83b-4a7d-9400-a0c15aba0664\") " pod="openstack/dnsmasq-dns-56d7fcb4c-5gcwt" Oct 10 17:57:15 crc kubenswrapper[4799]: I1010 17:57:15.597098 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5d8f77d8-d83b-4a7d-9400-a0c15aba0664-dns-svc\") pod \"dnsmasq-dns-56d7fcb4c-5gcwt\" (UID: \"5d8f77d8-d83b-4a7d-9400-a0c15aba0664\") " pod="openstack/dnsmasq-dns-56d7fcb4c-5gcwt" Oct 10 17:57:15 crc kubenswrapper[4799]: I1010 17:57:15.597206 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5d8f77d8-d83b-4a7d-9400-a0c15aba0664-ovsdbserver-nb\") pod \"dnsmasq-dns-56d7fcb4c-5gcwt\" (UID: \"5d8f77d8-d83b-4a7d-9400-a0c15aba0664\") " pod="openstack/dnsmasq-dns-56d7fcb4c-5gcwt" Oct 10 17:57:15 crc kubenswrapper[4799]: I1010 17:57:15.597271 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ff6fw\" (UniqueName: \"kubernetes.io/projected/5d8f77d8-d83b-4a7d-9400-a0c15aba0664-kube-api-access-ff6fw\") pod \"dnsmasq-dns-56d7fcb4c-5gcwt\" (UID: \"5d8f77d8-d83b-4a7d-9400-a0c15aba0664\") " pod="openstack/dnsmasq-dns-56d7fcb4c-5gcwt" Oct 10 17:57:15 crc kubenswrapper[4799]: I1010 17:57:15.604597 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Oct 10 17:57:15 crc kubenswrapper[4799]: I1010 17:57:15.698851 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ff6fw\" (UniqueName: \"kubernetes.io/projected/5d8f77d8-d83b-4a7d-9400-a0c15aba0664-kube-api-access-ff6fw\") pod \"dnsmasq-dns-56d7fcb4c-5gcwt\" (UID: \"5d8f77d8-d83b-4a7d-9400-a0c15aba0664\") " pod="openstack/dnsmasq-dns-56d7fcb4c-5gcwt" Oct 10 17:57:15 crc kubenswrapper[4799]: I1010 17:57:15.699457 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d8f77d8-d83b-4a7d-9400-a0c15aba0664-config\") pod \"dnsmasq-dns-56d7fcb4c-5gcwt\" (UID: \"5d8f77d8-d83b-4a7d-9400-a0c15aba0664\") " pod="openstack/dnsmasq-dns-56d7fcb4c-5gcwt" Oct 10 17:57:15 crc kubenswrapper[4799]: I1010 17:57:15.699676 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5d8f77d8-d83b-4a7d-9400-a0c15aba0664-dns-svc\") pod \"dnsmasq-dns-56d7fcb4c-5gcwt\" 
(UID: \"5d8f77d8-d83b-4a7d-9400-a0c15aba0664\") " pod="openstack/dnsmasq-dns-56d7fcb4c-5gcwt" Oct 10 17:57:15 crc kubenswrapper[4799]: I1010 17:57:15.700011 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5d8f77d8-d83b-4a7d-9400-a0c15aba0664-ovsdbserver-nb\") pod \"dnsmasq-dns-56d7fcb4c-5gcwt\" (UID: \"5d8f77d8-d83b-4a7d-9400-a0c15aba0664\") " pod="openstack/dnsmasq-dns-56d7fcb4c-5gcwt" Oct 10 17:57:15 crc kubenswrapper[4799]: I1010 17:57:15.700743 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d8f77d8-d83b-4a7d-9400-a0c15aba0664-config\") pod \"dnsmasq-dns-56d7fcb4c-5gcwt\" (UID: \"5d8f77d8-d83b-4a7d-9400-a0c15aba0664\") " pod="openstack/dnsmasq-dns-56d7fcb4c-5gcwt" Oct 10 17:57:15 crc kubenswrapper[4799]: I1010 17:57:15.701304 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5d8f77d8-d83b-4a7d-9400-a0c15aba0664-dns-svc\") pod \"dnsmasq-dns-56d7fcb4c-5gcwt\" (UID: \"5d8f77d8-d83b-4a7d-9400-a0c15aba0664\") " pod="openstack/dnsmasq-dns-56d7fcb4c-5gcwt" Oct 10 17:57:15 crc kubenswrapper[4799]: I1010 17:57:15.701623 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5d8f77d8-d83b-4a7d-9400-a0c15aba0664-ovsdbserver-nb\") pod \"dnsmasq-dns-56d7fcb4c-5gcwt\" (UID: \"5d8f77d8-d83b-4a7d-9400-a0c15aba0664\") " pod="openstack/dnsmasq-dns-56d7fcb4c-5gcwt" Oct 10 17:57:15 crc kubenswrapper[4799]: I1010 17:57:15.724607 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ff6fw\" (UniqueName: \"kubernetes.io/projected/5d8f77d8-d83b-4a7d-9400-a0c15aba0664-kube-api-access-ff6fw\") pod \"dnsmasq-dns-56d7fcb4c-5gcwt\" (UID: \"5d8f77d8-d83b-4a7d-9400-a0c15aba0664\") " pod="openstack/dnsmasq-dns-56d7fcb4c-5gcwt" Oct 10 17:57:15 crc kubenswrapper[4799]: I1010 17:57:15.758900 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56d7fcb4c-5gcwt" Oct 10 17:57:15 crc kubenswrapper[4799]: I1010 17:57:15.900314 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-2" Oct 10 17:57:15 crc kubenswrapper[4799]: I1010 17:57:15.910186 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56d7fcb4c-5gcwt"] Oct 10 17:57:15 crc kubenswrapper[4799]: I1010 17:57:15.912817 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-1" Oct 10 17:57:15 crc kubenswrapper[4799]: I1010 17:57:15.960366 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5cbf6d5c75-9gk4t"] Oct 10 17:57:15 crc kubenswrapper[4799]: I1010 17:57:15.961776 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5cbf6d5c75-9gk4t" Oct 10 17:57:15 crc kubenswrapper[4799]: I1010 17:57:15.966565 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Oct 10 17:57:15 crc kubenswrapper[4799]: I1010 17:57:15.984024 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-2" Oct 10 17:57:15 crc kubenswrapper[4799]: I1010 17:57:15.986805 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-2" Oct 10 17:57:16 crc kubenswrapper[4799]: I1010 17:57:16.004479 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-1" Oct 10 17:57:16 crc kubenswrapper[4799]: I1010 17:57:16.013683 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5cbf6d5c75-9gk4t"] Oct 10 17:57:16 crc kubenswrapper[4799]: I1010 17:57:16.039611 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-2" Oct 10 17:57:16 crc kubenswrapper[4799]: I1010 17:57:16.113606 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f30813b7-4ff2-48c2-81f0-0e00fbb208e9-ovsdbserver-sb\") pod \"dnsmasq-dns-5cbf6d5c75-9gk4t\" (UID: \"f30813b7-4ff2-48c2-81f0-0e00fbb208e9\") " pod="openstack/dnsmasq-dns-5cbf6d5c75-9gk4t" Oct 10 17:57:16 crc kubenswrapper[4799]: I1010 17:57:16.114883 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f30813b7-4ff2-48c2-81f0-0e00fbb208e9-config\") pod \"dnsmasq-dns-5cbf6d5c75-9gk4t\" (UID: \"f30813b7-4ff2-48c2-81f0-0e00fbb208e9\") " pod="openstack/dnsmasq-dns-5cbf6d5c75-9gk4t" Oct 10 17:57:16 crc kubenswrapper[4799]: I1010 17:57:16.115124 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f30813b7-4ff2-48c2-81f0-0e00fbb208e9-dns-svc\") pod \"dnsmasq-dns-5cbf6d5c75-9gk4t\" (UID: \"f30813b7-4ff2-48c2-81f0-0e00fbb208e9\") " pod="openstack/dnsmasq-dns-5cbf6d5c75-9gk4t" Oct 10 17:57:16 crc kubenswrapper[4799]: I1010 17:57:16.115418 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jx9pm\" (UniqueName: \"kubernetes.io/projected/f30813b7-4ff2-48c2-81f0-0e00fbb208e9-kube-api-access-jx9pm\") pod \"dnsmasq-dns-5cbf6d5c75-9gk4t\" (UID: \"f30813b7-4ff2-48c2-81f0-0e00fbb208e9\") " pod="openstack/dnsmasq-dns-5cbf6d5c75-9gk4t" Oct 10 17:57:16 crc kubenswrapper[4799]: I1010 17:57:16.115525 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f30813b7-4ff2-48c2-81f0-0e00fbb208e9-ovsdbserver-nb\") pod \"dnsmasq-dns-5cbf6d5c75-9gk4t\" (UID: \"f30813b7-4ff2-48c2-81f0-0e00fbb208e9\") " pod="openstack/dnsmasq-dns-5cbf6d5c75-9gk4t" Oct 10 17:57:16 crc kubenswrapper[4799]: I1010 17:57:16.216863 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f30813b7-4ff2-48c2-81f0-0e00fbb208e9-dns-svc\") pod \"dnsmasq-dns-5cbf6d5c75-9gk4t\" (UID: \"f30813b7-4ff2-48c2-81f0-0e00fbb208e9\") " pod="openstack/dnsmasq-dns-5cbf6d5c75-9gk4t" Oct 10 17:57:16 crc kubenswrapper[4799]: I1010 17:57:16.216936 4799 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jx9pm\" (UniqueName: \"kubernetes.io/projected/f30813b7-4ff2-48c2-81f0-0e00fbb208e9-kube-api-access-jx9pm\") pod \"dnsmasq-dns-5cbf6d5c75-9gk4t\" (UID: \"f30813b7-4ff2-48c2-81f0-0e00fbb208e9\") " pod="openstack/dnsmasq-dns-5cbf6d5c75-9gk4t" Oct 10 17:57:16 crc kubenswrapper[4799]: I1010 17:57:16.216961 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f30813b7-4ff2-48c2-81f0-0e00fbb208e9-ovsdbserver-nb\") pod \"dnsmasq-dns-5cbf6d5c75-9gk4t\" (UID: \"f30813b7-4ff2-48c2-81f0-0e00fbb208e9\") " pod="openstack/dnsmasq-dns-5cbf6d5c75-9gk4t" Oct 10 17:57:16 crc kubenswrapper[4799]: I1010 17:57:16.217008 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f30813b7-4ff2-48c2-81f0-0e00fbb208e9-ovsdbserver-sb\") pod \"dnsmasq-dns-5cbf6d5c75-9gk4t\" (UID: \"f30813b7-4ff2-48c2-81f0-0e00fbb208e9\") " pod="openstack/dnsmasq-dns-5cbf6d5c75-9gk4t" Oct 10 17:57:16 crc kubenswrapper[4799]: I1010 17:57:16.217027 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f30813b7-4ff2-48c2-81f0-0e00fbb208e9-config\") pod \"dnsmasq-dns-5cbf6d5c75-9gk4t\" (UID: \"f30813b7-4ff2-48c2-81f0-0e00fbb208e9\") " pod="openstack/dnsmasq-dns-5cbf6d5c75-9gk4t" Oct 10 17:57:16 crc kubenswrapper[4799]: I1010 17:57:16.217957 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f30813b7-4ff2-48c2-81f0-0e00fbb208e9-dns-svc\") pod \"dnsmasq-dns-5cbf6d5c75-9gk4t\" (UID: \"f30813b7-4ff2-48c2-81f0-0e00fbb208e9\") " pod="openstack/dnsmasq-dns-5cbf6d5c75-9gk4t" Oct 10 17:57:16 crc kubenswrapper[4799]: I1010 17:57:16.218225 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f30813b7-4ff2-48c2-81f0-0e00fbb208e9-config\") pod \"dnsmasq-dns-5cbf6d5c75-9gk4t\" (UID: \"f30813b7-4ff2-48c2-81f0-0e00fbb208e9\") " pod="openstack/dnsmasq-dns-5cbf6d5c75-9gk4t" Oct 10 17:57:16 crc kubenswrapper[4799]: I1010 17:57:16.218542 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f30813b7-4ff2-48c2-81f0-0e00fbb208e9-ovsdbserver-sb\") pod \"dnsmasq-dns-5cbf6d5c75-9gk4t\" (UID: \"f30813b7-4ff2-48c2-81f0-0e00fbb208e9\") " pod="openstack/dnsmasq-dns-5cbf6d5c75-9gk4t" Oct 10 17:57:16 crc kubenswrapper[4799]: I1010 17:57:16.219911 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f30813b7-4ff2-48c2-81f0-0e00fbb208e9-ovsdbserver-nb\") pod \"dnsmasq-dns-5cbf6d5c75-9gk4t\" (UID: \"f30813b7-4ff2-48c2-81f0-0e00fbb208e9\") " pod="openstack/dnsmasq-dns-5cbf6d5c75-9gk4t" Oct 10 17:57:16 crc kubenswrapper[4799]: I1010 17:57:16.240916 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jx9pm\" (UniqueName: \"kubernetes.io/projected/f30813b7-4ff2-48c2-81f0-0e00fbb208e9-kube-api-access-jx9pm\") pod \"dnsmasq-dns-5cbf6d5c75-9gk4t\" (UID: \"f30813b7-4ff2-48c2-81f0-0e00fbb208e9\") " pod="openstack/dnsmasq-dns-5cbf6d5c75-9gk4t" Oct 10 17:57:16 crc kubenswrapper[4799]: I1010 17:57:16.302442 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5cbf6d5c75-9gk4t" Oct 10 17:57:16 crc kubenswrapper[4799]: I1010 17:57:16.343520 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56d7fcb4c-5gcwt"] Oct 10 17:57:16 crc kubenswrapper[4799]: I1010 17:57:16.788668 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5cbf6d5c75-9gk4t"] Oct 10 17:57:16 crc kubenswrapper[4799]: W1010 17:57:16.794905 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf30813b7_4ff2_48c2_81f0_0e00fbb208e9.slice/crio-cfec1868410b6bd8bebebdd7b1abd0160f817b86fd89da3f59a3e061858dbaeb WatchSource:0}: Error finding container cfec1868410b6bd8bebebdd7b1abd0160f817b86fd89da3f59a3e061858dbaeb: Status 404 returned error can't find the container with id cfec1868410b6bd8bebebdd7b1abd0160f817b86fd89da3f59a3e061858dbaeb Oct 10 17:57:17 crc kubenswrapper[4799]: I1010 17:57:17.330094 4799 generic.go:334] "Generic (PLEG): container finished" podID="5d8f77d8-d83b-4a7d-9400-a0c15aba0664" containerID="86ae59400644c3098efcbcbc86777cd4a46dea6b5953fe7f06563fe20fae6e5b" exitCode=0 Oct 10 17:57:17 crc kubenswrapper[4799]: I1010 17:57:17.330201 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56d7fcb4c-5gcwt" event={"ID":"5d8f77d8-d83b-4a7d-9400-a0c15aba0664","Type":"ContainerDied","Data":"86ae59400644c3098efcbcbc86777cd4a46dea6b5953fe7f06563fe20fae6e5b"} Oct 10 17:57:17 crc kubenswrapper[4799]: I1010 17:57:17.330545 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56d7fcb4c-5gcwt" event={"ID":"5d8f77d8-d83b-4a7d-9400-a0c15aba0664","Type":"ContainerStarted","Data":"e8d727643bc4839cefcaae59be85a1774f4b20a0a4d9e97a73a95801c3430f35"} Oct 10 17:57:17 crc kubenswrapper[4799]: I1010 17:57:17.332880 4799 generic.go:334] "Generic (PLEG): container finished" podID="f30813b7-4ff2-48c2-81f0-0e00fbb208e9" containerID="802daea0f9f1f28f41b3722862ec0b99be344ecb8cfeef2c0627318bad03154d" exitCode=0 Oct 10 17:57:17 crc kubenswrapper[4799]: I1010 17:57:17.332933 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5cbf6d5c75-9gk4t" event={"ID":"f30813b7-4ff2-48c2-81f0-0e00fbb208e9","Type":"ContainerDied","Data":"802daea0f9f1f28f41b3722862ec0b99be344ecb8cfeef2c0627318bad03154d"} Oct 10 17:57:17 crc kubenswrapper[4799]: I1010 17:57:17.332964 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5cbf6d5c75-9gk4t" event={"ID":"f30813b7-4ff2-48c2-81f0-0e00fbb208e9","Type":"ContainerStarted","Data":"cfec1868410b6bd8bebebdd7b1abd0160f817b86fd89da3f59a3e061858dbaeb"} Oct 10 17:57:17 crc kubenswrapper[4799]: I1010 17:57:17.667624 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-56d7fcb4c-5gcwt" Oct 10 17:57:17 crc kubenswrapper[4799]: I1010 17:57:17.780915 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5d8f77d8-d83b-4a7d-9400-a0c15aba0664-dns-svc\") pod \"5d8f77d8-d83b-4a7d-9400-a0c15aba0664\" (UID: \"5d8f77d8-d83b-4a7d-9400-a0c15aba0664\") " Oct 10 17:57:17 crc kubenswrapper[4799]: I1010 17:57:17.780970 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5d8f77d8-d83b-4a7d-9400-a0c15aba0664-ovsdbserver-nb\") pod \"5d8f77d8-d83b-4a7d-9400-a0c15aba0664\" (UID: \"5d8f77d8-d83b-4a7d-9400-a0c15aba0664\") " Oct 10 17:57:17 crc kubenswrapper[4799]: I1010 17:57:17.781022 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ff6fw\" (UniqueName: \"kubernetes.io/projected/5d8f77d8-d83b-4a7d-9400-a0c15aba0664-kube-api-access-ff6fw\") pod \"5d8f77d8-d83b-4a7d-9400-a0c15aba0664\" (UID: \"5d8f77d8-d83b-4a7d-9400-a0c15aba0664\") " Oct 10 17:57:17 crc kubenswrapper[4799]: I1010 17:57:17.781056 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d8f77d8-d83b-4a7d-9400-a0c15aba0664-config\") pod \"5d8f77d8-d83b-4a7d-9400-a0c15aba0664\" (UID: \"5d8f77d8-d83b-4a7d-9400-a0c15aba0664\") " Oct 10 17:57:17 crc kubenswrapper[4799]: I1010 17:57:17.786294 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d8f77d8-d83b-4a7d-9400-a0c15aba0664-kube-api-access-ff6fw" (OuterVolumeSpecName: "kube-api-access-ff6fw") pod "5d8f77d8-d83b-4a7d-9400-a0c15aba0664" (UID: "5d8f77d8-d83b-4a7d-9400-a0c15aba0664"). InnerVolumeSpecName "kube-api-access-ff6fw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:57:17 crc kubenswrapper[4799]: I1010 17:57:17.804732 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d8f77d8-d83b-4a7d-9400-a0c15aba0664-config" (OuterVolumeSpecName: "config") pod "5d8f77d8-d83b-4a7d-9400-a0c15aba0664" (UID: "5d8f77d8-d83b-4a7d-9400-a0c15aba0664"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 17:57:17 crc kubenswrapper[4799]: I1010 17:57:17.804982 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d8f77d8-d83b-4a7d-9400-a0c15aba0664-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5d8f77d8-d83b-4a7d-9400-a0c15aba0664" (UID: "5d8f77d8-d83b-4a7d-9400-a0c15aba0664"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 17:57:17 crc kubenswrapper[4799]: I1010 17:57:17.818889 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d8f77d8-d83b-4a7d-9400-a0c15aba0664-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5d8f77d8-d83b-4a7d-9400-a0c15aba0664" (UID: "5d8f77d8-d83b-4a7d-9400-a0c15aba0664"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 17:57:17 crc kubenswrapper[4799]: I1010 17:57:17.884063 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5d8f77d8-d83b-4a7d-9400-a0c15aba0664-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 10 17:57:17 crc kubenswrapper[4799]: I1010 17:57:17.884306 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5d8f77d8-d83b-4a7d-9400-a0c15aba0664-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 10 17:57:17 crc kubenswrapper[4799]: I1010 17:57:17.884439 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ff6fw\" (UniqueName: \"kubernetes.io/projected/5d8f77d8-d83b-4a7d-9400-a0c15aba0664-kube-api-access-ff6fw\") on node \"crc\" DevicePath \"\"" Oct 10 17:57:17 crc kubenswrapper[4799]: I1010 17:57:17.884917 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d8f77d8-d83b-4a7d-9400-a0c15aba0664-config\") on node \"crc\" DevicePath \"\"" Oct 10 17:57:18 crc kubenswrapper[4799]: I1010 17:57:18.342576 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56d7fcb4c-5gcwt" event={"ID":"5d8f77d8-d83b-4a7d-9400-a0c15aba0664","Type":"ContainerDied","Data":"e8d727643bc4839cefcaae59be85a1774f4b20a0a4d9e97a73a95801c3430f35"} Oct 10 17:57:18 crc kubenswrapper[4799]: I1010 17:57:18.342609 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56d7fcb4c-5gcwt" Oct 10 17:57:18 crc kubenswrapper[4799]: I1010 17:57:18.342960 4799 scope.go:117] "RemoveContainer" containerID="86ae59400644c3098efcbcbc86777cd4a46dea6b5953fe7f06563fe20fae6e5b" Oct 10 17:57:18 crc kubenswrapper[4799]: I1010 17:57:18.349197 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5cbf6d5c75-9gk4t" event={"ID":"f30813b7-4ff2-48c2-81f0-0e00fbb208e9","Type":"ContainerStarted","Data":"6780017d6e1afe6aaf33465514aa98b8c1142613e9d0e75a2b47797941cc2137"} Oct 10 17:57:18 crc kubenswrapper[4799]: I1010 17:57:18.349395 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5cbf6d5c75-9gk4t" Oct 10 17:57:18 crc kubenswrapper[4799]: I1010 17:57:18.380905 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5cbf6d5c75-9gk4t" podStartSLOduration=3.380849886 podStartE2EDuration="3.380849886s" podCreationTimestamp="2025-10-10 17:57:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 17:57:18.369423458 +0000 UTC m=+5131.877747583" watchObservedRunningTime="2025-10-10 17:57:18.380849886 +0000 UTC m=+5131.889174021" Oct 10 17:57:18 crc kubenswrapper[4799]: I1010 17:57:18.440515 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56d7fcb4c-5gcwt"] Oct 10 17:57:18 crc kubenswrapper[4799]: I1010 17:57:18.447014 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-56d7fcb4c-5gcwt"] Oct 10 17:57:19 crc kubenswrapper[4799]: I1010 17:57:19.255427 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-copy-data"] Oct 10 17:57:19 crc kubenswrapper[4799]: E1010 17:57:19.256567 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d8f77d8-d83b-4a7d-9400-a0c15aba0664" containerName="init" Oct 10 17:57:19 crc 
Oct 10 17:57:19 crc kubenswrapper[4799]: I1010 17:57:19.256858 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d8f77d8-d83b-4a7d-9400-a0c15aba0664" containerName="init"
Oct 10 17:57:19 crc kubenswrapper[4799]: I1010 17:57:19.259433 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data"
Oct 10 17:57:19 crc kubenswrapper[4799]: I1010 17:57:19.262212 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovn-data-cert"
Oct 10 17:57:19 crc kubenswrapper[4799]: I1010 17:57:19.262466 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"]
Oct 10 17:57:19 crc kubenswrapper[4799]: I1010 17:57:19.408316 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/dad0e268-f7c7-4e98-a300-9943db4ae46b-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"dad0e268-f7c7-4e98-a300-9943db4ae46b\") " pod="openstack/ovn-copy-data"
Oct 10 17:57:19 crc kubenswrapper[4799]: I1010 17:57:19.410098 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p9mmw\" (UniqueName: \"kubernetes.io/projected/dad0e268-f7c7-4e98-a300-9943db4ae46b-kube-api-access-p9mmw\") pod \"ovn-copy-data\" (UID: \"dad0e268-f7c7-4e98-a300-9943db4ae46b\") " pod="openstack/ovn-copy-data"
Oct 10 17:57:19 crc kubenswrapper[4799]: I1010 17:57:19.410474 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-3bddd74c-598e-4193-8b05-454847ddae91\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3bddd74c-598e-4193-8b05-454847ddae91\") pod \"ovn-copy-data\" (UID: \"dad0e268-f7c7-4e98-a300-9943db4ae46b\") " pod="openstack/ovn-copy-data"
Oct 10 17:57:19 crc kubenswrapper[4799]: I1010 17:57:19.415153 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d8f77d8-d83b-4a7d-9400-a0c15aba0664" path="/var/lib/kubelet/pods/5d8f77d8-d83b-4a7d-9400-a0c15aba0664/volumes"
Oct 10 17:57:19 crc kubenswrapper[4799]: I1010 17:57:19.514300 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-3bddd74c-598e-4193-8b05-454847ddae91\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3bddd74c-598e-4193-8b05-454847ddae91\") pod \"ovn-copy-data\" (UID: \"dad0e268-f7c7-4e98-a300-9943db4ae46b\") " pod="openstack/ovn-copy-data"
Oct 10 17:57:19 crc kubenswrapper[4799]: I1010 17:57:19.514792 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/dad0e268-f7c7-4e98-a300-9943db4ae46b-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"dad0e268-f7c7-4e98-a300-9943db4ae46b\") " pod="openstack/ovn-copy-data"
Oct 10 17:57:19 crc kubenswrapper[4799]: I1010 17:57:19.515074 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p9mmw\" (UniqueName: \"kubernetes.io/projected/dad0e268-f7c7-4e98-a300-9943db4ae46b-kube-api-access-p9mmw\") pod \"ovn-copy-data\" (UID: \"dad0e268-f7c7-4e98-a300-9943db4ae46b\") " pod="openstack/ovn-copy-data"
Oct 10 17:57:19 crc kubenswrapper[4799]: I1010 17:57:19.519846 4799 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Oct 10 17:57:19 crc kubenswrapper[4799]: I1010 17:57:19.519917 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-3bddd74c-598e-4193-8b05-454847ddae91\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3bddd74c-598e-4193-8b05-454847ddae91\") pod \"ovn-copy-data\" (UID: \"dad0e268-f7c7-4e98-a300-9943db4ae46b\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/cbf629c6cf1e6700fc4c786815fff1dc0de1f8e9c6e130866fc4507f91ddbee2/globalmount\"" pod="openstack/ovn-copy-data"
Oct 10 17:57:19 crc kubenswrapper[4799]: I1010 17:57:19.983711 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/dad0e268-f7c7-4e98-a300-9943db4ae46b-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"dad0e268-f7c7-4e98-a300-9943db4ae46b\") " pod="openstack/ovn-copy-data"
Oct 10 17:57:19 crc kubenswrapper[4799]: I1010 17:57:19.985000 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p9mmw\" (UniqueName: \"kubernetes.io/projected/dad0e268-f7c7-4e98-a300-9943db4ae46b-kube-api-access-p9mmw\") pod \"ovn-copy-data\" (UID: \"dad0e268-f7c7-4e98-a300-9943db4ae46b\") " pod="openstack/ovn-copy-data"
Oct 10 17:57:20 crc kubenswrapper[4799]: I1010 17:57:20.125400 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-3bddd74c-598e-4193-8b05-454847ddae91\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3bddd74c-598e-4193-8b05-454847ddae91\") pod \"ovn-copy-data\" (UID: \"dad0e268-f7c7-4e98-a300-9943db4ae46b\") " pod="openstack/ovn-copy-data"
Oct 10 17:57:20 crc kubenswrapper[4799]: I1010 17:57:20.189883 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data"
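The csi_attacher message above is the capability check around volume staging: MountDevice (NodeStageVolume at the global mount path) only runs when the driver advertises STAGE_UNSTAGE_VOLUME, and the kubevirt hostpath provisioner does not, so the volume goes straight to the per-pod SetUp. A sketch of that branch, an assumed shape rather than the real csi_attacher code:

package main

import "fmt"

type nodeCapability string

const stageUnstage nodeCapability = "STAGE_UNSTAGE_VOLUME"

// mountDevice stages the volume only when the driver supports staging;
// otherwise it is skipped and NodePublishVolume does all the work later.
func mountDevice(caps map[nodeCapability]bool, volumeID string) {
	if !caps[stageUnstage] {
		fmt.Printf("STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice for %s\n", volumeID)
		return
	}
	fmt.Println("NodeStageVolume at globalmount path for", volumeID)
}

func main() {
	// The hostpath provisioner reports no staging capability.
	mountDevice(map[nodeCapability]bool{}, "pvc-3bddd74c-598e-4193-8b05-454847ddae91")
}

Skipping the stage step is why the log still prints "MountVolume.MountDevice succeeded" immediately afterwards: the no-op counts as success.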
Need to start a new one" pod="openstack/ovn-copy-data" Oct 10 17:57:20 crc kubenswrapper[4799]: I1010 17:57:20.721675 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"] Oct 10 17:57:20 crc kubenswrapper[4799]: W1010 17:57:20.727496 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddad0e268_f7c7_4e98_a300_9943db4ae46b.slice/crio-ee69fd33a1f33f0a6d50fa05d31f05b244efe65b9aedbeb4add795d4e34e57c8 WatchSource:0}: Error finding container ee69fd33a1f33f0a6d50fa05d31f05b244efe65b9aedbeb4add795d4e34e57c8: Status 404 returned error can't find the container with id ee69fd33a1f33f0a6d50fa05d31f05b244efe65b9aedbeb4add795d4e34e57c8 Oct 10 17:57:20 crc kubenswrapper[4799]: I1010 17:57:20.729589 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 10 17:57:21 crc kubenswrapper[4799]: I1010 17:57:21.382302 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"dad0e268-f7c7-4e98-a300-9943db4ae46b","Type":"ContainerStarted","Data":"ee69fd33a1f33f0a6d50fa05d31f05b244efe65b9aedbeb4add795d4e34e57c8"} Oct 10 17:57:24 crc kubenswrapper[4799]: I1010 17:57:24.402341 4799 scope.go:117] "RemoveContainer" containerID="5e973f476cb0655a6e33e886e2a59fc6754febf3bf5a4718abcef307858985dd" Oct 10 17:57:24 crc kubenswrapper[4799]: E1010 17:57:24.403146 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:57:24 crc kubenswrapper[4799]: I1010 17:57:24.420176 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"dad0e268-f7c7-4e98-a300-9943db4ae46b","Type":"ContainerStarted","Data":"43d7fb51961135812355c5249d7be1a2c2670f6fc9821e4c30b0041592525a7a"} Oct 10 17:57:24 crc kubenswrapper[4799]: I1010 17:57:24.449120 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-copy-data" podStartSLOduration=3.612149354 podStartE2EDuration="6.449088061s" podCreationTimestamp="2025-10-10 17:57:18 +0000 UTC" firstStartedPulling="2025-10-10 17:57:20.729317637 +0000 UTC m=+5134.237641762" lastFinishedPulling="2025-10-10 17:57:23.566256314 +0000 UTC m=+5137.074580469" observedRunningTime="2025-10-10 17:57:24.443560457 +0000 UTC m=+5137.951884642" watchObservedRunningTime="2025-10-10 17:57:24.449088061 +0000 UTC m=+5137.957412216" Oct 10 17:57:26 crc kubenswrapper[4799]: I1010 17:57:26.304051 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5cbf6d5c75-9gk4t" Oct 10 17:57:26 crc kubenswrapper[4799]: I1010 17:57:26.376572 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5fdc957c47-h7qx2"] Oct 10 17:57:26 crc kubenswrapper[4799]: I1010 17:57:26.376886 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5fdc957c47-h7qx2" podUID="29388aea-227e-4b3b-96c1-5edb9ffa644d" containerName="dnsmasq-dns" containerID="cri-o://3968be08b182b99b94ecebe232fe049b82f8716d786b3ff76e347437be1acb2d" gracePeriod=10 Oct 10 17:57:26 crc kubenswrapper[4799]: I1010 
Oct 10 17:57:26 crc kubenswrapper[4799]: I1010 17:57:26.971452 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tfsxb\" (UniqueName: \"kubernetes.io/projected/29388aea-227e-4b3b-96c1-5edb9ffa644d-kube-api-access-tfsxb\") pod \"29388aea-227e-4b3b-96c1-5edb9ffa644d\" (UID: \"29388aea-227e-4b3b-96c1-5edb9ffa644d\") "
Oct 10 17:57:26 crc kubenswrapper[4799]: I1010 17:57:26.971644 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29388aea-227e-4b3b-96c1-5edb9ffa644d-config\") pod \"29388aea-227e-4b3b-96c1-5edb9ffa644d\" (UID: \"29388aea-227e-4b3b-96c1-5edb9ffa644d\") "
Oct 10 17:57:26 crc kubenswrapper[4799]: I1010 17:57:26.971731 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/29388aea-227e-4b3b-96c1-5edb9ffa644d-dns-svc\") pod \"29388aea-227e-4b3b-96c1-5edb9ffa644d\" (UID: \"29388aea-227e-4b3b-96c1-5edb9ffa644d\") "
Oct 10 17:57:26 crc kubenswrapper[4799]: I1010 17:57:26.980039 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29388aea-227e-4b3b-96c1-5edb9ffa644d-kube-api-access-tfsxb" (OuterVolumeSpecName: "kube-api-access-tfsxb") pod "29388aea-227e-4b3b-96c1-5edb9ffa644d" (UID: "29388aea-227e-4b3b-96c1-5edb9ffa644d"). InnerVolumeSpecName "kube-api-access-tfsxb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 17:57:27 crc kubenswrapper[4799]: I1010 17:57:27.037405 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29388aea-227e-4b3b-96c1-5edb9ffa644d-config" (OuterVolumeSpecName: "config") pod "29388aea-227e-4b3b-96c1-5edb9ffa644d" (UID: "29388aea-227e-4b3b-96c1-5edb9ffa644d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 10 17:57:27 crc kubenswrapper[4799]: I1010 17:57:27.039894 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29388aea-227e-4b3b-96c1-5edb9ffa644d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "29388aea-227e-4b3b-96c1-5edb9ffa644d" (UID: "29388aea-227e-4b3b-96c1-5edb9ffa644d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 10 17:57:27 crc kubenswrapper[4799]: I1010 17:57:27.076272 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/29388aea-227e-4b3b-96c1-5edb9ffa644d-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 10 17:57:27 crc kubenswrapper[4799]: I1010 17:57:27.076332 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tfsxb\" (UniqueName: \"kubernetes.io/projected/29388aea-227e-4b3b-96c1-5edb9ffa644d-kube-api-access-tfsxb\") on node \"crc\" DevicePath \"\""
Oct 10 17:57:27 crc kubenswrapper[4799]: I1010 17:57:27.076350 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29388aea-227e-4b3b-96c1-5edb9ffa644d-config\") on node \"crc\" DevicePath \"\""
Oct 10 17:57:27 crc kubenswrapper[4799]: I1010 17:57:27.470857 4799 generic.go:334] "Generic (PLEG): container finished" podID="29388aea-227e-4b3b-96c1-5edb9ffa644d" containerID="3968be08b182b99b94ecebe232fe049b82f8716d786b3ff76e347437be1acb2d" exitCode=0
Oct 10 17:57:27 crc kubenswrapper[4799]: I1010 17:57:27.470927 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fdc957c47-h7qx2" event={"ID":"29388aea-227e-4b3b-96c1-5edb9ffa644d","Type":"ContainerDied","Data":"3968be08b182b99b94ecebe232fe049b82f8716d786b3ff76e347437be1acb2d"}
Oct 10 17:57:27 crc kubenswrapper[4799]: I1010 17:57:27.470974 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fdc957c47-h7qx2" event={"ID":"29388aea-227e-4b3b-96c1-5edb9ffa644d","Type":"ContainerDied","Data":"85e2561153d0f09a72626e5c4bbffd69a2b78e8ff6618101ba2bd5344844e5c8"}
Oct 10 17:57:27 crc kubenswrapper[4799]: I1010 17:57:27.471002 4799 scope.go:117] "RemoveContainer" containerID="3968be08b182b99b94ecebe232fe049b82f8716d786b3ff76e347437be1acb2d"
Oct 10 17:57:27 crc kubenswrapper[4799]: I1010 17:57:27.471004 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5fdc957c47-h7qx2"
Oct 10 17:57:27 crc kubenswrapper[4799]: I1010 17:57:27.514513 4799 scope.go:117] "RemoveContainer" containerID="648ac82bd71112d1e8970237fcd6e7b637cfa55d9729676f2eed8130a04a57bf"
Oct 10 17:57:27 crc kubenswrapper[4799]: I1010 17:57:27.517384 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5fdc957c47-h7qx2"]
Oct 10 17:57:27 crc kubenswrapper[4799]: I1010 17:57:27.535000 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5fdc957c47-h7qx2"]
Oct 10 17:57:27 crc kubenswrapper[4799]: I1010 17:57:27.555077 4799 scope.go:117] "RemoveContainer" containerID="3968be08b182b99b94ecebe232fe049b82f8716d786b3ff76e347437be1acb2d"
Oct 10 17:57:27 crc kubenswrapper[4799]: E1010 17:57:27.555612 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3968be08b182b99b94ecebe232fe049b82f8716d786b3ff76e347437be1acb2d\": container with ID starting with 3968be08b182b99b94ecebe232fe049b82f8716d786b3ff76e347437be1acb2d not found: ID does not exist" containerID="3968be08b182b99b94ecebe232fe049b82f8716d786b3ff76e347437be1acb2d"
Oct 10 17:57:27 crc kubenswrapper[4799]: I1010 17:57:27.555667 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3968be08b182b99b94ecebe232fe049b82f8716d786b3ff76e347437be1acb2d"} err="failed to get container status \"3968be08b182b99b94ecebe232fe049b82f8716d786b3ff76e347437be1acb2d\": rpc error: code = NotFound desc = could not find container \"3968be08b182b99b94ecebe232fe049b82f8716d786b3ff76e347437be1acb2d\": container with ID starting with 3968be08b182b99b94ecebe232fe049b82f8716d786b3ff76e347437be1acb2d not found: ID does not exist"
Oct 10 17:57:27 crc kubenswrapper[4799]: I1010 17:57:27.555703 4799 scope.go:117] "RemoveContainer" containerID="648ac82bd71112d1e8970237fcd6e7b637cfa55d9729676f2eed8130a04a57bf"
Oct 10 17:57:27 crc kubenswrapper[4799]: E1010 17:57:27.556303 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"648ac82bd71112d1e8970237fcd6e7b637cfa55d9729676f2eed8130a04a57bf\": container with ID starting with 648ac82bd71112d1e8970237fcd6e7b637cfa55d9729676f2eed8130a04a57bf not found: ID does not exist" containerID="648ac82bd71112d1e8970237fcd6e7b637cfa55d9729676f2eed8130a04a57bf"
Oct 10 17:57:27 crc kubenswrapper[4799]: I1010 17:57:27.556346 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"648ac82bd71112d1e8970237fcd6e7b637cfa55d9729676f2eed8130a04a57bf"} err="failed to get container status \"648ac82bd71112d1e8970237fcd6e7b637cfa55d9729676f2eed8130a04a57bf\": rpc error: code = NotFound desc = could not find container \"648ac82bd71112d1e8970237fcd6e7b637cfa55d9729676f2eed8130a04a57bf\": container with ID starting with 648ac82bd71112d1e8970237fcd6e7b637cfa55d9729676f2eed8130a04a57bf not found: ID does not exist"
Oct 10 17:57:29 crc kubenswrapper[4799]: I1010 17:57:29.422371 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29388aea-227e-4b3b-96c1-5edb9ffa644d" path="/var/lib/kubelet/pods/29388aea-227e-4b3b-96c1-5edb9ffa644d/volumes"
Oct 10 17:57:30 crc kubenswrapper[4799]: I1010 17:57:30.656844 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"]
Oct 10 17:57:30 crc kubenswrapper[4799]: E1010 17:57:30.657289 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29388aea-227e-4b3b-96c1-5edb9ffa644d" containerName="dnsmasq-dns"
removing container" podUID="29388aea-227e-4b3b-96c1-5edb9ffa644d" containerName="dnsmasq-dns" Oct 10 17:57:30 crc kubenswrapper[4799]: I1010 17:57:30.657307 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="29388aea-227e-4b3b-96c1-5edb9ffa644d" containerName="dnsmasq-dns" Oct 10 17:57:30 crc kubenswrapper[4799]: E1010 17:57:30.657338 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29388aea-227e-4b3b-96c1-5edb9ffa644d" containerName="init" Oct 10 17:57:30 crc kubenswrapper[4799]: I1010 17:57:30.657346 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="29388aea-227e-4b3b-96c1-5edb9ffa644d" containerName="init" Oct 10 17:57:30 crc kubenswrapper[4799]: I1010 17:57:30.657549 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="29388aea-227e-4b3b-96c1-5edb9ffa644d" containerName="dnsmasq-dns" Oct 10 17:57:30 crc kubenswrapper[4799]: I1010 17:57:30.658607 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Oct 10 17:57:30 crc kubenswrapper[4799]: I1010 17:57:30.661847 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Oct 10 17:57:30 crc kubenswrapper[4799]: I1010 17:57:30.662236 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-jmj2w" Oct 10 17:57:30 crc kubenswrapper[4799]: I1010 17:57:30.663822 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Oct 10 17:57:30 crc kubenswrapper[4799]: I1010 17:57:30.664746 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Oct 10 17:57:30 crc kubenswrapper[4799]: I1010 17:57:30.744696 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56df8c28-dca8-463f-997b-d6c986163dfe-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"56df8c28-dca8-463f-997b-d6c986163dfe\") " pod="openstack/ovn-northd-0" Oct 10 17:57:30 crc kubenswrapper[4799]: I1010 17:57:30.744744 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/56df8c28-dca8-463f-997b-d6c986163dfe-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"56df8c28-dca8-463f-997b-d6c986163dfe\") " pod="openstack/ovn-northd-0" Oct 10 17:57:30 crc kubenswrapper[4799]: I1010 17:57:30.744796 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56df8c28-dca8-463f-997b-d6c986163dfe-config\") pod \"ovn-northd-0\" (UID: \"56df8c28-dca8-463f-997b-d6c986163dfe\") " pod="openstack/ovn-northd-0" Oct 10 17:57:30 crc kubenswrapper[4799]: I1010 17:57:30.745090 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79h6z\" (UniqueName: \"kubernetes.io/projected/56df8c28-dca8-463f-997b-d6c986163dfe-kube-api-access-79h6z\") pod \"ovn-northd-0\" (UID: \"56df8c28-dca8-463f-997b-d6c986163dfe\") " pod="openstack/ovn-northd-0" Oct 10 17:57:30 crc kubenswrapper[4799]: I1010 17:57:30.745260 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/56df8c28-dca8-463f-997b-d6c986163dfe-scripts\") pod \"ovn-northd-0\" (UID: \"56df8c28-dca8-463f-997b-d6c986163dfe\") " pod="openstack/ovn-northd-0" Oct 10 
17:57:30 crc kubenswrapper[4799]: I1010 17:57:30.847182 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79h6z\" (UniqueName: \"kubernetes.io/projected/56df8c28-dca8-463f-997b-d6c986163dfe-kube-api-access-79h6z\") pod \"ovn-northd-0\" (UID: \"56df8c28-dca8-463f-997b-d6c986163dfe\") " pod="openstack/ovn-northd-0" Oct 10 17:57:30 crc kubenswrapper[4799]: I1010 17:57:30.847255 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/56df8c28-dca8-463f-997b-d6c986163dfe-scripts\") pod \"ovn-northd-0\" (UID: \"56df8c28-dca8-463f-997b-d6c986163dfe\") " pod="openstack/ovn-northd-0" Oct 10 17:57:30 crc kubenswrapper[4799]: I1010 17:57:30.847970 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56df8c28-dca8-463f-997b-d6c986163dfe-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"56df8c28-dca8-463f-997b-d6c986163dfe\") " pod="openstack/ovn-northd-0" Oct 10 17:57:30 crc kubenswrapper[4799]: I1010 17:57:30.848007 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/56df8c28-dca8-463f-997b-d6c986163dfe-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"56df8c28-dca8-463f-997b-d6c986163dfe\") " pod="openstack/ovn-northd-0" Oct 10 17:57:30 crc kubenswrapper[4799]: I1010 17:57:30.848082 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56df8c28-dca8-463f-997b-d6c986163dfe-config\") pod \"ovn-northd-0\" (UID: \"56df8c28-dca8-463f-997b-d6c986163dfe\") " pod="openstack/ovn-northd-0" Oct 10 17:57:30 crc kubenswrapper[4799]: I1010 17:57:30.849594 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/56df8c28-dca8-463f-997b-d6c986163dfe-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"56df8c28-dca8-463f-997b-d6c986163dfe\") " pod="openstack/ovn-northd-0" Oct 10 17:57:30 crc kubenswrapper[4799]: I1010 17:57:30.849836 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56df8c28-dca8-463f-997b-d6c986163dfe-config\") pod \"ovn-northd-0\" (UID: \"56df8c28-dca8-463f-997b-d6c986163dfe\") " pod="openstack/ovn-northd-0" Oct 10 17:57:30 crc kubenswrapper[4799]: I1010 17:57:30.850101 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/56df8c28-dca8-463f-997b-d6c986163dfe-scripts\") pod \"ovn-northd-0\" (UID: \"56df8c28-dca8-463f-997b-d6c986163dfe\") " pod="openstack/ovn-northd-0" Oct 10 17:57:30 crc kubenswrapper[4799]: I1010 17:57:30.857207 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56df8c28-dca8-463f-997b-d6c986163dfe-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"56df8c28-dca8-463f-997b-d6c986163dfe\") " pod="openstack/ovn-northd-0" Oct 10 17:57:30 crc kubenswrapper[4799]: I1010 17:57:30.874407 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79h6z\" (UniqueName: \"kubernetes.io/projected/56df8c28-dca8-463f-997b-d6c986163dfe-kube-api-access-79h6z\") pod \"ovn-northd-0\" (UID: \"56df8c28-dca8-463f-997b-d6c986163dfe\") " pod="openstack/ovn-northd-0" Oct 10 17:57:30 crc kubenswrapper[4799]: I1010 17:57:30.986507 4799 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Oct 10 17:57:31 crc kubenswrapper[4799]: I1010 17:57:31.247956 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Oct 10 17:57:31 crc kubenswrapper[4799]: I1010 17:57:31.515039 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"56df8c28-dca8-463f-997b-d6c986163dfe","Type":"ContainerStarted","Data":"18d717094730997fe4d2e8a2df158485f6bc1297f7ebd7d8352ffe5c18c871e2"} Oct 10 17:57:31 crc kubenswrapper[4799]: I1010 17:57:31.515434 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"56df8c28-dca8-463f-997b-d6c986163dfe","Type":"ContainerStarted","Data":"abbe0c654102e3a999541dd9a77a1d1bbaab19c8b25a66dc7f0392066338bb2b"} Oct 10 17:57:32 crc kubenswrapper[4799]: I1010 17:57:32.529456 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"56df8c28-dca8-463f-997b-d6c986163dfe","Type":"ContainerStarted","Data":"e7a75da37552a4c70f1df2741097db4c28316c6d775777cf0851cbd5de075b1a"} Oct 10 17:57:32 crc kubenswrapper[4799]: I1010 17:57:32.529899 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Oct 10 17:57:32 crc kubenswrapper[4799]: I1010 17:57:32.563174 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.563139701 podStartE2EDuration="2.563139701s" podCreationTimestamp="2025-10-10 17:57:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 17:57:32.55410959 +0000 UTC m=+5146.062433775" watchObservedRunningTime="2025-10-10 17:57:32.563139701 +0000 UTC m=+5146.071463846" Oct 10 17:57:36 crc kubenswrapper[4799]: I1010 17:57:36.589094 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-tcjxt"] Oct 10 17:57:36 crc kubenswrapper[4799]: I1010 17:57:36.590860 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-tcjxt" Oct 10 17:57:36 crc kubenswrapper[4799]: I1010 17:57:36.603014 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-tcjxt"] Oct 10 17:57:36 crc kubenswrapper[4799]: I1010 17:57:36.662577 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wgqs\" (UniqueName: \"kubernetes.io/projected/d769744a-3c43-43c7-918f-0af5be0c4974-kube-api-access-2wgqs\") pod \"keystone-db-create-tcjxt\" (UID: \"d769744a-3c43-43c7-918f-0af5be0c4974\") " pod="openstack/keystone-db-create-tcjxt" Oct 10 17:57:36 crc kubenswrapper[4799]: I1010 17:57:36.764865 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wgqs\" (UniqueName: \"kubernetes.io/projected/d769744a-3c43-43c7-918f-0af5be0c4974-kube-api-access-2wgqs\") pod \"keystone-db-create-tcjxt\" (UID: \"d769744a-3c43-43c7-918f-0af5be0c4974\") " pod="openstack/keystone-db-create-tcjxt" Oct 10 17:57:36 crc kubenswrapper[4799]: I1010 17:57:36.787964 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2wgqs\" (UniqueName: \"kubernetes.io/projected/d769744a-3c43-43c7-918f-0af5be0c4974-kube-api-access-2wgqs\") pod \"keystone-db-create-tcjxt\" (UID: \"d769744a-3c43-43c7-918f-0af5be0c4974\") " pod="openstack/keystone-db-create-tcjxt" Oct 10 17:57:36 crc kubenswrapper[4799]: I1010 17:57:36.919230 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-tcjxt" Oct 10 17:57:37 crc kubenswrapper[4799]: I1010 17:57:37.424979 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-tcjxt"] Oct 10 17:57:37 crc kubenswrapper[4799]: W1010 17:57:37.435531 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd769744a_3c43_43c7_918f_0af5be0c4974.slice/crio-9ad2003ec30d8fb2d04a93af95576e808bb8b3cf4d9d1c2a07bd776b63a8124e WatchSource:0}: Error finding container 9ad2003ec30d8fb2d04a93af95576e808bb8b3cf4d9d1c2a07bd776b63a8124e: Status 404 returned error can't find the container with id 9ad2003ec30d8fb2d04a93af95576e808bb8b3cf4d9d1c2a07bd776b63a8124e Oct 10 17:57:37 crc kubenswrapper[4799]: I1010 17:57:37.588831 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-tcjxt" event={"ID":"d769744a-3c43-43c7-918f-0af5be0c4974","Type":"ContainerStarted","Data":"9ad2003ec30d8fb2d04a93af95576e808bb8b3cf4d9d1c2a07bd776b63a8124e"} Oct 10 17:57:38 crc kubenswrapper[4799]: I1010 17:57:38.402888 4799 scope.go:117] "RemoveContainer" containerID="5e973f476cb0655a6e33e886e2a59fc6754febf3bf5a4718abcef307858985dd" Oct 10 17:57:38 crc kubenswrapper[4799]: E1010 17:57:38.403998 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:57:38 crc kubenswrapper[4799]: I1010 17:57:38.606149 4799 generic.go:334] "Generic (PLEG): container finished" podID="d769744a-3c43-43c7-918f-0af5be0c4974" containerID="5cc5cb66dc221d431e002bb6c081715f35660d3980f73c0c87e4a69b0a54cd68" exitCode=0 Oct 10 
17:57:38 crc kubenswrapper[4799]: I1010 17:57:38.606214 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-tcjxt" event={"ID":"d769744a-3c43-43c7-918f-0af5be0c4974","Type":"ContainerDied","Data":"5cc5cb66dc221d431e002bb6c081715f35660d3980f73c0c87e4a69b0a54cd68"} Oct 10 17:57:40 crc kubenswrapper[4799]: I1010 17:57:40.012149 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-tcjxt" Oct 10 17:57:40 crc kubenswrapper[4799]: I1010 17:57:40.135320 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2wgqs\" (UniqueName: \"kubernetes.io/projected/d769744a-3c43-43c7-918f-0af5be0c4974-kube-api-access-2wgqs\") pod \"d769744a-3c43-43c7-918f-0af5be0c4974\" (UID: \"d769744a-3c43-43c7-918f-0af5be0c4974\") " Oct 10 17:57:40 crc kubenswrapper[4799]: I1010 17:57:40.141054 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d769744a-3c43-43c7-918f-0af5be0c4974-kube-api-access-2wgqs" (OuterVolumeSpecName: "kube-api-access-2wgqs") pod "d769744a-3c43-43c7-918f-0af5be0c4974" (UID: "d769744a-3c43-43c7-918f-0af5be0c4974"). InnerVolumeSpecName "kube-api-access-2wgqs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:57:40 crc kubenswrapper[4799]: I1010 17:57:40.237859 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2wgqs\" (UniqueName: \"kubernetes.io/projected/d769744a-3c43-43c7-918f-0af5be0c4974-kube-api-access-2wgqs\") on node \"crc\" DevicePath \"\"" Oct 10 17:57:40 crc kubenswrapper[4799]: I1010 17:57:40.637900 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-tcjxt" event={"ID":"d769744a-3c43-43c7-918f-0af5be0c4974","Type":"ContainerDied","Data":"9ad2003ec30d8fb2d04a93af95576e808bb8b3cf4d9d1c2a07bd776b63a8124e"} Oct 10 17:57:40 crc kubenswrapper[4799]: I1010 17:57:40.637966 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9ad2003ec30d8fb2d04a93af95576e808bb8b3cf4d9d1c2a07bd776b63a8124e" Oct 10 17:57:40 crc kubenswrapper[4799]: I1010 17:57:40.637965 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-tcjxt" Oct 10 17:57:46 crc kubenswrapper[4799]: I1010 17:57:46.050512 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Oct 10 17:57:46 crc kubenswrapper[4799]: I1010 17:57:46.591510 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-3f3c-account-create-ws7mb"] Oct 10 17:57:46 crc kubenswrapper[4799]: E1010 17:57:46.592084 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d769744a-3c43-43c7-918f-0af5be0c4974" containerName="mariadb-database-create" Oct 10 17:57:46 crc kubenswrapper[4799]: I1010 17:57:46.592123 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="d769744a-3c43-43c7-918f-0af5be0c4974" containerName="mariadb-database-create" Oct 10 17:57:46 crc kubenswrapper[4799]: I1010 17:57:46.592452 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="d769744a-3c43-43c7-918f-0af5be0c4974" containerName="mariadb-database-create" Oct 10 17:57:46 crc kubenswrapper[4799]: I1010 17:57:46.593368 4799 util.go:30] "No sandbox for pod can be found. 
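The span above traces a complete short-lived job pod: SyncLoop ADD, token-volume mount, ContainerStarted, ContainerDied with exitCode=0, unmount, detach, and stale-state cleanup. When triaging a journal like this it helps to fold the entries into per-pod timelines; a sketch that only assumes the pod="ns/name" and pods=["ns/name"] fields visible in these lines:

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

// Matches both pod="openstack/foo" and pods=["openstack/foo"].
var podRe = regexp.MustCompile(`pods?=\[?"([^"\]]+)"`)

func main() {
	timeline := map[string][]string{}
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 1024*1024), 1024*1024) // journal lines can be long
	for sc.Scan() {
		line := sc.Text()
		if m := podRe.FindStringSubmatch(line); m != nil {
			if len(line) > 120 {
				line = line[:120] // keep the timestamp and message head
			}
			timeline[m[1]] = append(timeline[m[1]], line)
		}
	}
	for pod, events := range timeline {
		fmt.Printf("%s: %d events\n", pod, len(events))
	}
}

Fed the kubelet journal on stdin, this prints an event count per pod; printing the stored lines instead gives a chronological per-pod trace of exactly the ADD/UPDATE/DELETE and PLEG activity shown above.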
Need to start a new one" pod="openstack/keystone-3f3c-account-create-ws7mb" Oct 10 17:57:46 crc kubenswrapper[4799]: I1010 17:57:46.596059 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Oct 10 17:57:46 crc kubenswrapper[4799]: I1010 17:57:46.601469 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-3f3c-account-create-ws7mb"] Oct 10 17:57:46 crc kubenswrapper[4799]: I1010 17:57:46.656066 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfnsz\" (UniqueName: \"kubernetes.io/projected/30bdf99b-c4af-4caa-87db-2ff622e2eecd-kube-api-access-zfnsz\") pod \"keystone-3f3c-account-create-ws7mb\" (UID: \"30bdf99b-c4af-4caa-87db-2ff622e2eecd\") " pod="openstack/keystone-3f3c-account-create-ws7mb" Oct 10 17:57:46 crc kubenswrapper[4799]: I1010 17:57:46.758062 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfnsz\" (UniqueName: \"kubernetes.io/projected/30bdf99b-c4af-4caa-87db-2ff622e2eecd-kube-api-access-zfnsz\") pod \"keystone-3f3c-account-create-ws7mb\" (UID: \"30bdf99b-c4af-4caa-87db-2ff622e2eecd\") " pod="openstack/keystone-3f3c-account-create-ws7mb" Oct 10 17:57:46 crc kubenswrapper[4799]: I1010 17:57:46.785672 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfnsz\" (UniqueName: \"kubernetes.io/projected/30bdf99b-c4af-4caa-87db-2ff622e2eecd-kube-api-access-zfnsz\") pod \"keystone-3f3c-account-create-ws7mb\" (UID: \"30bdf99b-c4af-4caa-87db-2ff622e2eecd\") " pod="openstack/keystone-3f3c-account-create-ws7mb" Oct 10 17:57:46 crc kubenswrapper[4799]: I1010 17:57:46.912276 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-3f3c-account-create-ws7mb" Oct 10 17:57:47 crc kubenswrapper[4799]: I1010 17:57:47.372358 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-3f3c-account-create-ws7mb"] Oct 10 17:57:47 crc kubenswrapper[4799]: W1010 17:57:47.375477 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod30bdf99b_c4af_4caa_87db_2ff622e2eecd.slice/crio-499332cf25e0cb57610b4d2ac3e54290a4322bd79f52ecb7586be9927810ee75 WatchSource:0}: Error finding container 499332cf25e0cb57610b4d2ac3e54290a4322bd79f52ecb7586be9927810ee75: Status 404 returned error can't find the container with id 499332cf25e0cb57610b4d2ac3e54290a4322bd79f52ecb7586be9927810ee75 Oct 10 17:57:47 crc kubenswrapper[4799]: I1010 17:57:47.382084 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Oct 10 17:57:47 crc kubenswrapper[4799]: I1010 17:57:47.720828 4799 generic.go:334] "Generic (PLEG): container finished" podID="30bdf99b-c4af-4caa-87db-2ff622e2eecd" containerID="dbc7ff51b886053be7d30b5b79155481e18ab3aeb7c25036396ae983a24d25c0" exitCode=0 Oct 10 17:57:47 crc kubenswrapper[4799]: I1010 17:57:47.720931 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-3f3c-account-create-ws7mb" event={"ID":"30bdf99b-c4af-4caa-87db-2ff622e2eecd","Type":"ContainerDied","Data":"dbc7ff51b886053be7d30b5b79155481e18ab3aeb7c25036396ae983a24d25c0"} Oct 10 17:57:47 crc kubenswrapper[4799]: I1010 17:57:47.721149 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-3f3c-account-create-ws7mb" event={"ID":"30bdf99b-c4af-4caa-87db-2ff622e2eecd","Type":"ContainerStarted","Data":"499332cf25e0cb57610b4d2ac3e54290a4322bd79f52ecb7586be9927810ee75"} Oct 10 17:57:49 crc kubenswrapper[4799]: I1010 17:57:49.113553 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-3f3c-account-create-ws7mb" Oct 10 17:57:49 crc kubenswrapper[4799]: I1010 17:57:49.203781 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zfnsz\" (UniqueName: \"kubernetes.io/projected/30bdf99b-c4af-4caa-87db-2ff622e2eecd-kube-api-access-zfnsz\") pod \"30bdf99b-c4af-4caa-87db-2ff622e2eecd\" (UID: \"30bdf99b-c4af-4caa-87db-2ff622e2eecd\") " Oct 10 17:57:49 crc kubenswrapper[4799]: I1010 17:57:49.211781 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30bdf99b-c4af-4caa-87db-2ff622e2eecd-kube-api-access-zfnsz" (OuterVolumeSpecName: "kube-api-access-zfnsz") pod "30bdf99b-c4af-4caa-87db-2ff622e2eecd" (UID: "30bdf99b-c4af-4caa-87db-2ff622e2eecd"). InnerVolumeSpecName "kube-api-access-zfnsz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:57:49 crc kubenswrapper[4799]: I1010 17:57:49.306263 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zfnsz\" (UniqueName: \"kubernetes.io/projected/30bdf99b-c4af-4caa-87db-2ff622e2eecd-kube-api-access-zfnsz\") on node \"crc\" DevicePath \"\"" Oct 10 17:57:49 crc kubenswrapper[4799]: I1010 17:57:49.403587 4799 scope.go:117] "RemoveContainer" containerID="5e973f476cb0655a6e33e886e2a59fc6754febf3bf5a4718abcef307858985dd" Oct 10 17:57:49 crc kubenswrapper[4799]: E1010 17:57:49.403927 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:57:49 crc kubenswrapper[4799]: I1010 17:57:49.742101 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-3f3c-account-create-ws7mb" event={"ID":"30bdf99b-c4af-4caa-87db-2ff622e2eecd","Type":"ContainerDied","Data":"499332cf25e0cb57610b4d2ac3e54290a4322bd79f52ecb7586be9927810ee75"} Oct 10 17:57:49 crc kubenswrapper[4799]: I1010 17:57:49.742497 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="499332cf25e0cb57610b4d2ac3e54290a4322bd79f52ecb7586be9927810ee75" Oct 10 17:57:49 crc kubenswrapper[4799]: I1010 17:57:49.742216 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-3f3c-account-create-ws7mb" Oct 10 17:57:51 crc kubenswrapper[4799]: I1010 17:57:51.987317 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-jbxkg"] Oct 10 17:57:51 crc kubenswrapper[4799]: E1010 17:57:51.988052 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30bdf99b-c4af-4caa-87db-2ff622e2eecd" containerName="mariadb-account-create" Oct 10 17:57:51 crc kubenswrapper[4799]: I1010 17:57:51.988072 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="30bdf99b-c4af-4caa-87db-2ff622e2eecd" containerName="mariadb-account-create" Oct 10 17:57:51 crc kubenswrapper[4799]: I1010 17:57:51.988375 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="30bdf99b-c4af-4caa-87db-2ff622e2eecd" containerName="mariadb-account-create" Oct 10 17:57:51 crc kubenswrapper[4799]: I1010 17:57:51.989245 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-jbxkg" Oct 10 17:57:51 crc kubenswrapper[4799]: I1010 17:57:51.992729 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 10 17:57:51 crc kubenswrapper[4799]: I1010 17:57:51.992986 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 10 17:57:51 crc kubenswrapper[4799]: I1010 17:57:51.993249 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-85zxn" Oct 10 17:57:51 crc kubenswrapper[4799]: I1010 17:57:51.994560 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 10 17:57:51 crc kubenswrapper[4799]: I1010 17:57:51.996103 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-jbxkg"] Oct 10 17:57:52 crc kubenswrapper[4799]: I1010 17:57:52.066545 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xr2sx\" (UniqueName: \"kubernetes.io/projected/419d63e8-2e77-4702-af72-31e30165e453-kube-api-access-xr2sx\") pod \"keystone-db-sync-jbxkg\" (UID: \"419d63e8-2e77-4702-af72-31e30165e453\") " pod="openstack/keystone-db-sync-jbxkg" Oct 10 17:57:52 crc kubenswrapper[4799]: I1010 17:57:52.066788 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/419d63e8-2e77-4702-af72-31e30165e453-config-data\") pod \"keystone-db-sync-jbxkg\" (UID: \"419d63e8-2e77-4702-af72-31e30165e453\") " pod="openstack/keystone-db-sync-jbxkg" Oct 10 17:57:52 crc kubenswrapper[4799]: I1010 17:57:52.066870 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/419d63e8-2e77-4702-af72-31e30165e453-combined-ca-bundle\") pod \"keystone-db-sync-jbxkg\" (UID: \"419d63e8-2e77-4702-af72-31e30165e453\") " pod="openstack/keystone-db-sync-jbxkg" Oct 10 17:57:52 crc kubenswrapper[4799]: I1010 17:57:52.168943 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xr2sx\" (UniqueName: \"kubernetes.io/projected/419d63e8-2e77-4702-af72-31e30165e453-kube-api-access-xr2sx\") pod \"keystone-db-sync-jbxkg\" (UID: \"419d63e8-2e77-4702-af72-31e30165e453\") " pod="openstack/keystone-db-sync-jbxkg" Oct 10 17:57:52 crc kubenswrapper[4799]: I1010 17:57:52.169261 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/419d63e8-2e77-4702-af72-31e30165e453-config-data\") pod \"keystone-db-sync-jbxkg\" (UID: \"419d63e8-2e77-4702-af72-31e30165e453\") " pod="openstack/keystone-db-sync-jbxkg" Oct 10 17:57:52 crc kubenswrapper[4799]: I1010 17:57:52.169372 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/419d63e8-2e77-4702-af72-31e30165e453-combined-ca-bundle\") pod \"keystone-db-sync-jbxkg\" (UID: \"419d63e8-2e77-4702-af72-31e30165e453\") " pod="openstack/keystone-db-sync-jbxkg" Oct 10 17:57:52 crc kubenswrapper[4799]: I1010 17:57:52.175273 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/419d63e8-2e77-4702-af72-31e30165e453-combined-ca-bundle\") pod \"keystone-db-sync-jbxkg\" (UID: \"419d63e8-2e77-4702-af72-31e30165e453\") " 
pod="openstack/keystone-db-sync-jbxkg" Oct 10 17:57:52 crc kubenswrapper[4799]: I1010 17:57:52.182557 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/419d63e8-2e77-4702-af72-31e30165e453-config-data\") pod \"keystone-db-sync-jbxkg\" (UID: \"419d63e8-2e77-4702-af72-31e30165e453\") " pod="openstack/keystone-db-sync-jbxkg" Oct 10 17:57:52 crc kubenswrapper[4799]: I1010 17:57:52.194971 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xr2sx\" (UniqueName: \"kubernetes.io/projected/419d63e8-2e77-4702-af72-31e30165e453-kube-api-access-xr2sx\") pod \"keystone-db-sync-jbxkg\" (UID: \"419d63e8-2e77-4702-af72-31e30165e453\") " pod="openstack/keystone-db-sync-jbxkg" Oct 10 17:57:52 crc kubenswrapper[4799]: I1010 17:57:52.318340 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-jbxkg" Oct 10 17:57:52 crc kubenswrapper[4799]: I1010 17:57:52.773808 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-jbxkg"] Oct 10 17:57:53 crc kubenswrapper[4799]: I1010 17:57:53.782354 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-jbxkg" event={"ID":"419d63e8-2e77-4702-af72-31e30165e453","Type":"ContainerStarted","Data":"7cb047d0d62c0ebfb2408a9c5fd465068ed67a7963e00639f7e5858dc64dec55"} Oct 10 17:57:53 crc kubenswrapper[4799]: I1010 17:57:53.782857 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-jbxkg" event={"ID":"419d63e8-2e77-4702-af72-31e30165e453","Type":"ContainerStarted","Data":"c70c3328f60e33857dd1de0e5881a74e9fb2b845a1393c27adacfecf51df1a2c"} Oct 10 17:57:53 crc kubenswrapper[4799]: I1010 17:57:53.812018 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-jbxkg" podStartSLOduration=2.811990954 podStartE2EDuration="2.811990954s" podCreationTimestamp="2025-10-10 17:57:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 17:57:53.802350069 +0000 UTC m=+5167.310674224" watchObservedRunningTime="2025-10-10 17:57:53.811990954 +0000 UTC m=+5167.320315099" Oct 10 17:57:54 crc kubenswrapper[4799]: I1010 17:57:54.799601 4799 generic.go:334] "Generic (PLEG): container finished" podID="419d63e8-2e77-4702-af72-31e30165e453" containerID="7cb047d0d62c0ebfb2408a9c5fd465068ed67a7963e00639f7e5858dc64dec55" exitCode=0 Oct 10 17:57:54 crc kubenswrapper[4799]: I1010 17:57:54.799684 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-jbxkg" event={"ID":"419d63e8-2e77-4702-af72-31e30165e453","Type":"ContainerDied","Data":"7cb047d0d62c0ebfb2408a9c5fd465068ed67a7963e00639f7e5858dc64dec55"} Oct 10 17:57:56 crc kubenswrapper[4799]: I1010 17:57:56.312357 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-jbxkg" Oct 10 17:57:56 crc kubenswrapper[4799]: I1010 17:57:56.456669 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/419d63e8-2e77-4702-af72-31e30165e453-combined-ca-bundle\") pod \"419d63e8-2e77-4702-af72-31e30165e453\" (UID: \"419d63e8-2e77-4702-af72-31e30165e453\") " Oct 10 17:57:56 crc kubenswrapper[4799]: I1010 17:57:56.456814 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/419d63e8-2e77-4702-af72-31e30165e453-config-data\") pod \"419d63e8-2e77-4702-af72-31e30165e453\" (UID: \"419d63e8-2e77-4702-af72-31e30165e453\") " Oct 10 17:57:56 crc kubenswrapper[4799]: I1010 17:57:56.456878 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xr2sx\" (UniqueName: \"kubernetes.io/projected/419d63e8-2e77-4702-af72-31e30165e453-kube-api-access-xr2sx\") pod \"419d63e8-2e77-4702-af72-31e30165e453\" (UID: \"419d63e8-2e77-4702-af72-31e30165e453\") " Oct 10 17:57:56 crc kubenswrapper[4799]: I1010 17:57:56.461741 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/419d63e8-2e77-4702-af72-31e30165e453-kube-api-access-xr2sx" (OuterVolumeSpecName: "kube-api-access-xr2sx") pod "419d63e8-2e77-4702-af72-31e30165e453" (UID: "419d63e8-2e77-4702-af72-31e30165e453"). InnerVolumeSpecName "kube-api-access-xr2sx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:57:56 crc kubenswrapper[4799]: I1010 17:57:56.481109 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/419d63e8-2e77-4702-af72-31e30165e453-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "419d63e8-2e77-4702-af72-31e30165e453" (UID: "419d63e8-2e77-4702-af72-31e30165e453"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 17:57:56 crc kubenswrapper[4799]: I1010 17:57:56.496381 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/419d63e8-2e77-4702-af72-31e30165e453-config-data" (OuterVolumeSpecName: "config-data") pod "419d63e8-2e77-4702-af72-31e30165e453" (UID: "419d63e8-2e77-4702-af72-31e30165e453"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 17:57:56 crc kubenswrapper[4799]: I1010 17:57:56.558733 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/419d63e8-2e77-4702-af72-31e30165e453-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 17:57:56 crc kubenswrapper[4799]: I1010 17:57:56.558855 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/419d63e8-2e77-4702-af72-31e30165e453-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 17:57:56 crc kubenswrapper[4799]: I1010 17:57:56.558883 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xr2sx\" (UniqueName: \"kubernetes.io/projected/419d63e8-2e77-4702-af72-31e30165e453-kube-api-access-xr2sx\") on node \"crc\" DevicePath \"\"" Oct 10 17:57:56 crc kubenswrapper[4799]: I1010 17:57:56.824992 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-jbxkg" event={"ID":"419d63e8-2e77-4702-af72-31e30165e453","Type":"ContainerDied","Data":"c70c3328f60e33857dd1de0e5881a74e9fb2b845a1393c27adacfecf51df1a2c"} Oct 10 17:57:56 crc kubenswrapper[4799]: I1010 17:57:56.825042 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c70c3328f60e33857dd1de0e5881a74e9fb2b845a1393c27adacfecf51df1a2c" Oct 10 17:57:56 crc kubenswrapper[4799]: I1010 17:57:56.825103 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-jbxkg" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.070019 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6b7d9cf495-24s9n"] Oct 10 17:57:57 crc kubenswrapper[4799]: E1010 17:57:57.070514 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="419d63e8-2e77-4702-af72-31e30165e453" containerName="keystone-db-sync" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.070529 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="419d63e8-2e77-4702-af72-31e30165e453" containerName="keystone-db-sync" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.070690 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="419d63e8-2e77-4702-af72-31e30165e453" containerName="keystone-db-sync" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.074516 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b7d9cf495-24s9n" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.117691 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b7d9cf495-24s9n"] Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.133898 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-f6s6d"] Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.137321 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-f6s6d" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.144482 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.144732 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-85zxn" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.144856 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.144964 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.153985 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-f6s6d"] Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.171781 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81c07d06-a6c1-43c5-ada3-2e734679d614-config\") pod \"dnsmasq-dns-6b7d9cf495-24s9n\" (UID: \"81c07d06-a6c1-43c5-ada3-2e734679d614\") " pod="openstack/dnsmasq-dns-6b7d9cf495-24s9n" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.171826 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/81c07d06-a6c1-43c5-ada3-2e734679d614-ovsdbserver-nb\") pod \"dnsmasq-dns-6b7d9cf495-24s9n\" (UID: \"81c07d06-a6c1-43c5-ada3-2e734679d614\") " pod="openstack/dnsmasq-dns-6b7d9cf495-24s9n" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.171857 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/81c07d06-a6c1-43c5-ada3-2e734679d614-ovsdbserver-sb\") pod \"dnsmasq-dns-6b7d9cf495-24s9n\" (UID: \"81c07d06-a6c1-43c5-ada3-2e734679d614\") " pod="openstack/dnsmasq-dns-6b7d9cf495-24s9n" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.171986 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/81c07d06-a6c1-43c5-ada3-2e734679d614-dns-svc\") pod \"dnsmasq-dns-6b7d9cf495-24s9n\" (UID: \"81c07d06-a6c1-43c5-ada3-2e734679d614\") " pod="openstack/dnsmasq-dns-6b7d9cf495-24s9n" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.172230 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tsnf2\" (UniqueName: \"kubernetes.io/projected/81c07d06-a6c1-43c5-ada3-2e734679d614-kube-api-access-tsnf2\") pod \"dnsmasq-dns-6b7d9cf495-24s9n\" (UID: \"81c07d06-a6c1-43c5-ada3-2e734679d614\") " pod="openstack/dnsmasq-dns-6b7d9cf495-24s9n" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.273583 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/23d1f4f9-9bb3-4d7e-b061-594175b3c221-credential-keys\") pod \"keystone-bootstrap-f6s6d\" (UID: \"23d1f4f9-9bb3-4d7e-b061-594175b3c221\") " pod="openstack/keystone-bootstrap-f6s6d" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.273661 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81c07d06-a6c1-43c5-ada3-2e734679d614-config\") pod 
\"dnsmasq-dns-6b7d9cf495-24s9n\" (UID: \"81c07d06-a6c1-43c5-ada3-2e734679d614\") " pod="openstack/dnsmasq-dns-6b7d9cf495-24s9n" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.273684 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/81c07d06-a6c1-43c5-ada3-2e734679d614-ovsdbserver-nb\") pod \"dnsmasq-dns-6b7d9cf495-24s9n\" (UID: \"81c07d06-a6c1-43c5-ada3-2e734679d614\") " pod="openstack/dnsmasq-dns-6b7d9cf495-24s9n" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.273704 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/23d1f4f9-9bb3-4d7e-b061-594175b3c221-fernet-keys\") pod \"keystone-bootstrap-f6s6d\" (UID: \"23d1f4f9-9bb3-4d7e-b061-594175b3c221\") " pod="openstack/keystone-bootstrap-f6s6d" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.273724 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/81c07d06-a6c1-43c5-ada3-2e734679d614-ovsdbserver-sb\") pod \"dnsmasq-dns-6b7d9cf495-24s9n\" (UID: \"81c07d06-a6c1-43c5-ada3-2e734679d614\") " pod="openstack/dnsmasq-dns-6b7d9cf495-24s9n" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.273739 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23d1f4f9-9bb3-4d7e-b061-594175b3c221-combined-ca-bundle\") pod \"keystone-bootstrap-f6s6d\" (UID: \"23d1f4f9-9bb3-4d7e-b061-594175b3c221\") " pod="openstack/keystone-bootstrap-f6s6d" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.273770 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwt4d\" (UniqueName: \"kubernetes.io/projected/23d1f4f9-9bb3-4d7e-b061-594175b3c221-kube-api-access-fwt4d\") pod \"keystone-bootstrap-f6s6d\" (UID: \"23d1f4f9-9bb3-4d7e-b061-594175b3c221\") " pod="openstack/keystone-bootstrap-f6s6d" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.273807 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/81c07d06-a6c1-43c5-ada3-2e734679d614-dns-svc\") pod \"dnsmasq-dns-6b7d9cf495-24s9n\" (UID: \"81c07d06-a6c1-43c5-ada3-2e734679d614\") " pod="openstack/dnsmasq-dns-6b7d9cf495-24s9n" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.273835 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23d1f4f9-9bb3-4d7e-b061-594175b3c221-config-data\") pod \"keystone-bootstrap-f6s6d\" (UID: \"23d1f4f9-9bb3-4d7e-b061-594175b3c221\") " pod="openstack/keystone-bootstrap-f6s6d" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.273858 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23d1f4f9-9bb3-4d7e-b061-594175b3c221-scripts\") pod \"keystone-bootstrap-f6s6d\" (UID: \"23d1f4f9-9bb3-4d7e-b061-594175b3c221\") " pod="openstack/keystone-bootstrap-f6s6d" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.273901 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tsnf2\" (UniqueName: \"kubernetes.io/projected/81c07d06-a6c1-43c5-ada3-2e734679d614-kube-api-access-tsnf2\") pod 
\"dnsmasq-dns-6b7d9cf495-24s9n\" (UID: \"81c07d06-a6c1-43c5-ada3-2e734679d614\") " pod="openstack/dnsmasq-dns-6b7d9cf495-24s9n" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.275113 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81c07d06-a6c1-43c5-ada3-2e734679d614-config\") pod \"dnsmasq-dns-6b7d9cf495-24s9n\" (UID: \"81c07d06-a6c1-43c5-ada3-2e734679d614\") " pod="openstack/dnsmasq-dns-6b7d9cf495-24s9n" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.275196 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/81c07d06-a6c1-43c5-ada3-2e734679d614-ovsdbserver-nb\") pod \"dnsmasq-dns-6b7d9cf495-24s9n\" (UID: \"81c07d06-a6c1-43c5-ada3-2e734679d614\") " pod="openstack/dnsmasq-dns-6b7d9cf495-24s9n" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.275481 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/81c07d06-a6c1-43c5-ada3-2e734679d614-dns-svc\") pod \"dnsmasq-dns-6b7d9cf495-24s9n\" (UID: \"81c07d06-a6c1-43c5-ada3-2e734679d614\") " pod="openstack/dnsmasq-dns-6b7d9cf495-24s9n" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.275549 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/81c07d06-a6c1-43c5-ada3-2e734679d614-ovsdbserver-sb\") pod \"dnsmasq-dns-6b7d9cf495-24s9n\" (UID: \"81c07d06-a6c1-43c5-ada3-2e734679d614\") " pod="openstack/dnsmasq-dns-6b7d9cf495-24s9n" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.298993 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tsnf2\" (UniqueName: \"kubernetes.io/projected/81c07d06-a6c1-43c5-ada3-2e734679d614-kube-api-access-tsnf2\") pod \"dnsmasq-dns-6b7d9cf495-24s9n\" (UID: \"81c07d06-a6c1-43c5-ada3-2e734679d614\") " pod="openstack/dnsmasq-dns-6b7d9cf495-24s9n" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.375775 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/23d1f4f9-9bb3-4d7e-b061-594175b3c221-credential-keys\") pod \"keystone-bootstrap-f6s6d\" (UID: \"23d1f4f9-9bb3-4d7e-b061-594175b3c221\") " pod="openstack/keystone-bootstrap-f6s6d" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.375854 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/23d1f4f9-9bb3-4d7e-b061-594175b3c221-fernet-keys\") pod \"keystone-bootstrap-f6s6d\" (UID: \"23d1f4f9-9bb3-4d7e-b061-594175b3c221\") " pod="openstack/keystone-bootstrap-f6s6d" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.375877 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwt4d\" (UniqueName: \"kubernetes.io/projected/23d1f4f9-9bb3-4d7e-b061-594175b3c221-kube-api-access-fwt4d\") pod \"keystone-bootstrap-f6s6d\" (UID: \"23d1f4f9-9bb3-4d7e-b061-594175b3c221\") " pod="openstack/keystone-bootstrap-f6s6d" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.375891 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23d1f4f9-9bb3-4d7e-b061-594175b3c221-combined-ca-bundle\") pod \"keystone-bootstrap-f6s6d\" (UID: \"23d1f4f9-9bb3-4d7e-b061-594175b3c221\") " pod="openstack/keystone-bootstrap-f6s6d" Oct 
10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.375930 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23d1f4f9-9bb3-4d7e-b061-594175b3c221-config-data\") pod \"keystone-bootstrap-f6s6d\" (UID: \"23d1f4f9-9bb3-4d7e-b061-594175b3c221\") " pod="openstack/keystone-bootstrap-f6s6d" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.375955 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23d1f4f9-9bb3-4d7e-b061-594175b3c221-scripts\") pod \"keystone-bootstrap-f6s6d\" (UID: \"23d1f4f9-9bb3-4d7e-b061-594175b3c221\") " pod="openstack/keystone-bootstrap-f6s6d" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.379331 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23d1f4f9-9bb3-4d7e-b061-594175b3c221-combined-ca-bundle\") pod \"keystone-bootstrap-f6s6d\" (UID: \"23d1f4f9-9bb3-4d7e-b061-594175b3c221\") " pod="openstack/keystone-bootstrap-f6s6d" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.379400 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/23d1f4f9-9bb3-4d7e-b061-594175b3c221-fernet-keys\") pod \"keystone-bootstrap-f6s6d\" (UID: \"23d1f4f9-9bb3-4d7e-b061-594175b3c221\") " pod="openstack/keystone-bootstrap-f6s6d" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.379534 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23d1f4f9-9bb3-4d7e-b061-594175b3c221-scripts\") pod \"keystone-bootstrap-f6s6d\" (UID: \"23d1f4f9-9bb3-4d7e-b061-594175b3c221\") " pod="openstack/keystone-bootstrap-f6s6d" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.381406 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23d1f4f9-9bb3-4d7e-b061-594175b3c221-config-data\") pod \"keystone-bootstrap-f6s6d\" (UID: \"23d1f4f9-9bb3-4d7e-b061-594175b3c221\") " pod="openstack/keystone-bootstrap-f6s6d" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.386590 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/23d1f4f9-9bb3-4d7e-b061-594175b3c221-credential-keys\") pod \"keystone-bootstrap-f6s6d\" (UID: \"23d1f4f9-9bb3-4d7e-b061-594175b3c221\") " pod="openstack/keystone-bootstrap-f6s6d" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.391352 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwt4d\" (UniqueName: \"kubernetes.io/projected/23d1f4f9-9bb3-4d7e-b061-594175b3c221-kube-api-access-fwt4d\") pod \"keystone-bootstrap-f6s6d\" (UID: \"23d1f4f9-9bb3-4d7e-b061-594175b3c221\") " pod="openstack/keystone-bootstrap-f6s6d" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.406242 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b7d9cf495-24s9n" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.478286 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-f6s6d" Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.902414 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b7d9cf495-24s9n"] Oct 10 17:57:57 crc kubenswrapper[4799]: I1010 17:57:57.974844 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-f6s6d"] Oct 10 17:57:57 crc kubenswrapper[4799]: W1010 17:57:57.976838 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod23d1f4f9_9bb3_4d7e_b061_594175b3c221.slice/crio-6ffd27c7ecc3b4e00011da2c6eba39aba8c8eddb06b0182dd33291afbbcec24c WatchSource:0}: Error finding container 6ffd27c7ecc3b4e00011da2c6eba39aba8c8eddb06b0182dd33291afbbcec24c: Status 404 returned error can't find the container with id 6ffd27c7ecc3b4e00011da2c6eba39aba8c8eddb06b0182dd33291afbbcec24c Oct 10 17:57:58 crc kubenswrapper[4799]: I1010 17:57:58.845029 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-f6s6d" event={"ID":"23d1f4f9-9bb3-4d7e-b061-594175b3c221","Type":"ContainerStarted","Data":"4508f7112a9b38f537a7e70bbe8de8d2cbb851fcb269dc35bb42f56b5e1a511e"} Oct 10 17:57:58 crc kubenswrapper[4799]: I1010 17:57:58.845401 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-f6s6d" event={"ID":"23d1f4f9-9bb3-4d7e-b061-594175b3c221","Type":"ContainerStarted","Data":"6ffd27c7ecc3b4e00011da2c6eba39aba8c8eddb06b0182dd33291afbbcec24c"} Oct 10 17:57:58 crc kubenswrapper[4799]: I1010 17:57:58.849995 4799 generic.go:334] "Generic (PLEG): container finished" podID="81c07d06-a6c1-43c5-ada3-2e734679d614" containerID="1db11762b0dbaf9129fcd7afe1282bc73b3b0a2d9349c8cf28b69be8f32ac84d" exitCode=0 Oct 10 17:57:58 crc kubenswrapper[4799]: I1010 17:57:58.850226 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7d9cf495-24s9n" event={"ID":"81c07d06-a6c1-43c5-ada3-2e734679d614","Type":"ContainerDied","Data":"1db11762b0dbaf9129fcd7afe1282bc73b3b0a2d9349c8cf28b69be8f32ac84d"} Oct 10 17:57:58 crc kubenswrapper[4799]: I1010 17:57:58.850483 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7d9cf495-24s9n" event={"ID":"81c07d06-a6c1-43c5-ada3-2e734679d614","Type":"ContainerStarted","Data":"2945e01367df4b23af1cb90d0599cae75cc8f6b8bbe4be128e91bd9f31375e97"} Oct 10 17:57:58 crc kubenswrapper[4799]: I1010 17:57:58.917532 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-f6s6d" podStartSLOduration=1.9174965240000001 podStartE2EDuration="1.917496524s" podCreationTimestamp="2025-10-10 17:57:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 17:57:58.872679402 +0000 UTC m=+5172.381003557" watchObservedRunningTime="2025-10-10 17:57:58.917496524 +0000 UTC m=+5172.425820679" Oct 10 17:57:59 crc kubenswrapper[4799]: I1010 17:57:59.865867 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7d9cf495-24s9n" event={"ID":"81c07d06-a6c1-43c5-ada3-2e734679d614","Type":"ContainerStarted","Data":"24180e9f457afe208f0537ac29319dfe498db54c3fa76609183cd46286653bb7"} Oct 10 17:57:59 crc kubenswrapper[4799]: I1010 17:57:59.903488 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6b7d9cf495-24s9n" podStartSLOduration=2.903457985 
podStartE2EDuration="2.903457985s" podCreationTimestamp="2025-10-10 17:57:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 17:57:59.892205791 +0000 UTC m=+5173.400529936" watchObservedRunningTime="2025-10-10 17:57:59.903457985 +0000 UTC m=+5173.411782140" Oct 10 17:58:00 crc kubenswrapper[4799]: I1010 17:58:00.875583 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6b7d9cf495-24s9n" Oct 10 17:58:01 crc kubenswrapper[4799]: I1010 17:58:01.892134 4799 generic.go:334] "Generic (PLEG): container finished" podID="23d1f4f9-9bb3-4d7e-b061-594175b3c221" containerID="4508f7112a9b38f537a7e70bbe8de8d2cbb851fcb269dc35bb42f56b5e1a511e" exitCode=0 Oct 10 17:58:01 crc kubenswrapper[4799]: I1010 17:58:01.892204 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-f6s6d" event={"ID":"23d1f4f9-9bb3-4d7e-b061-594175b3c221","Type":"ContainerDied","Data":"4508f7112a9b38f537a7e70bbe8de8d2cbb851fcb269dc35bb42f56b5e1a511e"} Oct 10 17:58:02 crc kubenswrapper[4799]: I1010 17:58:02.403681 4799 scope.go:117] "RemoveContainer" containerID="5e973f476cb0655a6e33e886e2a59fc6754febf3bf5a4718abcef307858985dd" Oct 10 17:58:02 crc kubenswrapper[4799]: E1010 17:58:02.404752 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:58:03 crc kubenswrapper[4799]: I1010 17:58:03.209105 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-f6s6d" Oct 10 17:58:03 crc kubenswrapper[4799]: I1010 17:58:03.287339 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/23d1f4f9-9bb3-4d7e-b061-594175b3c221-fernet-keys\") pod \"23d1f4f9-9bb3-4d7e-b061-594175b3c221\" (UID: \"23d1f4f9-9bb3-4d7e-b061-594175b3c221\") " Oct 10 17:58:03 crc kubenswrapper[4799]: I1010 17:58:03.287425 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23d1f4f9-9bb3-4d7e-b061-594175b3c221-config-data\") pod \"23d1f4f9-9bb3-4d7e-b061-594175b3c221\" (UID: \"23d1f4f9-9bb3-4d7e-b061-594175b3c221\") " Oct 10 17:58:03 crc kubenswrapper[4799]: I1010 17:58:03.287452 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23d1f4f9-9bb3-4d7e-b061-594175b3c221-scripts\") pod \"23d1f4f9-9bb3-4d7e-b061-594175b3c221\" (UID: \"23d1f4f9-9bb3-4d7e-b061-594175b3c221\") " Oct 10 17:58:03 crc kubenswrapper[4799]: I1010 17:58:03.287525 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fwt4d\" (UniqueName: \"kubernetes.io/projected/23d1f4f9-9bb3-4d7e-b061-594175b3c221-kube-api-access-fwt4d\") pod \"23d1f4f9-9bb3-4d7e-b061-594175b3c221\" (UID: \"23d1f4f9-9bb3-4d7e-b061-594175b3c221\") " Oct 10 17:58:03 crc kubenswrapper[4799]: I1010 17:58:03.287638 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23d1f4f9-9bb3-4d7e-b061-594175b3c221-combined-ca-bundle\") pod \"23d1f4f9-9bb3-4d7e-b061-594175b3c221\" (UID: \"23d1f4f9-9bb3-4d7e-b061-594175b3c221\") " Oct 10 17:58:03 crc kubenswrapper[4799]: I1010 17:58:03.287665 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/23d1f4f9-9bb3-4d7e-b061-594175b3c221-credential-keys\") pod \"23d1f4f9-9bb3-4d7e-b061-594175b3c221\" (UID: \"23d1f4f9-9bb3-4d7e-b061-594175b3c221\") " Oct 10 17:58:03 crc kubenswrapper[4799]: I1010 17:58:03.294896 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23d1f4f9-9bb3-4d7e-b061-594175b3c221-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "23d1f4f9-9bb3-4d7e-b061-594175b3c221" (UID: "23d1f4f9-9bb3-4d7e-b061-594175b3c221"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 17:58:03 crc kubenswrapper[4799]: I1010 17:58:03.295169 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23d1f4f9-9bb3-4d7e-b061-594175b3c221-scripts" (OuterVolumeSpecName: "scripts") pod "23d1f4f9-9bb3-4d7e-b061-594175b3c221" (UID: "23d1f4f9-9bb3-4d7e-b061-594175b3c221"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 17:58:03 crc kubenswrapper[4799]: I1010 17:58:03.296712 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23d1f4f9-9bb3-4d7e-b061-594175b3c221-kube-api-access-fwt4d" (OuterVolumeSpecName: "kube-api-access-fwt4d") pod "23d1f4f9-9bb3-4d7e-b061-594175b3c221" (UID: "23d1f4f9-9bb3-4d7e-b061-594175b3c221"). InnerVolumeSpecName "kube-api-access-fwt4d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:58:03 crc kubenswrapper[4799]: I1010 17:58:03.297648 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23d1f4f9-9bb3-4d7e-b061-594175b3c221-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "23d1f4f9-9bb3-4d7e-b061-594175b3c221" (UID: "23d1f4f9-9bb3-4d7e-b061-594175b3c221"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 17:58:03 crc kubenswrapper[4799]: I1010 17:58:03.322545 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23d1f4f9-9bb3-4d7e-b061-594175b3c221-config-data" (OuterVolumeSpecName: "config-data") pod "23d1f4f9-9bb3-4d7e-b061-594175b3c221" (UID: "23d1f4f9-9bb3-4d7e-b061-594175b3c221"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 17:58:03 crc kubenswrapper[4799]: I1010 17:58:03.341039 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23d1f4f9-9bb3-4d7e-b061-594175b3c221-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "23d1f4f9-9bb3-4d7e-b061-594175b3c221" (UID: "23d1f4f9-9bb3-4d7e-b061-594175b3c221"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 17:58:03 crc kubenswrapper[4799]: I1010 17:58:03.389549 4799 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/23d1f4f9-9bb3-4d7e-b061-594175b3c221-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 10 17:58:03 crc kubenswrapper[4799]: I1010 17:58:03.389596 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23d1f4f9-9bb3-4d7e-b061-594175b3c221-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 17:58:03 crc kubenswrapper[4799]: I1010 17:58:03.389606 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23d1f4f9-9bb3-4d7e-b061-594175b3c221-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 17:58:03 crc kubenswrapper[4799]: I1010 17:58:03.389620 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fwt4d\" (UniqueName: \"kubernetes.io/projected/23d1f4f9-9bb3-4d7e-b061-594175b3c221-kube-api-access-fwt4d\") on node \"crc\" DevicePath \"\"" Oct 10 17:58:03 crc kubenswrapper[4799]: I1010 17:58:03.389631 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23d1f4f9-9bb3-4d7e-b061-594175b3c221-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 17:58:03 crc kubenswrapper[4799]: I1010 17:58:03.389641 4799 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/23d1f4f9-9bb3-4d7e-b061-594175b3c221-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 10 17:58:03 crc kubenswrapper[4799]: I1010 17:58:03.912091 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-f6s6d" event={"ID":"23d1f4f9-9bb3-4d7e-b061-594175b3c221","Type":"ContainerDied","Data":"6ffd27c7ecc3b4e00011da2c6eba39aba8c8eddb06b0182dd33291afbbcec24c"} Oct 10 17:58:03 crc kubenswrapper[4799]: I1010 17:58:03.913093 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6ffd27c7ecc3b4e00011da2c6eba39aba8c8eddb06b0182dd33291afbbcec24c" Oct 10 17:58:03 crc kubenswrapper[4799]: I1010 17:58:03.912364 4799 util.go:48] 
"No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-f6s6d" Oct 10 17:58:04 crc kubenswrapper[4799]: I1010 17:58:04.012262 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-f6s6d"] Oct 10 17:58:04 crc kubenswrapper[4799]: I1010 17:58:04.018898 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-f6s6d"] Oct 10 17:58:04 crc kubenswrapper[4799]: I1010 17:58:04.110341 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-lpdsw"] Oct 10 17:58:04 crc kubenswrapper[4799]: E1010 17:58:04.110849 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23d1f4f9-9bb3-4d7e-b061-594175b3c221" containerName="keystone-bootstrap" Oct 10 17:58:04 crc kubenswrapper[4799]: I1010 17:58:04.110871 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="23d1f4f9-9bb3-4d7e-b061-594175b3c221" containerName="keystone-bootstrap" Oct 10 17:58:04 crc kubenswrapper[4799]: I1010 17:58:04.111082 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="23d1f4f9-9bb3-4d7e-b061-594175b3c221" containerName="keystone-bootstrap" Oct 10 17:58:04 crc kubenswrapper[4799]: I1010 17:58:04.111962 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-lpdsw" Oct 10 17:58:04 crc kubenswrapper[4799]: I1010 17:58:04.114304 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 10 17:58:04 crc kubenswrapper[4799]: I1010 17:58:04.114319 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 10 17:58:04 crc kubenswrapper[4799]: I1010 17:58:04.115461 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-85zxn" Oct 10 17:58:04 crc kubenswrapper[4799]: I1010 17:58:04.117887 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 10 17:58:04 crc kubenswrapper[4799]: I1010 17:58:04.126226 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-lpdsw"] Oct 10 17:58:04 crc kubenswrapper[4799]: I1010 17:58:04.204046 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/32928cd2-df58-4d78-a702-f59ec3449bae-fernet-keys\") pod \"keystone-bootstrap-lpdsw\" (UID: \"32928cd2-df58-4d78-a702-f59ec3449bae\") " pod="openstack/keystone-bootstrap-lpdsw" Oct 10 17:58:04 crc kubenswrapper[4799]: I1010 17:58:04.204299 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32928cd2-df58-4d78-a702-f59ec3449bae-scripts\") pod \"keystone-bootstrap-lpdsw\" (UID: \"32928cd2-df58-4d78-a702-f59ec3449bae\") " pod="openstack/keystone-bootstrap-lpdsw" Oct 10 17:58:04 crc kubenswrapper[4799]: I1010 17:58:04.204472 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32928cd2-df58-4d78-a702-f59ec3449bae-config-data\") pod \"keystone-bootstrap-lpdsw\" (UID: \"32928cd2-df58-4d78-a702-f59ec3449bae\") " pod="openstack/keystone-bootstrap-lpdsw" Oct 10 17:58:04 crc kubenswrapper[4799]: I1010 17:58:04.204591 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kr7j4\" 
(UniqueName: \"kubernetes.io/projected/32928cd2-df58-4d78-a702-f59ec3449bae-kube-api-access-kr7j4\") pod \"keystone-bootstrap-lpdsw\" (UID: \"32928cd2-df58-4d78-a702-f59ec3449bae\") " pod="openstack/keystone-bootstrap-lpdsw" Oct 10 17:58:04 crc kubenswrapper[4799]: I1010 17:58:04.204705 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32928cd2-df58-4d78-a702-f59ec3449bae-combined-ca-bundle\") pod \"keystone-bootstrap-lpdsw\" (UID: \"32928cd2-df58-4d78-a702-f59ec3449bae\") " pod="openstack/keystone-bootstrap-lpdsw" Oct 10 17:58:04 crc kubenswrapper[4799]: I1010 17:58:04.204843 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/32928cd2-df58-4d78-a702-f59ec3449bae-credential-keys\") pod \"keystone-bootstrap-lpdsw\" (UID: \"32928cd2-df58-4d78-a702-f59ec3449bae\") " pod="openstack/keystone-bootstrap-lpdsw" Oct 10 17:58:04 crc kubenswrapper[4799]: I1010 17:58:04.306261 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32928cd2-df58-4d78-a702-f59ec3449bae-combined-ca-bundle\") pod \"keystone-bootstrap-lpdsw\" (UID: \"32928cd2-df58-4d78-a702-f59ec3449bae\") " pod="openstack/keystone-bootstrap-lpdsw" Oct 10 17:58:04 crc kubenswrapper[4799]: I1010 17:58:04.306362 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/32928cd2-df58-4d78-a702-f59ec3449bae-credential-keys\") pod \"keystone-bootstrap-lpdsw\" (UID: \"32928cd2-df58-4d78-a702-f59ec3449bae\") " pod="openstack/keystone-bootstrap-lpdsw" Oct 10 17:58:04 crc kubenswrapper[4799]: I1010 17:58:04.306424 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/32928cd2-df58-4d78-a702-f59ec3449bae-fernet-keys\") pod \"keystone-bootstrap-lpdsw\" (UID: \"32928cd2-df58-4d78-a702-f59ec3449bae\") " pod="openstack/keystone-bootstrap-lpdsw" Oct 10 17:58:04 crc kubenswrapper[4799]: I1010 17:58:04.307303 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32928cd2-df58-4d78-a702-f59ec3449bae-scripts\") pod \"keystone-bootstrap-lpdsw\" (UID: \"32928cd2-df58-4d78-a702-f59ec3449bae\") " pod="openstack/keystone-bootstrap-lpdsw" Oct 10 17:58:04 crc kubenswrapper[4799]: I1010 17:58:04.307382 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32928cd2-df58-4d78-a702-f59ec3449bae-config-data\") pod \"keystone-bootstrap-lpdsw\" (UID: \"32928cd2-df58-4d78-a702-f59ec3449bae\") " pod="openstack/keystone-bootstrap-lpdsw" Oct 10 17:58:04 crc kubenswrapper[4799]: I1010 17:58:04.307444 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kr7j4\" (UniqueName: \"kubernetes.io/projected/32928cd2-df58-4d78-a702-f59ec3449bae-kube-api-access-kr7j4\") pod \"keystone-bootstrap-lpdsw\" (UID: \"32928cd2-df58-4d78-a702-f59ec3449bae\") " pod="openstack/keystone-bootstrap-lpdsw" Oct 10 17:58:04 crc kubenswrapper[4799]: I1010 17:58:04.311968 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32928cd2-df58-4d78-a702-f59ec3449bae-scripts\") pod 
\"keystone-bootstrap-lpdsw\" (UID: \"32928cd2-df58-4d78-a702-f59ec3449bae\") " pod="openstack/keystone-bootstrap-lpdsw" Oct 10 17:58:04 crc kubenswrapper[4799]: I1010 17:58:04.312323 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/32928cd2-df58-4d78-a702-f59ec3449bae-credential-keys\") pod \"keystone-bootstrap-lpdsw\" (UID: \"32928cd2-df58-4d78-a702-f59ec3449bae\") " pod="openstack/keystone-bootstrap-lpdsw" Oct 10 17:58:04 crc kubenswrapper[4799]: I1010 17:58:04.313122 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32928cd2-df58-4d78-a702-f59ec3449bae-combined-ca-bundle\") pod \"keystone-bootstrap-lpdsw\" (UID: \"32928cd2-df58-4d78-a702-f59ec3449bae\") " pod="openstack/keystone-bootstrap-lpdsw" Oct 10 17:58:04 crc kubenswrapper[4799]: I1010 17:58:04.314007 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/32928cd2-df58-4d78-a702-f59ec3449bae-fernet-keys\") pod \"keystone-bootstrap-lpdsw\" (UID: \"32928cd2-df58-4d78-a702-f59ec3449bae\") " pod="openstack/keystone-bootstrap-lpdsw" Oct 10 17:58:04 crc kubenswrapper[4799]: I1010 17:58:04.320587 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32928cd2-df58-4d78-a702-f59ec3449bae-config-data\") pod \"keystone-bootstrap-lpdsw\" (UID: \"32928cd2-df58-4d78-a702-f59ec3449bae\") " pod="openstack/keystone-bootstrap-lpdsw" Oct 10 17:58:04 crc kubenswrapper[4799]: I1010 17:58:04.338748 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kr7j4\" (UniqueName: \"kubernetes.io/projected/32928cd2-df58-4d78-a702-f59ec3449bae-kube-api-access-kr7j4\") pod \"keystone-bootstrap-lpdsw\" (UID: \"32928cd2-df58-4d78-a702-f59ec3449bae\") " pod="openstack/keystone-bootstrap-lpdsw" Oct 10 17:58:04 crc kubenswrapper[4799]: I1010 17:58:04.434942 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-lpdsw" Oct 10 17:58:04 crc kubenswrapper[4799]: I1010 17:58:04.915228 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-lpdsw"] Oct 10 17:58:05 crc kubenswrapper[4799]: I1010 17:58:05.420171 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23d1f4f9-9bb3-4d7e-b061-594175b3c221" path="/var/lib/kubelet/pods/23d1f4f9-9bb3-4d7e-b061-594175b3c221/volumes" Oct 10 17:58:05 crc kubenswrapper[4799]: I1010 17:58:05.937090 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-lpdsw" event={"ID":"32928cd2-df58-4d78-a702-f59ec3449bae","Type":"ContainerStarted","Data":"1ee729ddf15efa277a3f6bd311967cc2196f1dedc3e6b542d10e7d2012d252ec"} Oct 10 17:58:05 crc kubenswrapper[4799]: I1010 17:58:05.937482 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-lpdsw" event={"ID":"32928cd2-df58-4d78-a702-f59ec3449bae","Type":"ContainerStarted","Data":"f2a4a454d69fb577eab7a12d1081ef51483add81db241124eeb1c10189e42ab3"} Oct 10 17:58:05 crc kubenswrapper[4799]: I1010 17:58:05.971262 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-lpdsw" podStartSLOduration=1.97124476 podStartE2EDuration="1.97124476s" podCreationTimestamp="2025-10-10 17:58:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 17:58:05.963078201 +0000 UTC m=+5179.471402326" watchObservedRunningTime="2025-10-10 17:58:05.97124476 +0000 UTC m=+5179.479568885" Oct 10 17:58:06 crc kubenswrapper[4799]: I1010 17:58:06.717528 4799 scope.go:117] "RemoveContainer" containerID="6b2c7e819dc758dbe0fef2da04fc6912e8b3b458c9628669a7e707412b87cc27" Oct 10 17:58:06 crc kubenswrapper[4799]: I1010 17:58:06.771662 4799 scope.go:117] "RemoveContainer" containerID="fcd37e9e5f5c3a0076dd4f7da5786deecbe5ea2318b5297e3fe2e8911aa51b4c" Oct 10 17:58:06 crc kubenswrapper[4799]: I1010 17:58:06.818084 4799 scope.go:117] "RemoveContainer" containerID="7924e9df587c89bcdee10662ecc97f40ba18deddacbdd2094e286596282b4cb4" Oct 10 17:58:06 crc kubenswrapper[4799]: I1010 17:58:06.851626 4799 scope.go:117] "RemoveContainer" containerID="452cc43f0187a182a982e862153c9c763e801a6d427ed70a3987333d5105b966" Oct 10 17:58:06 crc kubenswrapper[4799]: I1010 17:58:06.895286 4799 scope.go:117] "RemoveContainer" containerID="5c115e91559c0508908141e853c04b457972c39bb070cfc65da0432619bfe115" Oct 10 17:58:07 crc kubenswrapper[4799]: I1010 17:58:07.411258 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6b7d9cf495-24s9n" Oct 10 17:58:07 crc kubenswrapper[4799]: I1010 17:58:07.463298 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5cbf6d5c75-9gk4t"] Oct 10 17:58:07 crc kubenswrapper[4799]: I1010 17:58:07.463614 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5cbf6d5c75-9gk4t" podUID="f30813b7-4ff2-48c2-81f0-0e00fbb208e9" containerName="dnsmasq-dns" containerID="cri-o://6780017d6e1afe6aaf33465514aa98b8c1142613e9d0e75a2b47797941cc2137" gracePeriod=10 Oct 10 17:58:07 crc kubenswrapper[4799]: I1010 17:58:07.923351 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5cbf6d5c75-9gk4t" Oct 10 17:58:07 crc kubenswrapper[4799]: I1010 17:58:07.967359 4799 generic.go:334] "Generic (PLEG): container finished" podID="f30813b7-4ff2-48c2-81f0-0e00fbb208e9" containerID="6780017d6e1afe6aaf33465514aa98b8c1142613e9d0e75a2b47797941cc2137" exitCode=0 Oct 10 17:58:07 crc kubenswrapper[4799]: I1010 17:58:07.967424 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5cbf6d5c75-9gk4t" event={"ID":"f30813b7-4ff2-48c2-81f0-0e00fbb208e9","Type":"ContainerDied","Data":"6780017d6e1afe6aaf33465514aa98b8c1142613e9d0e75a2b47797941cc2137"} Oct 10 17:58:07 crc kubenswrapper[4799]: I1010 17:58:07.967440 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5cbf6d5c75-9gk4t" Oct 10 17:58:07 crc kubenswrapper[4799]: I1010 17:58:07.967463 4799 scope.go:117] "RemoveContainer" containerID="6780017d6e1afe6aaf33465514aa98b8c1142613e9d0e75a2b47797941cc2137" Oct 10 17:58:07 crc kubenswrapper[4799]: I1010 17:58:07.967451 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5cbf6d5c75-9gk4t" event={"ID":"f30813b7-4ff2-48c2-81f0-0e00fbb208e9","Type":"ContainerDied","Data":"cfec1868410b6bd8bebebdd7b1abd0160f817b86fd89da3f59a3e061858dbaeb"} Oct 10 17:58:07 crc kubenswrapper[4799]: I1010 17:58:07.969427 4799 generic.go:334] "Generic (PLEG): container finished" podID="32928cd2-df58-4d78-a702-f59ec3449bae" containerID="1ee729ddf15efa277a3f6bd311967cc2196f1dedc3e6b542d10e7d2012d252ec" exitCode=0 Oct 10 17:58:07 crc kubenswrapper[4799]: I1010 17:58:07.969471 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-lpdsw" event={"ID":"32928cd2-df58-4d78-a702-f59ec3449bae","Type":"ContainerDied","Data":"1ee729ddf15efa277a3f6bd311967cc2196f1dedc3e6b542d10e7d2012d252ec"} Oct 10 17:58:07 crc kubenswrapper[4799]: I1010 17:58:07.998297 4799 scope.go:117] "RemoveContainer" containerID="802daea0f9f1f28f41b3722862ec0b99be344ecb8cfeef2c0627318bad03154d" Oct 10 17:58:08 crc kubenswrapper[4799]: I1010 17:58:08.023150 4799 scope.go:117] "RemoveContainer" containerID="6780017d6e1afe6aaf33465514aa98b8c1142613e9d0e75a2b47797941cc2137" Oct 10 17:58:08 crc kubenswrapper[4799]: E1010 17:58:08.023842 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6780017d6e1afe6aaf33465514aa98b8c1142613e9d0e75a2b47797941cc2137\": container with ID starting with 6780017d6e1afe6aaf33465514aa98b8c1142613e9d0e75a2b47797941cc2137 not found: ID does not exist" containerID="6780017d6e1afe6aaf33465514aa98b8c1142613e9d0e75a2b47797941cc2137" Oct 10 17:58:08 crc kubenswrapper[4799]: I1010 17:58:08.023874 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6780017d6e1afe6aaf33465514aa98b8c1142613e9d0e75a2b47797941cc2137"} err="failed to get container status \"6780017d6e1afe6aaf33465514aa98b8c1142613e9d0e75a2b47797941cc2137\": rpc error: code = NotFound desc = could not find container \"6780017d6e1afe6aaf33465514aa98b8c1142613e9d0e75a2b47797941cc2137\": container with ID starting with 6780017d6e1afe6aaf33465514aa98b8c1142613e9d0e75a2b47797941cc2137 not found: ID does not exist" Oct 10 17:58:08 crc kubenswrapper[4799]: I1010 17:58:08.023901 4799 scope.go:117] "RemoveContainer" containerID="802daea0f9f1f28f41b3722862ec0b99be344ecb8cfeef2c0627318bad03154d" Oct 10 17:58:08 crc kubenswrapper[4799]: E1010 17:58:08.024343 4799 
log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"802daea0f9f1f28f41b3722862ec0b99be344ecb8cfeef2c0627318bad03154d\": container with ID starting with 802daea0f9f1f28f41b3722862ec0b99be344ecb8cfeef2c0627318bad03154d not found: ID does not exist" containerID="802daea0f9f1f28f41b3722862ec0b99be344ecb8cfeef2c0627318bad03154d" Oct 10 17:58:08 crc kubenswrapper[4799]: I1010 17:58:08.024397 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"802daea0f9f1f28f41b3722862ec0b99be344ecb8cfeef2c0627318bad03154d"} err="failed to get container status \"802daea0f9f1f28f41b3722862ec0b99be344ecb8cfeef2c0627318bad03154d\": rpc error: code = NotFound desc = could not find container \"802daea0f9f1f28f41b3722862ec0b99be344ecb8cfeef2c0627318bad03154d\": container with ID starting with 802daea0f9f1f28f41b3722862ec0b99be344ecb8cfeef2c0627318bad03154d not found: ID does not exist" Oct 10 17:58:08 crc kubenswrapper[4799]: I1010 17:58:08.088635 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f30813b7-4ff2-48c2-81f0-0e00fbb208e9-config\") pod \"f30813b7-4ff2-48c2-81f0-0e00fbb208e9\" (UID: \"f30813b7-4ff2-48c2-81f0-0e00fbb208e9\") " Oct 10 17:58:08 crc kubenswrapper[4799]: I1010 17:58:08.088701 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f30813b7-4ff2-48c2-81f0-0e00fbb208e9-ovsdbserver-nb\") pod \"f30813b7-4ff2-48c2-81f0-0e00fbb208e9\" (UID: \"f30813b7-4ff2-48c2-81f0-0e00fbb208e9\") " Oct 10 17:58:08 crc kubenswrapper[4799]: I1010 17:58:08.088794 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jx9pm\" (UniqueName: \"kubernetes.io/projected/f30813b7-4ff2-48c2-81f0-0e00fbb208e9-kube-api-access-jx9pm\") pod \"f30813b7-4ff2-48c2-81f0-0e00fbb208e9\" (UID: \"f30813b7-4ff2-48c2-81f0-0e00fbb208e9\") " Oct 10 17:58:08 crc kubenswrapper[4799]: I1010 17:58:08.088856 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f30813b7-4ff2-48c2-81f0-0e00fbb208e9-ovsdbserver-sb\") pod \"f30813b7-4ff2-48c2-81f0-0e00fbb208e9\" (UID: \"f30813b7-4ff2-48c2-81f0-0e00fbb208e9\") " Oct 10 17:58:08 crc kubenswrapper[4799]: I1010 17:58:08.088940 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f30813b7-4ff2-48c2-81f0-0e00fbb208e9-dns-svc\") pod \"f30813b7-4ff2-48c2-81f0-0e00fbb208e9\" (UID: \"f30813b7-4ff2-48c2-81f0-0e00fbb208e9\") " Oct 10 17:58:08 crc kubenswrapper[4799]: I1010 17:58:08.095260 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f30813b7-4ff2-48c2-81f0-0e00fbb208e9-kube-api-access-jx9pm" (OuterVolumeSpecName: "kube-api-access-jx9pm") pod "f30813b7-4ff2-48c2-81f0-0e00fbb208e9" (UID: "f30813b7-4ff2-48c2-81f0-0e00fbb208e9"). InnerVolumeSpecName "kube-api-access-jx9pm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:58:08 crc kubenswrapper[4799]: I1010 17:58:08.135273 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f30813b7-4ff2-48c2-81f0-0e00fbb208e9-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f30813b7-4ff2-48c2-81f0-0e00fbb208e9" (UID: "f30813b7-4ff2-48c2-81f0-0e00fbb208e9"). 
InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 17:58:08 crc kubenswrapper[4799]: I1010 17:58:08.135647 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f30813b7-4ff2-48c2-81f0-0e00fbb208e9-config" (OuterVolumeSpecName: "config") pod "f30813b7-4ff2-48c2-81f0-0e00fbb208e9" (UID: "f30813b7-4ff2-48c2-81f0-0e00fbb208e9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 17:58:08 crc kubenswrapper[4799]: I1010 17:58:08.137364 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f30813b7-4ff2-48c2-81f0-0e00fbb208e9-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f30813b7-4ff2-48c2-81f0-0e00fbb208e9" (UID: "f30813b7-4ff2-48c2-81f0-0e00fbb208e9"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 17:58:08 crc kubenswrapper[4799]: I1010 17:58:08.146987 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f30813b7-4ff2-48c2-81f0-0e00fbb208e9-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f30813b7-4ff2-48c2-81f0-0e00fbb208e9" (UID: "f30813b7-4ff2-48c2-81f0-0e00fbb208e9"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 17:58:08 crc kubenswrapper[4799]: I1010 17:58:08.191328 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jx9pm\" (UniqueName: \"kubernetes.io/projected/f30813b7-4ff2-48c2-81f0-0e00fbb208e9-kube-api-access-jx9pm\") on node \"crc\" DevicePath \"\"" Oct 10 17:58:08 crc kubenswrapper[4799]: I1010 17:58:08.191377 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f30813b7-4ff2-48c2-81f0-0e00fbb208e9-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 10 17:58:08 crc kubenswrapper[4799]: I1010 17:58:08.191391 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f30813b7-4ff2-48c2-81f0-0e00fbb208e9-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 10 17:58:08 crc kubenswrapper[4799]: I1010 17:58:08.191401 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f30813b7-4ff2-48c2-81f0-0e00fbb208e9-config\") on node \"crc\" DevicePath \"\"" Oct 10 17:58:08 crc kubenswrapper[4799]: I1010 17:58:08.191412 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f30813b7-4ff2-48c2-81f0-0e00fbb208e9-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 10 17:58:08 crc kubenswrapper[4799]: I1010 17:58:08.308106 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5cbf6d5c75-9gk4t"] Oct 10 17:58:08 crc kubenswrapper[4799]: I1010 17:58:08.318242 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5cbf6d5c75-9gk4t"] Oct 10 17:58:09 crc kubenswrapper[4799]: I1010 17:58:09.321471 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-lpdsw" Oct 10 17:58:09 crc kubenswrapper[4799]: I1010 17:58:09.419191 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f30813b7-4ff2-48c2-81f0-0e00fbb208e9" path="/var/lib/kubelet/pods/f30813b7-4ff2-48c2-81f0-0e00fbb208e9/volumes" Oct 10 17:58:09 crc kubenswrapper[4799]: I1010 17:58:09.519543 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32928cd2-df58-4d78-a702-f59ec3449bae-config-data\") pod \"32928cd2-df58-4d78-a702-f59ec3449bae\" (UID: \"32928cd2-df58-4d78-a702-f59ec3449bae\") " Oct 10 17:58:09 crc kubenswrapper[4799]: I1010 17:58:09.519869 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32928cd2-df58-4d78-a702-f59ec3449bae-scripts\") pod \"32928cd2-df58-4d78-a702-f59ec3449bae\" (UID: \"32928cd2-df58-4d78-a702-f59ec3449bae\") " Oct 10 17:58:09 crc kubenswrapper[4799]: I1010 17:58:09.519967 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32928cd2-df58-4d78-a702-f59ec3449bae-combined-ca-bundle\") pod \"32928cd2-df58-4d78-a702-f59ec3449bae\" (UID: \"32928cd2-df58-4d78-a702-f59ec3449bae\") " Oct 10 17:58:09 crc kubenswrapper[4799]: I1010 17:58:09.520147 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/32928cd2-df58-4d78-a702-f59ec3449bae-credential-keys\") pod \"32928cd2-df58-4d78-a702-f59ec3449bae\" (UID: \"32928cd2-df58-4d78-a702-f59ec3449bae\") " Oct 10 17:58:09 crc kubenswrapper[4799]: I1010 17:58:09.520277 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kr7j4\" (UniqueName: \"kubernetes.io/projected/32928cd2-df58-4d78-a702-f59ec3449bae-kube-api-access-kr7j4\") pod \"32928cd2-df58-4d78-a702-f59ec3449bae\" (UID: \"32928cd2-df58-4d78-a702-f59ec3449bae\") " Oct 10 17:58:09 crc kubenswrapper[4799]: I1010 17:58:09.521017 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/32928cd2-df58-4d78-a702-f59ec3449bae-fernet-keys\") pod \"32928cd2-df58-4d78-a702-f59ec3449bae\" (UID: \"32928cd2-df58-4d78-a702-f59ec3449bae\") " Oct 10 17:58:09 crc kubenswrapper[4799]: I1010 17:58:09.527261 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32928cd2-df58-4d78-a702-f59ec3449bae-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "32928cd2-df58-4d78-a702-f59ec3449bae" (UID: "32928cd2-df58-4d78-a702-f59ec3449bae"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 17:58:09 crc kubenswrapper[4799]: I1010 17:58:09.527360 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32928cd2-df58-4d78-a702-f59ec3449bae-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "32928cd2-df58-4d78-a702-f59ec3449bae" (UID: "32928cd2-df58-4d78-a702-f59ec3449bae"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 17:58:09 crc kubenswrapper[4799]: I1010 17:58:09.527623 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32928cd2-df58-4d78-a702-f59ec3449bae-kube-api-access-kr7j4" (OuterVolumeSpecName: "kube-api-access-kr7j4") pod "32928cd2-df58-4d78-a702-f59ec3449bae" (UID: "32928cd2-df58-4d78-a702-f59ec3449bae"). InnerVolumeSpecName "kube-api-access-kr7j4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:58:09 crc kubenswrapper[4799]: I1010 17:58:09.528352 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32928cd2-df58-4d78-a702-f59ec3449bae-scripts" (OuterVolumeSpecName: "scripts") pod "32928cd2-df58-4d78-a702-f59ec3449bae" (UID: "32928cd2-df58-4d78-a702-f59ec3449bae"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 17:58:09 crc kubenswrapper[4799]: I1010 17:58:09.564158 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32928cd2-df58-4d78-a702-f59ec3449bae-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "32928cd2-df58-4d78-a702-f59ec3449bae" (UID: "32928cd2-df58-4d78-a702-f59ec3449bae"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 17:58:09 crc kubenswrapper[4799]: I1010 17:58:09.577038 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32928cd2-df58-4d78-a702-f59ec3449bae-config-data" (OuterVolumeSpecName: "config-data") pod "32928cd2-df58-4d78-a702-f59ec3449bae" (UID: "32928cd2-df58-4d78-a702-f59ec3449bae"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 17:58:09 crc kubenswrapper[4799]: I1010 17:58:09.625690 4799 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/32928cd2-df58-4d78-a702-f59ec3449bae-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 10 17:58:09 crc kubenswrapper[4799]: I1010 17:58:09.625739 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kr7j4\" (UniqueName: \"kubernetes.io/projected/32928cd2-df58-4d78-a702-f59ec3449bae-kube-api-access-kr7j4\") on node \"crc\" DevicePath \"\"" Oct 10 17:58:09 crc kubenswrapper[4799]: I1010 17:58:09.625760 4799 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/32928cd2-df58-4d78-a702-f59ec3449bae-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 10 17:58:09 crc kubenswrapper[4799]: I1010 17:58:09.625772 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32928cd2-df58-4d78-a702-f59ec3449bae-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 17:58:09 crc kubenswrapper[4799]: I1010 17:58:09.625799 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32928cd2-df58-4d78-a702-f59ec3449bae-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 17:58:09 crc kubenswrapper[4799]: I1010 17:58:09.625813 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32928cd2-df58-4d78-a702-f59ec3449bae-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 17:58:09 crc kubenswrapper[4799]: I1010 17:58:09.995736 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/keystone-bootstrap-lpdsw" event={"ID":"32928cd2-df58-4d78-a702-f59ec3449bae","Type":"ContainerDied","Data":"f2a4a454d69fb577eab7a12d1081ef51483add81db241124eeb1c10189e42ab3"} Oct 10 17:58:09 crc kubenswrapper[4799]: I1010 17:58:09.995811 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f2a4a454d69fb577eab7a12d1081ef51483add81db241124eeb1c10189e42ab3" Oct 10 17:58:09 crc kubenswrapper[4799]: I1010 17:58:09.995894 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-lpdsw" Oct 10 17:58:10 crc kubenswrapper[4799]: I1010 17:58:10.181382 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-59fb5d4d57-hlz5n"] Oct 10 17:58:10 crc kubenswrapper[4799]: E1010 17:58:10.181997 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32928cd2-df58-4d78-a702-f59ec3449bae" containerName="keystone-bootstrap" Oct 10 17:58:10 crc kubenswrapper[4799]: I1010 17:58:10.182087 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="32928cd2-df58-4d78-a702-f59ec3449bae" containerName="keystone-bootstrap" Oct 10 17:58:10 crc kubenswrapper[4799]: E1010 17:58:10.182165 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f30813b7-4ff2-48c2-81f0-0e00fbb208e9" containerName="dnsmasq-dns" Oct 10 17:58:10 crc kubenswrapper[4799]: I1010 17:58:10.182236 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f30813b7-4ff2-48c2-81f0-0e00fbb208e9" containerName="dnsmasq-dns" Oct 10 17:58:10 crc kubenswrapper[4799]: E1010 17:58:10.182364 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f30813b7-4ff2-48c2-81f0-0e00fbb208e9" containerName="init" Oct 10 17:58:10 crc kubenswrapper[4799]: I1010 17:58:10.182444 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f30813b7-4ff2-48c2-81f0-0e00fbb208e9" containerName="init" Oct 10 17:58:10 crc kubenswrapper[4799]: I1010 17:58:10.182694 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="32928cd2-df58-4d78-a702-f59ec3449bae" containerName="keystone-bootstrap" Oct 10 17:58:10 crc kubenswrapper[4799]: I1010 17:58:10.182816 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f30813b7-4ff2-48c2-81f0-0e00fbb208e9" containerName="dnsmasq-dns" Oct 10 17:58:10 crc kubenswrapper[4799]: I1010 17:58:10.183545 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-59fb5d4d57-hlz5n" Oct 10 17:58:10 crc kubenswrapper[4799]: I1010 17:58:10.187853 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 10 17:58:10 crc kubenswrapper[4799]: I1010 17:58:10.188304 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 10 17:58:10 crc kubenswrapper[4799]: I1010 17:58:10.191823 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 10 17:58:10 crc kubenswrapper[4799]: I1010 17:58:10.192097 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-85zxn" Oct 10 17:58:10 crc kubenswrapper[4799]: I1010 17:58:10.211811 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-59fb5d4d57-hlz5n"] Oct 10 17:58:10 crc kubenswrapper[4799]: I1010 17:58:10.336277 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936-credential-keys\") pod \"keystone-59fb5d4d57-hlz5n\" (UID: \"3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936\") " pod="openstack/keystone-59fb5d4d57-hlz5n" Oct 10 17:58:10 crc kubenswrapper[4799]: I1010 17:58:10.336322 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936-scripts\") pod \"keystone-59fb5d4d57-hlz5n\" (UID: \"3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936\") " pod="openstack/keystone-59fb5d4d57-hlz5n" Oct 10 17:58:10 crc kubenswrapper[4799]: I1010 17:58:10.336340 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936-combined-ca-bundle\") pod \"keystone-59fb5d4d57-hlz5n\" (UID: \"3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936\") " pod="openstack/keystone-59fb5d4d57-hlz5n" Oct 10 17:58:10 crc kubenswrapper[4799]: I1010 17:58:10.336364 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2fnl\" (UniqueName: \"kubernetes.io/projected/3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936-kube-api-access-v2fnl\") pod \"keystone-59fb5d4d57-hlz5n\" (UID: \"3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936\") " pod="openstack/keystone-59fb5d4d57-hlz5n" Oct 10 17:58:10 crc kubenswrapper[4799]: I1010 17:58:10.336936 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936-config-data\") pod \"keystone-59fb5d4d57-hlz5n\" (UID: \"3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936\") " pod="openstack/keystone-59fb5d4d57-hlz5n" Oct 10 17:58:10 crc kubenswrapper[4799]: I1010 17:58:10.337046 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936-fernet-keys\") pod \"keystone-59fb5d4d57-hlz5n\" (UID: \"3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936\") " pod="openstack/keystone-59fb5d4d57-hlz5n" Oct 10 17:58:10 crc kubenswrapper[4799]: I1010 17:58:10.439065 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936-credential-keys\") pod 
\"keystone-59fb5d4d57-hlz5n\" (UID: \"3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936\") " pod="openstack/keystone-59fb5d4d57-hlz5n" Oct 10 17:58:10 crc kubenswrapper[4799]: I1010 17:58:10.439116 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936-scripts\") pod \"keystone-59fb5d4d57-hlz5n\" (UID: \"3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936\") " pod="openstack/keystone-59fb5d4d57-hlz5n" Oct 10 17:58:10 crc kubenswrapper[4799]: I1010 17:58:10.439141 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936-combined-ca-bundle\") pod \"keystone-59fb5d4d57-hlz5n\" (UID: \"3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936\") " pod="openstack/keystone-59fb5d4d57-hlz5n" Oct 10 17:58:10 crc kubenswrapper[4799]: I1010 17:58:10.439164 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2fnl\" (UniqueName: \"kubernetes.io/projected/3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936-kube-api-access-v2fnl\") pod \"keystone-59fb5d4d57-hlz5n\" (UID: \"3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936\") " pod="openstack/keystone-59fb5d4d57-hlz5n" Oct 10 17:58:10 crc kubenswrapper[4799]: I1010 17:58:10.439244 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936-config-data\") pod \"keystone-59fb5d4d57-hlz5n\" (UID: \"3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936\") " pod="openstack/keystone-59fb5d4d57-hlz5n" Oct 10 17:58:10 crc kubenswrapper[4799]: I1010 17:58:10.439283 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936-fernet-keys\") pod \"keystone-59fb5d4d57-hlz5n\" (UID: \"3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936\") " pod="openstack/keystone-59fb5d4d57-hlz5n" Oct 10 17:58:10 crc kubenswrapper[4799]: I1010 17:58:10.442601 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936-scripts\") pod \"keystone-59fb5d4d57-hlz5n\" (UID: \"3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936\") " pod="openstack/keystone-59fb5d4d57-hlz5n" Oct 10 17:58:10 crc kubenswrapper[4799]: I1010 17:58:10.444373 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936-credential-keys\") pod \"keystone-59fb5d4d57-hlz5n\" (UID: \"3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936\") " pod="openstack/keystone-59fb5d4d57-hlz5n" Oct 10 17:58:10 crc kubenswrapper[4799]: I1010 17:58:10.444388 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936-fernet-keys\") pod \"keystone-59fb5d4d57-hlz5n\" (UID: \"3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936\") " pod="openstack/keystone-59fb5d4d57-hlz5n" Oct 10 17:58:10 crc kubenswrapper[4799]: I1010 17:58:10.444445 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936-config-data\") pod \"keystone-59fb5d4d57-hlz5n\" (UID: \"3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936\") " pod="openstack/keystone-59fb5d4d57-hlz5n" Oct 10 17:58:10 crc kubenswrapper[4799]: I1010 17:58:10.452916 4799 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936-combined-ca-bundle\") pod \"keystone-59fb5d4d57-hlz5n\" (UID: \"3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936\") " pod="openstack/keystone-59fb5d4d57-hlz5n" Oct 10 17:58:10 crc kubenswrapper[4799]: I1010 17:58:10.466559 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2fnl\" (UniqueName: \"kubernetes.io/projected/3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936-kube-api-access-v2fnl\") pod \"keystone-59fb5d4d57-hlz5n\" (UID: \"3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936\") " pod="openstack/keystone-59fb5d4d57-hlz5n" Oct 10 17:58:10 crc kubenswrapper[4799]: I1010 17:58:10.506155 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-59fb5d4d57-hlz5n" Oct 10 17:58:10 crc kubenswrapper[4799]: I1010 17:58:10.776061 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-59fb5d4d57-hlz5n"] Oct 10 17:58:10 crc kubenswrapper[4799]: W1010 17:58:10.779317 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3f7d1ffe_1c3f_42e7_9b77_966e7e1e1936.slice/crio-ee3daffa8ae4e4054ffb04fb42d953c5ee9c847c8af488c64052a738902c832f WatchSource:0}: Error finding container ee3daffa8ae4e4054ffb04fb42d953c5ee9c847c8af488c64052a738902c832f: Status 404 returned error can't find the container with id ee3daffa8ae4e4054ffb04fb42d953c5ee9c847c8af488c64052a738902c832f Oct 10 17:58:11 crc kubenswrapper[4799]: I1010 17:58:11.003491 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-59fb5d4d57-hlz5n" event={"ID":"3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936","Type":"ContainerStarted","Data":"3f5ad31dc1c18f8432a92a142f35cfc2ab16ce67b49a55ceb69320964f9c49b6"} Oct 10 17:58:11 crc kubenswrapper[4799]: I1010 17:58:11.003865 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-59fb5d4d57-hlz5n" event={"ID":"3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936","Type":"ContainerStarted","Data":"ee3daffa8ae4e4054ffb04fb42d953c5ee9c847c8af488c64052a738902c832f"} Oct 10 17:58:11 crc kubenswrapper[4799]: I1010 17:58:11.003881 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-59fb5d4d57-hlz5n" Oct 10 17:58:11 crc kubenswrapper[4799]: I1010 17:58:11.035149 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-59fb5d4d57-hlz5n" podStartSLOduration=1.035127755 podStartE2EDuration="1.035127755s" podCreationTimestamp="2025-10-10 17:58:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 17:58:11.029524098 +0000 UTC m=+5184.537848273" watchObservedRunningTime="2025-10-10 17:58:11.035127755 +0000 UTC m=+5184.543451880" Oct 10 17:58:15 crc kubenswrapper[4799]: I1010 17:58:15.402701 4799 scope.go:117] "RemoveContainer" containerID="5e973f476cb0655a6e33e886e2a59fc6754febf3bf5a4718abcef307858985dd" Oct 10 17:58:15 crc kubenswrapper[4799]: E1010 17:58:15.403439 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:58:21 crc kubenswrapper[4799]: I1010 17:58:21.444598 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tjdpn"] Oct 10 17:58:21 crc kubenswrapper[4799]: I1010 17:58:21.448144 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tjdpn" Oct 10 17:58:21 crc kubenswrapper[4799]: I1010 17:58:21.461192 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tjdpn"] Oct 10 17:58:21 crc kubenswrapper[4799]: I1010 17:58:21.634748 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vkjnf\" (UniqueName: \"kubernetes.io/projected/9329dd06-0859-4bfa-ab8a-df404072c1f2-kube-api-access-vkjnf\") pod \"redhat-operators-tjdpn\" (UID: \"9329dd06-0859-4bfa-ab8a-df404072c1f2\") " pod="openshift-marketplace/redhat-operators-tjdpn" Oct 10 17:58:21 crc kubenswrapper[4799]: I1010 17:58:21.635008 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9329dd06-0859-4bfa-ab8a-df404072c1f2-catalog-content\") pod \"redhat-operators-tjdpn\" (UID: \"9329dd06-0859-4bfa-ab8a-df404072c1f2\") " pod="openshift-marketplace/redhat-operators-tjdpn" Oct 10 17:58:21 crc kubenswrapper[4799]: I1010 17:58:21.635326 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9329dd06-0859-4bfa-ab8a-df404072c1f2-utilities\") pod \"redhat-operators-tjdpn\" (UID: \"9329dd06-0859-4bfa-ab8a-df404072c1f2\") " pod="openshift-marketplace/redhat-operators-tjdpn" Oct 10 17:58:21 crc kubenswrapper[4799]: I1010 17:58:21.738043 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vkjnf\" (UniqueName: \"kubernetes.io/projected/9329dd06-0859-4bfa-ab8a-df404072c1f2-kube-api-access-vkjnf\") pod \"redhat-operators-tjdpn\" (UID: \"9329dd06-0859-4bfa-ab8a-df404072c1f2\") " pod="openshift-marketplace/redhat-operators-tjdpn" Oct 10 17:58:21 crc kubenswrapper[4799]: I1010 17:58:21.738165 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9329dd06-0859-4bfa-ab8a-df404072c1f2-catalog-content\") pod \"redhat-operators-tjdpn\" (UID: \"9329dd06-0859-4bfa-ab8a-df404072c1f2\") " pod="openshift-marketplace/redhat-operators-tjdpn" Oct 10 17:58:21 crc kubenswrapper[4799]: I1010 17:58:21.738373 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9329dd06-0859-4bfa-ab8a-df404072c1f2-utilities\") pod \"redhat-operators-tjdpn\" (UID: \"9329dd06-0859-4bfa-ab8a-df404072c1f2\") " pod="openshift-marketplace/redhat-operators-tjdpn" Oct 10 17:58:21 crc kubenswrapper[4799]: I1010 17:58:21.739170 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9329dd06-0859-4bfa-ab8a-df404072c1f2-catalog-content\") pod \"redhat-operators-tjdpn\" (UID: \"9329dd06-0859-4bfa-ab8a-df404072c1f2\") " pod="openshift-marketplace/redhat-operators-tjdpn" Oct 10 17:58:21 crc kubenswrapper[4799]: I1010 17:58:21.739271 4799 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9329dd06-0859-4bfa-ab8a-df404072c1f2-utilities\") pod \"redhat-operators-tjdpn\" (UID: \"9329dd06-0859-4bfa-ab8a-df404072c1f2\") " pod="openshift-marketplace/redhat-operators-tjdpn" Oct 10 17:58:21 crc kubenswrapper[4799]: I1010 17:58:21.763211 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vkjnf\" (UniqueName: \"kubernetes.io/projected/9329dd06-0859-4bfa-ab8a-df404072c1f2-kube-api-access-vkjnf\") pod \"redhat-operators-tjdpn\" (UID: \"9329dd06-0859-4bfa-ab8a-df404072c1f2\") " pod="openshift-marketplace/redhat-operators-tjdpn" Oct 10 17:58:21 crc kubenswrapper[4799]: I1010 17:58:21.784667 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tjdpn" Oct 10 17:58:22 crc kubenswrapper[4799]: I1010 17:58:22.119231 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tjdpn"] Oct 10 17:58:23 crc kubenswrapper[4799]: I1010 17:58:23.134739 4799 generic.go:334] "Generic (PLEG): container finished" podID="9329dd06-0859-4bfa-ab8a-df404072c1f2" containerID="5e8326c54d3733abb447a12ba886894da6f17463f6f7bbe7e3b977776fb520ad" exitCode=0 Oct 10 17:58:23 crc kubenswrapper[4799]: I1010 17:58:23.134814 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tjdpn" event={"ID":"9329dd06-0859-4bfa-ab8a-df404072c1f2","Type":"ContainerDied","Data":"5e8326c54d3733abb447a12ba886894da6f17463f6f7bbe7e3b977776fb520ad"} Oct 10 17:58:23 crc kubenswrapper[4799]: I1010 17:58:23.135045 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tjdpn" event={"ID":"9329dd06-0859-4bfa-ab8a-df404072c1f2","Type":"ContainerStarted","Data":"9eb8013623fe4c017e7b4f9616d18ed4f92bca0ea200e6750b601d7fc49aa85c"} Oct 10 17:58:24 crc kubenswrapper[4799]: I1010 17:58:24.147329 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tjdpn" event={"ID":"9329dd06-0859-4bfa-ab8a-df404072c1f2","Type":"ContainerStarted","Data":"1e891f7700eca9c2c0e2656cffdb444e7c6bd5cb31c9ceff55c158c1c13e353e"} Oct 10 17:58:24 crc kubenswrapper[4799]: I1010 17:58:24.229014 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-f6tbn"] Oct 10 17:58:24 crc kubenswrapper[4799]: I1010 17:58:24.231179 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-f6tbn" Oct 10 17:58:24 crc kubenswrapper[4799]: I1010 17:58:24.246583 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-f6tbn"] Oct 10 17:58:24 crc kubenswrapper[4799]: I1010 17:58:24.387126 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3b19085-6a43-4667-9b1e-67d7a34eed43-catalog-content\") pod \"community-operators-f6tbn\" (UID: \"b3b19085-6a43-4667-9b1e-67d7a34eed43\") " pod="openshift-marketplace/community-operators-f6tbn" Oct 10 17:58:24 crc kubenswrapper[4799]: I1010 17:58:24.387186 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-78j86\" (UniqueName: \"kubernetes.io/projected/b3b19085-6a43-4667-9b1e-67d7a34eed43-kube-api-access-78j86\") pod \"community-operators-f6tbn\" (UID: \"b3b19085-6a43-4667-9b1e-67d7a34eed43\") " pod="openshift-marketplace/community-operators-f6tbn" Oct 10 17:58:24 crc kubenswrapper[4799]: I1010 17:58:24.387236 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3b19085-6a43-4667-9b1e-67d7a34eed43-utilities\") pod \"community-operators-f6tbn\" (UID: \"b3b19085-6a43-4667-9b1e-67d7a34eed43\") " pod="openshift-marketplace/community-operators-f6tbn" Oct 10 17:58:24 crc kubenswrapper[4799]: I1010 17:58:24.489481 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3b19085-6a43-4667-9b1e-67d7a34eed43-catalog-content\") pod \"community-operators-f6tbn\" (UID: \"b3b19085-6a43-4667-9b1e-67d7a34eed43\") " pod="openshift-marketplace/community-operators-f6tbn" Oct 10 17:58:24 crc kubenswrapper[4799]: I1010 17:58:24.489539 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-78j86\" (UniqueName: \"kubernetes.io/projected/b3b19085-6a43-4667-9b1e-67d7a34eed43-kube-api-access-78j86\") pod \"community-operators-f6tbn\" (UID: \"b3b19085-6a43-4667-9b1e-67d7a34eed43\") " pod="openshift-marketplace/community-operators-f6tbn" Oct 10 17:58:24 crc kubenswrapper[4799]: I1010 17:58:24.489588 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3b19085-6a43-4667-9b1e-67d7a34eed43-utilities\") pod \"community-operators-f6tbn\" (UID: \"b3b19085-6a43-4667-9b1e-67d7a34eed43\") " pod="openshift-marketplace/community-operators-f6tbn" Oct 10 17:58:24 crc kubenswrapper[4799]: I1010 17:58:24.490294 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3b19085-6a43-4667-9b1e-67d7a34eed43-utilities\") pod \"community-operators-f6tbn\" (UID: \"b3b19085-6a43-4667-9b1e-67d7a34eed43\") " pod="openshift-marketplace/community-operators-f6tbn" Oct 10 17:58:24 crc kubenswrapper[4799]: I1010 17:58:24.490379 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3b19085-6a43-4667-9b1e-67d7a34eed43-catalog-content\") pod \"community-operators-f6tbn\" (UID: \"b3b19085-6a43-4667-9b1e-67d7a34eed43\") " pod="openshift-marketplace/community-operators-f6tbn" Oct 10 17:58:24 crc kubenswrapper[4799]: I1010 17:58:24.514132 4799 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-78j86\" (UniqueName: \"kubernetes.io/projected/b3b19085-6a43-4667-9b1e-67d7a34eed43-kube-api-access-78j86\") pod \"community-operators-f6tbn\" (UID: \"b3b19085-6a43-4667-9b1e-67d7a34eed43\") " pod="openshift-marketplace/community-operators-f6tbn" Oct 10 17:58:24 crc kubenswrapper[4799]: I1010 17:58:24.560865 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f6tbn" Oct 10 17:58:25 crc kubenswrapper[4799]: I1010 17:58:25.093739 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-f6tbn"] Oct 10 17:58:25 crc kubenswrapper[4799]: W1010 17:58:25.103969 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb3b19085_6a43_4667_9b1e_67d7a34eed43.slice/crio-687731dbe248e56b48771f9be02c8dcff777975f57896332838e908595ce6c72 WatchSource:0}: Error finding container 687731dbe248e56b48771f9be02c8dcff777975f57896332838e908595ce6c72: Status 404 returned error can't find the container with id 687731dbe248e56b48771f9be02c8dcff777975f57896332838e908595ce6c72 Oct 10 17:58:25 crc kubenswrapper[4799]: I1010 17:58:25.154219 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6tbn" event={"ID":"b3b19085-6a43-4667-9b1e-67d7a34eed43","Type":"ContainerStarted","Data":"687731dbe248e56b48771f9be02c8dcff777975f57896332838e908595ce6c72"} Oct 10 17:58:25 crc kubenswrapper[4799]: I1010 17:58:25.157622 4799 generic.go:334] "Generic (PLEG): container finished" podID="9329dd06-0859-4bfa-ab8a-df404072c1f2" containerID="1e891f7700eca9c2c0e2656cffdb444e7c6bd5cb31c9ceff55c158c1c13e353e" exitCode=0 Oct 10 17:58:25 crc kubenswrapper[4799]: I1010 17:58:25.157667 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tjdpn" event={"ID":"9329dd06-0859-4bfa-ab8a-df404072c1f2","Type":"ContainerDied","Data":"1e891f7700eca9c2c0e2656cffdb444e7c6bd5cb31c9ceff55c158c1c13e353e"} Oct 10 17:58:26 crc kubenswrapper[4799]: I1010 17:58:26.171019 4799 generic.go:334] "Generic (PLEG): container finished" podID="b3b19085-6a43-4667-9b1e-67d7a34eed43" containerID="5a9750b13cdf66250325ee17808a2915e55614f436e287167655377fcf132f31" exitCode=0 Oct 10 17:58:26 crc kubenswrapper[4799]: I1010 17:58:26.171098 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6tbn" event={"ID":"b3b19085-6a43-4667-9b1e-67d7a34eed43","Type":"ContainerDied","Data":"5a9750b13cdf66250325ee17808a2915e55614f436e287167655377fcf132f31"} Oct 10 17:58:26 crc kubenswrapper[4799]: I1010 17:58:26.178280 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tjdpn" event={"ID":"9329dd06-0859-4bfa-ab8a-df404072c1f2","Type":"ContainerStarted","Data":"026e43647b105ca4063c09a8940f55077bbcffd0c588e2b151fa2ea909ccb596"} Oct 10 17:58:26 crc kubenswrapper[4799]: I1010 17:58:26.229508 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tjdpn" podStartSLOduration=2.783785337 podStartE2EDuration="5.229479927s" podCreationTimestamp="2025-10-10 17:58:21 +0000 UTC" firstStartedPulling="2025-10-10 17:58:23.13691387 +0000 UTC m=+5196.645237995" lastFinishedPulling="2025-10-10 17:58:25.58260847 +0000 UTC m=+5199.090932585" observedRunningTime="2025-10-10 17:58:26.217066255 +0000 UTC m=+5199.725390440" 
watchObservedRunningTime="2025-10-10 17:58:26.229479927 +0000 UTC m=+5199.737804072" Oct 10 17:58:27 crc kubenswrapper[4799]: I1010 17:58:27.409880 4799 scope.go:117] "RemoveContainer" containerID="5e973f476cb0655a6e33e886e2a59fc6754febf3bf5a4718abcef307858985dd" Oct 10 17:58:27 crc kubenswrapper[4799]: E1010 17:58:27.410595 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:58:28 crc kubenswrapper[4799]: I1010 17:58:28.203178 4799 generic.go:334] "Generic (PLEG): container finished" podID="b3b19085-6a43-4667-9b1e-67d7a34eed43" containerID="e5d4618dfe7ae43d47add3f6f8f4f5dd5e84e962dca354bc2cb24ee1352cea36" exitCode=0 Oct 10 17:58:28 crc kubenswrapper[4799]: I1010 17:58:28.203309 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6tbn" event={"ID":"b3b19085-6a43-4667-9b1e-67d7a34eed43","Type":"ContainerDied","Data":"e5d4618dfe7ae43d47add3f6f8f4f5dd5e84e962dca354bc2cb24ee1352cea36"} Oct 10 17:58:29 crc kubenswrapper[4799]: I1010 17:58:29.218877 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6tbn" event={"ID":"b3b19085-6a43-4667-9b1e-67d7a34eed43","Type":"ContainerStarted","Data":"dd33e3dbb23d2352841389f3315c6d55fd80426e992aa646b76c36294cd3207a"} Oct 10 17:58:29 crc kubenswrapper[4799]: I1010 17:58:29.246782 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-f6tbn" podStartSLOduration=2.796120979 podStartE2EDuration="5.246732589s" podCreationTimestamp="2025-10-10 17:58:24 +0000 UTC" firstStartedPulling="2025-10-10 17:58:26.172826016 +0000 UTC m=+5199.681150171" lastFinishedPulling="2025-10-10 17:58:28.623437636 +0000 UTC m=+5202.131761781" observedRunningTime="2025-10-10 17:58:29.238607961 +0000 UTC m=+5202.746932136" watchObservedRunningTime="2025-10-10 17:58:29.246732589 +0000 UTC m=+5202.755056744" Oct 10 17:58:31 crc kubenswrapper[4799]: I1010 17:58:31.785337 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tjdpn" Oct 10 17:58:31 crc kubenswrapper[4799]: I1010 17:58:31.785433 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tjdpn" Oct 10 17:58:31 crc kubenswrapper[4799]: I1010 17:58:31.861679 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tjdpn" Oct 10 17:58:32 crc kubenswrapper[4799]: I1010 17:58:32.319170 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tjdpn" Oct 10 17:58:33 crc kubenswrapper[4799]: I1010 17:58:33.431154 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tjdpn"] Oct 10 17:58:34 crc kubenswrapper[4799]: I1010 17:58:34.283858 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tjdpn" podUID="9329dd06-0859-4bfa-ab8a-df404072c1f2" containerName="registry-server" 
containerID="cri-o://026e43647b105ca4063c09a8940f55077bbcffd0c588e2b151fa2ea909ccb596" gracePeriod=2 Oct 10 17:58:34 crc kubenswrapper[4799]: I1010 17:58:34.561587 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-f6tbn" Oct 10 17:58:34 crc kubenswrapper[4799]: I1010 17:58:34.562984 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-f6tbn" Oct 10 17:58:34 crc kubenswrapper[4799]: I1010 17:58:34.639219 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-f6tbn" Oct 10 17:58:34 crc kubenswrapper[4799]: I1010 17:58:34.806625 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tjdpn" Oct 10 17:58:34 crc kubenswrapper[4799]: I1010 17:58:34.894263 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9329dd06-0859-4bfa-ab8a-df404072c1f2-catalog-content\") pod \"9329dd06-0859-4bfa-ab8a-df404072c1f2\" (UID: \"9329dd06-0859-4bfa-ab8a-df404072c1f2\") " Oct 10 17:58:34 crc kubenswrapper[4799]: I1010 17:58:34.894432 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9329dd06-0859-4bfa-ab8a-df404072c1f2-utilities\") pod \"9329dd06-0859-4bfa-ab8a-df404072c1f2\" (UID: \"9329dd06-0859-4bfa-ab8a-df404072c1f2\") " Oct 10 17:58:34 crc kubenswrapper[4799]: I1010 17:58:34.894503 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vkjnf\" (UniqueName: \"kubernetes.io/projected/9329dd06-0859-4bfa-ab8a-df404072c1f2-kube-api-access-vkjnf\") pod \"9329dd06-0859-4bfa-ab8a-df404072c1f2\" (UID: \"9329dd06-0859-4bfa-ab8a-df404072c1f2\") " Oct 10 17:58:34 crc kubenswrapper[4799]: I1010 17:58:34.895470 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9329dd06-0859-4bfa-ab8a-df404072c1f2-utilities" (OuterVolumeSpecName: "utilities") pod "9329dd06-0859-4bfa-ab8a-df404072c1f2" (UID: "9329dd06-0859-4bfa-ab8a-df404072c1f2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:58:34 crc kubenswrapper[4799]: I1010 17:58:34.905114 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9329dd06-0859-4bfa-ab8a-df404072c1f2-kube-api-access-vkjnf" (OuterVolumeSpecName: "kube-api-access-vkjnf") pod "9329dd06-0859-4bfa-ab8a-df404072c1f2" (UID: "9329dd06-0859-4bfa-ab8a-df404072c1f2"). InnerVolumeSpecName "kube-api-access-vkjnf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:58:34 crc kubenswrapper[4799]: I1010 17:58:34.997607 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9329dd06-0859-4bfa-ab8a-df404072c1f2-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 17:58:34 crc kubenswrapper[4799]: I1010 17:58:34.998107 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vkjnf\" (UniqueName: \"kubernetes.io/projected/9329dd06-0859-4bfa-ab8a-df404072c1f2-kube-api-access-vkjnf\") on node \"crc\" DevicePath \"\"" Oct 10 17:58:35 crc kubenswrapper[4799]: I1010 17:58:35.300940 4799 generic.go:334] "Generic (PLEG): container finished" podID="9329dd06-0859-4bfa-ab8a-df404072c1f2" containerID="026e43647b105ca4063c09a8940f55077bbcffd0c588e2b151fa2ea909ccb596" exitCode=0 Oct 10 17:58:35 crc kubenswrapper[4799]: I1010 17:58:35.301018 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tjdpn" event={"ID":"9329dd06-0859-4bfa-ab8a-df404072c1f2","Type":"ContainerDied","Data":"026e43647b105ca4063c09a8940f55077bbcffd0c588e2b151fa2ea909ccb596"} Oct 10 17:58:35 crc kubenswrapper[4799]: I1010 17:58:35.301104 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tjdpn" Oct 10 17:58:35 crc kubenswrapper[4799]: I1010 17:58:35.301101 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tjdpn" event={"ID":"9329dd06-0859-4bfa-ab8a-df404072c1f2","Type":"ContainerDied","Data":"9eb8013623fe4c017e7b4f9616d18ed4f92bca0ea200e6750b601d7fc49aa85c"} Oct 10 17:58:35 crc kubenswrapper[4799]: I1010 17:58:35.301130 4799 scope.go:117] "RemoveContainer" containerID="026e43647b105ca4063c09a8940f55077bbcffd0c588e2b151fa2ea909ccb596" Oct 10 17:58:35 crc kubenswrapper[4799]: I1010 17:58:35.331980 4799 scope.go:117] "RemoveContainer" containerID="1e891f7700eca9c2c0e2656cffdb444e7c6bd5cb31c9ceff55c158c1c13e353e" Oct 10 17:58:35 crc kubenswrapper[4799]: I1010 17:58:35.363119 4799 scope.go:117] "RemoveContainer" containerID="5e8326c54d3733abb447a12ba886894da6f17463f6f7bbe7e3b977776fb520ad" Oct 10 17:58:35 crc kubenswrapper[4799]: I1010 17:58:35.377971 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-f6tbn" Oct 10 17:58:35 crc kubenswrapper[4799]: I1010 17:58:35.433466 4799 scope.go:117] "RemoveContainer" containerID="026e43647b105ca4063c09a8940f55077bbcffd0c588e2b151fa2ea909ccb596" Oct 10 17:58:35 crc kubenswrapper[4799]: E1010 17:58:35.434071 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"026e43647b105ca4063c09a8940f55077bbcffd0c588e2b151fa2ea909ccb596\": container with ID starting with 026e43647b105ca4063c09a8940f55077bbcffd0c588e2b151fa2ea909ccb596 not found: ID does not exist" containerID="026e43647b105ca4063c09a8940f55077bbcffd0c588e2b151fa2ea909ccb596" Oct 10 17:58:35 crc kubenswrapper[4799]: I1010 17:58:35.434409 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"026e43647b105ca4063c09a8940f55077bbcffd0c588e2b151fa2ea909ccb596"} err="failed to get container status \"026e43647b105ca4063c09a8940f55077bbcffd0c588e2b151fa2ea909ccb596\": rpc error: code = NotFound desc = could not find container \"026e43647b105ca4063c09a8940f55077bbcffd0c588e2b151fa2ea909ccb596\": container with ID starting with 
026e43647b105ca4063c09a8940f55077bbcffd0c588e2b151fa2ea909ccb596 not found: ID does not exist" Oct 10 17:58:35 crc kubenswrapper[4799]: I1010 17:58:35.434705 4799 scope.go:117] "RemoveContainer" containerID="1e891f7700eca9c2c0e2656cffdb444e7c6bd5cb31c9ceff55c158c1c13e353e" Oct 10 17:58:35 crc kubenswrapper[4799]: E1010 17:58:35.436337 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e891f7700eca9c2c0e2656cffdb444e7c6bd5cb31c9ceff55c158c1c13e353e\": container with ID starting with 1e891f7700eca9c2c0e2656cffdb444e7c6bd5cb31c9ceff55c158c1c13e353e not found: ID does not exist" containerID="1e891f7700eca9c2c0e2656cffdb444e7c6bd5cb31c9ceff55c158c1c13e353e" Oct 10 17:58:35 crc kubenswrapper[4799]: I1010 17:58:35.436402 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e891f7700eca9c2c0e2656cffdb444e7c6bd5cb31c9ceff55c158c1c13e353e"} err="failed to get container status \"1e891f7700eca9c2c0e2656cffdb444e7c6bd5cb31c9ceff55c158c1c13e353e\": rpc error: code = NotFound desc = could not find container \"1e891f7700eca9c2c0e2656cffdb444e7c6bd5cb31c9ceff55c158c1c13e353e\": container with ID starting with 1e891f7700eca9c2c0e2656cffdb444e7c6bd5cb31c9ceff55c158c1c13e353e not found: ID does not exist" Oct 10 17:58:35 crc kubenswrapper[4799]: I1010 17:58:35.436446 4799 scope.go:117] "RemoveContainer" containerID="5e8326c54d3733abb447a12ba886894da6f17463f6f7bbe7e3b977776fb520ad" Oct 10 17:58:35 crc kubenswrapper[4799]: E1010 17:58:35.437128 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e8326c54d3733abb447a12ba886894da6f17463f6f7bbe7e3b977776fb520ad\": container with ID starting with 5e8326c54d3733abb447a12ba886894da6f17463f6f7bbe7e3b977776fb520ad not found: ID does not exist" containerID="5e8326c54d3733abb447a12ba886894da6f17463f6f7bbe7e3b977776fb520ad" Oct 10 17:58:35 crc kubenswrapper[4799]: I1010 17:58:35.437503 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e8326c54d3733abb447a12ba886894da6f17463f6f7bbe7e3b977776fb520ad"} err="failed to get container status \"5e8326c54d3733abb447a12ba886894da6f17463f6f7bbe7e3b977776fb520ad\": rpc error: code = NotFound desc = could not find container \"5e8326c54d3733abb447a12ba886894da6f17463f6f7bbe7e3b977776fb520ad\": container with ID starting with 5e8326c54d3733abb447a12ba886894da6f17463f6f7bbe7e3b977776fb520ad not found: ID does not exist" Oct 10 17:58:35 crc kubenswrapper[4799]: I1010 17:58:35.820154 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9329dd06-0859-4bfa-ab8a-df404072c1f2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9329dd06-0859-4bfa-ab8a-df404072c1f2" (UID: "9329dd06-0859-4bfa-ab8a-df404072c1f2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:58:35 crc kubenswrapper[4799]: I1010 17:58:35.918624 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9329dd06-0859-4bfa-ab8a-df404072c1f2-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 17:58:35 crc kubenswrapper[4799]: I1010 17:58:35.947411 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tjdpn"] Oct 10 17:58:35 crc kubenswrapper[4799]: I1010 17:58:35.959935 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tjdpn"] Oct 10 17:58:36 crc kubenswrapper[4799]: I1010 17:58:36.618939 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-f6tbn"] Oct 10 17:58:37 crc kubenswrapper[4799]: I1010 17:58:37.425943 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9329dd06-0859-4bfa-ab8a-df404072c1f2" path="/var/lib/kubelet/pods/9329dd06-0859-4bfa-ab8a-df404072c1f2/volumes" Oct 10 17:58:38 crc kubenswrapper[4799]: I1010 17:58:38.333515 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-f6tbn" podUID="b3b19085-6a43-4667-9b1e-67d7a34eed43" containerName="registry-server" containerID="cri-o://dd33e3dbb23d2352841389f3315c6d55fd80426e992aa646b76c36294cd3207a" gracePeriod=2 Oct 10 17:58:38 crc kubenswrapper[4799]: I1010 17:58:38.854795 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f6tbn" Oct 10 17:58:38 crc kubenswrapper[4799]: I1010 17:58:38.979701 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3b19085-6a43-4667-9b1e-67d7a34eed43-catalog-content\") pod \"b3b19085-6a43-4667-9b1e-67d7a34eed43\" (UID: \"b3b19085-6a43-4667-9b1e-67d7a34eed43\") " Oct 10 17:58:38 crc kubenswrapper[4799]: I1010 17:58:38.979967 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-78j86\" (UniqueName: \"kubernetes.io/projected/b3b19085-6a43-4667-9b1e-67d7a34eed43-kube-api-access-78j86\") pod \"b3b19085-6a43-4667-9b1e-67d7a34eed43\" (UID: \"b3b19085-6a43-4667-9b1e-67d7a34eed43\") " Oct 10 17:58:38 crc kubenswrapper[4799]: I1010 17:58:38.980050 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3b19085-6a43-4667-9b1e-67d7a34eed43-utilities\") pod \"b3b19085-6a43-4667-9b1e-67d7a34eed43\" (UID: \"b3b19085-6a43-4667-9b1e-67d7a34eed43\") " Oct 10 17:58:38 crc kubenswrapper[4799]: I1010 17:58:38.981334 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3b19085-6a43-4667-9b1e-67d7a34eed43-utilities" (OuterVolumeSpecName: "utilities") pod "b3b19085-6a43-4667-9b1e-67d7a34eed43" (UID: "b3b19085-6a43-4667-9b1e-67d7a34eed43"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:58:38 crc kubenswrapper[4799]: I1010 17:58:38.988501 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3b19085-6a43-4667-9b1e-67d7a34eed43-kube-api-access-78j86" (OuterVolumeSpecName: "kube-api-access-78j86") pod "b3b19085-6a43-4667-9b1e-67d7a34eed43" (UID: "b3b19085-6a43-4667-9b1e-67d7a34eed43"). 
InnerVolumeSpecName "kube-api-access-78j86". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:58:39 crc kubenswrapper[4799]: I1010 17:58:39.025530 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3b19085-6a43-4667-9b1e-67d7a34eed43-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b3b19085-6a43-4667-9b1e-67d7a34eed43" (UID: "b3b19085-6a43-4667-9b1e-67d7a34eed43"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:58:39 crc kubenswrapper[4799]: I1010 17:58:39.082666 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3b19085-6a43-4667-9b1e-67d7a34eed43-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 17:58:39 crc kubenswrapper[4799]: I1010 17:58:39.082714 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-78j86\" (UniqueName: \"kubernetes.io/projected/b3b19085-6a43-4667-9b1e-67d7a34eed43-kube-api-access-78j86\") on node \"crc\" DevicePath \"\"" Oct 10 17:58:39 crc kubenswrapper[4799]: I1010 17:58:39.082735 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3b19085-6a43-4667-9b1e-67d7a34eed43-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 17:58:39 crc kubenswrapper[4799]: I1010 17:58:39.349576 4799 generic.go:334] "Generic (PLEG): container finished" podID="b3b19085-6a43-4667-9b1e-67d7a34eed43" containerID="dd33e3dbb23d2352841389f3315c6d55fd80426e992aa646b76c36294cd3207a" exitCode=0 Oct 10 17:58:39 crc kubenswrapper[4799]: I1010 17:58:39.349730 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6tbn" event={"ID":"b3b19085-6a43-4667-9b1e-67d7a34eed43","Type":"ContainerDied","Data":"dd33e3dbb23d2352841389f3315c6d55fd80426e992aa646b76c36294cd3207a"} Oct 10 17:58:39 crc kubenswrapper[4799]: I1010 17:58:39.350085 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6tbn" event={"ID":"b3b19085-6a43-4667-9b1e-67d7a34eed43","Type":"ContainerDied","Data":"687731dbe248e56b48771f9be02c8dcff777975f57896332838e908595ce6c72"} Oct 10 17:58:39 crc kubenswrapper[4799]: I1010 17:58:39.350118 4799 scope.go:117] "RemoveContainer" containerID="dd33e3dbb23d2352841389f3315c6d55fd80426e992aa646b76c36294cd3207a" Oct 10 17:58:39 crc kubenswrapper[4799]: I1010 17:58:39.349792 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-f6tbn" Oct 10 17:58:39 crc kubenswrapper[4799]: I1010 17:58:39.399875 4799 scope.go:117] "RemoveContainer" containerID="e5d4618dfe7ae43d47add3f6f8f4f5dd5e84e962dca354bc2cb24ee1352cea36" Oct 10 17:58:39 crc kubenswrapper[4799]: I1010 17:58:39.403350 4799 scope.go:117] "RemoveContainer" containerID="5e973f476cb0655a6e33e886e2a59fc6754febf3bf5a4718abcef307858985dd" Oct 10 17:58:39 crc kubenswrapper[4799]: E1010 17:58:39.403917 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:58:39 crc kubenswrapper[4799]: I1010 17:58:39.431627 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-f6tbn"] Oct 10 17:58:39 crc kubenswrapper[4799]: I1010 17:58:39.431675 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-f6tbn"] Oct 10 17:58:39 crc kubenswrapper[4799]: I1010 17:58:39.442415 4799 scope.go:117] "RemoveContainer" containerID="5a9750b13cdf66250325ee17808a2915e55614f436e287167655377fcf132f31" Oct 10 17:58:39 crc kubenswrapper[4799]: I1010 17:58:39.471808 4799 scope.go:117] "RemoveContainer" containerID="dd33e3dbb23d2352841389f3315c6d55fd80426e992aa646b76c36294cd3207a" Oct 10 17:58:39 crc kubenswrapper[4799]: E1010 17:58:39.472561 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd33e3dbb23d2352841389f3315c6d55fd80426e992aa646b76c36294cd3207a\": container with ID starting with dd33e3dbb23d2352841389f3315c6d55fd80426e992aa646b76c36294cd3207a not found: ID does not exist" containerID="dd33e3dbb23d2352841389f3315c6d55fd80426e992aa646b76c36294cd3207a" Oct 10 17:58:39 crc kubenswrapper[4799]: I1010 17:58:39.472613 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd33e3dbb23d2352841389f3315c6d55fd80426e992aa646b76c36294cd3207a"} err="failed to get container status \"dd33e3dbb23d2352841389f3315c6d55fd80426e992aa646b76c36294cd3207a\": rpc error: code = NotFound desc = could not find container \"dd33e3dbb23d2352841389f3315c6d55fd80426e992aa646b76c36294cd3207a\": container with ID starting with dd33e3dbb23d2352841389f3315c6d55fd80426e992aa646b76c36294cd3207a not found: ID does not exist" Oct 10 17:58:39 crc kubenswrapper[4799]: I1010 17:58:39.472649 4799 scope.go:117] "RemoveContainer" containerID="e5d4618dfe7ae43d47add3f6f8f4f5dd5e84e962dca354bc2cb24ee1352cea36" Oct 10 17:58:39 crc kubenswrapper[4799]: E1010 17:58:39.473365 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e5d4618dfe7ae43d47add3f6f8f4f5dd5e84e962dca354bc2cb24ee1352cea36\": container with ID starting with e5d4618dfe7ae43d47add3f6f8f4f5dd5e84e962dca354bc2cb24ee1352cea36 not found: ID does not exist" containerID="e5d4618dfe7ae43d47add3f6f8f4f5dd5e84e962dca354bc2cb24ee1352cea36" Oct 10 17:58:39 crc kubenswrapper[4799]: I1010 17:58:39.473403 4799 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"e5d4618dfe7ae43d47add3f6f8f4f5dd5e84e962dca354bc2cb24ee1352cea36"} err="failed to get container status \"e5d4618dfe7ae43d47add3f6f8f4f5dd5e84e962dca354bc2cb24ee1352cea36\": rpc error: code = NotFound desc = could not find container \"e5d4618dfe7ae43d47add3f6f8f4f5dd5e84e962dca354bc2cb24ee1352cea36\": container with ID starting with e5d4618dfe7ae43d47add3f6f8f4f5dd5e84e962dca354bc2cb24ee1352cea36 not found: ID does not exist" Oct 10 17:58:39 crc kubenswrapper[4799]: I1010 17:58:39.473426 4799 scope.go:117] "RemoveContainer" containerID="5a9750b13cdf66250325ee17808a2915e55614f436e287167655377fcf132f31" Oct 10 17:58:39 crc kubenswrapper[4799]: E1010 17:58:39.475133 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a9750b13cdf66250325ee17808a2915e55614f436e287167655377fcf132f31\": container with ID starting with 5a9750b13cdf66250325ee17808a2915e55614f436e287167655377fcf132f31 not found: ID does not exist" containerID="5a9750b13cdf66250325ee17808a2915e55614f436e287167655377fcf132f31" Oct 10 17:58:39 crc kubenswrapper[4799]: I1010 17:58:39.475207 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a9750b13cdf66250325ee17808a2915e55614f436e287167655377fcf132f31"} err="failed to get container status \"5a9750b13cdf66250325ee17808a2915e55614f436e287167655377fcf132f31\": rpc error: code = NotFound desc = could not find container \"5a9750b13cdf66250325ee17808a2915e55614f436e287167655377fcf132f31\": container with ID starting with 5a9750b13cdf66250325ee17808a2915e55614f436e287167655377fcf132f31 not found: ID does not exist" Oct 10 17:58:41 crc kubenswrapper[4799]: I1010 17:58:41.416547 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3b19085-6a43-4667-9b1e-67d7a34eed43" path="/var/lib/kubelet/pods/b3b19085-6a43-4667-9b1e-67d7a34eed43/volumes" Oct 10 17:58:41 crc kubenswrapper[4799]: I1010 17:58:41.840598 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-59fb5d4d57-hlz5n" Oct 10 17:58:46 crc kubenswrapper[4799]: I1010 17:58:46.629845 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Oct 10 17:58:46 crc kubenswrapper[4799]: E1010 17:58:46.631098 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9329dd06-0859-4bfa-ab8a-df404072c1f2" containerName="extract-content" Oct 10 17:58:46 crc kubenswrapper[4799]: I1010 17:58:46.631124 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="9329dd06-0859-4bfa-ab8a-df404072c1f2" containerName="extract-content" Oct 10 17:58:46 crc kubenswrapper[4799]: E1010 17:58:46.631147 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3b19085-6a43-4667-9b1e-67d7a34eed43" containerName="extract-content" Oct 10 17:58:46 crc kubenswrapper[4799]: I1010 17:58:46.631160 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3b19085-6a43-4667-9b1e-67d7a34eed43" containerName="extract-content" Oct 10 17:58:46 crc kubenswrapper[4799]: E1010 17:58:46.631200 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9329dd06-0859-4bfa-ab8a-df404072c1f2" containerName="extract-utilities" Oct 10 17:58:46 crc kubenswrapper[4799]: I1010 17:58:46.631215 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="9329dd06-0859-4bfa-ab8a-df404072c1f2" containerName="extract-utilities" Oct 10 17:58:46 crc kubenswrapper[4799]: E1010 17:58:46.631234 4799 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3b19085-6a43-4667-9b1e-67d7a34eed43" containerName="registry-server" Oct 10 17:58:46 crc kubenswrapper[4799]: I1010 17:58:46.631247 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3b19085-6a43-4667-9b1e-67d7a34eed43" containerName="registry-server" Oct 10 17:58:46 crc kubenswrapper[4799]: E1010 17:58:46.631277 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9329dd06-0859-4bfa-ab8a-df404072c1f2" containerName="registry-server" Oct 10 17:58:46 crc kubenswrapper[4799]: I1010 17:58:46.631291 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="9329dd06-0859-4bfa-ab8a-df404072c1f2" containerName="registry-server" Oct 10 17:58:46 crc kubenswrapper[4799]: E1010 17:58:46.631310 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3b19085-6a43-4667-9b1e-67d7a34eed43" containerName="extract-utilities" Oct 10 17:58:46 crc kubenswrapper[4799]: I1010 17:58:46.631321 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3b19085-6a43-4667-9b1e-67d7a34eed43" containerName="extract-utilities" Oct 10 17:58:46 crc kubenswrapper[4799]: I1010 17:58:46.631612 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="9329dd06-0859-4bfa-ab8a-df404072c1f2" containerName="registry-server" Oct 10 17:58:46 crc kubenswrapper[4799]: I1010 17:58:46.631638 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3b19085-6a43-4667-9b1e-67d7a34eed43" containerName="registry-server" Oct 10 17:58:46 crc kubenswrapper[4799]: I1010 17:58:46.632651 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 10 17:58:46 crc kubenswrapper[4799]: I1010 17:58:46.636103 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Oct 10 17:58:46 crc kubenswrapper[4799]: I1010 17:58:46.637489 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Oct 10 17:58:46 crc kubenswrapper[4799]: I1010 17:58:46.637955 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-jxn2l" Oct 10 17:58:46 crc kubenswrapper[4799]: I1010 17:58:46.640409 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 10 17:58:46 crc kubenswrapper[4799]: I1010 17:58:46.730296 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/00daf52d-7faf-4cc7-80e2-7b9db14d2196-openstack-config\") pod \"openstackclient\" (UID: \"00daf52d-7faf-4cc7-80e2-7b9db14d2196\") " pod="openstack/openstackclient" Oct 10 17:58:46 crc kubenswrapper[4799]: I1010 17:58:46.730351 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7sgb9\" (UniqueName: \"kubernetes.io/projected/00daf52d-7faf-4cc7-80e2-7b9db14d2196-kube-api-access-7sgb9\") pod \"openstackclient\" (UID: \"00daf52d-7faf-4cc7-80e2-7b9db14d2196\") " pod="openstack/openstackclient" Oct 10 17:58:46 crc kubenswrapper[4799]: I1010 17:58:46.730435 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/00daf52d-7faf-4cc7-80e2-7b9db14d2196-openstack-config-secret\") pod \"openstackclient\" (UID: \"00daf52d-7faf-4cc7-80e2-7b9db14d2196\") " 
pod="openstack/openstackclient" Oct 10 17:58:46 crc kubenswrapper[4799]: I1010 17:58:46.833538 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/00daf52d-7faf-4cc7-80e2-7b9db14d2196-openstack-config-secret\") pod \"openstackclient\" (UID: \"00daf52d-7faf-4cc7-80e2-7b9db14d2196\") " pod="openstack/openstackclient" Oct 10 17:58:46 crc kubenswrapper[4799]: I1010 17:58:46.833804 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/00daf52d-7faf-4cc7-80e2-7b9db14d2196-openstack-config\") pod \"openstackclient\" (UID: \"00daf52d-7faf-4cc7-80e2-7b9db14d2196\") " pod="openstack/openstackclient" Oct 10 17:58:46 crc kubenswrapper[4799]: I1010 17:58:46.834901 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7sgb9\" (UniqueName: \"kubernetes.io/projected/00daf52d-7faf-4cc7-80e2-7b9db14d2196-kube-api-access-7sgb9\") pod \"openstackclient\" (UID: \"00daf52d-7faf-4cc7-80e2-7b9db14d2196\") " pod="openstack/openstackclient" Oct 10 17:58:46 crc kubenswrapper[4799]: I1010 17:58:46.835230 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/00daf52d-7faf-4cc7-80e2-7b9db14d2196-openstack-config\") pod \"openstackclient\" (UID: \"00daf52d-7faf-4cc7-80e2-7b9db14d2196\") " pod="openstack/openstackclient" Oct 10 17:58:46 crc kubenswrapper[4799]: I1010 17:58:46.839600 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/00daf52d-7faf-4cc7-80e2-7b9db14d2196-openstack-config-secret\") pod \"openstackclient\" (UID: \"00daf52d-7faf-4cc7-80e2-7b9db14d2196\") " pod="openstack/openstackclient" Oct 10 17:58:46 crc kubenswrapper[4799]: I1010 17:58:46.855782 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7sgb9\" (UniqueName: \"kubernetes.io/projected/00daf52d-7faf-4cc7-80e2-7b9db14d2196-kube-api-access-7sgb9\") pod \"openstackclient\" (UID: \"00daf52d-7faf-4cc7-80e2-7b9db14d2196\") " pod="openstack/openstackclient" Oct 10 17:58:46 crc kubenswrapper[4799]: I1010 17:58:46.968080 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 10 17:58:47 crc kubenswrapper[4799]: I1010 17:58:47.459854 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 10 17:58:48 crc kubenswrapper[4799]: I1010 17:58:48.469479 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"00daf52d-7faf-4cc7-80e2-7b9db14d2196","Type":"ContainerStarted","Data":"1799817d4e72e1ae52fb951ccb4b28f64b395902e023fab586dee0d6ebbe54ce"} Oct 10 17:58:48 crc kubenswrapper[4799]: I1010 17:58:48.471413 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"00daf52d-7faf-4cc7-80e2-7b9db14d2196","Type":"ContainerStarted","Data":"edebb19275a62b7149047b68e8234954968ffd68e0ad7319cbdb0f5462b6ea19"} Oct 10 17:58:48 crc kubenswrapper[4799]: I1010 17:58:48.509625 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.509592326 podStartE2EDuration="2.509592326s" podCreationTimestamp="2025-10-10 17:58:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 17:58:48.495206345 +0000 UTC m=+5222.003530500" watchObservedRunningTime="2025-10-10 17:58:48.509592326 +0000 UTC m=+5222.017916481" Oct 10 17:58:51 crc kubenswrapper[4799]: I1010 17:58:51.403180 4799 scope.go:117] "RemoveContainer" containerID="5e973f476cb0655a6e33e886e2a59fc6754febf3bf5a4718abcef307858985dd" Oct 10 17:58:51 crc kubenswrapper[4799]: E1010 17:58:51.403990 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:59:06 crc kubenswrapper[4799]: I1010 17:59:06.402649 4799 scope.go:117] "RemoveContainer" containerID="5e973f476cb0655a6e33e886e2a59fc6754febf3bf5a4718abcef307858985dd" Oct 10 17:59:06 crc kubenswrapper[4799]: E1010 17:59:06.403722 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 17:59:07 crc kubenswrapper[4799]: I1010 17:59:07.063013 4799 scope.go:117] "RemoveContainer" containerID="742c50e0df627f07d2fd5874c5818e252c7ecc6151ae70498b7548552ee8c0ca" Oct 10 17:59:07 crc kubenswrapper[4799]: I1010 17:59:07.097035 4799 scope.go:117] "RemoveContainer" containerID="3ba5e4017371256e188302c77eaf932b996d0c0f3a6d773220c94fa9ead606b2" Oct 10 17:59:20 crc kubenswrapper[4799]: I1010 17:59:20.926703 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4bkxk"] Oct 10 17:59:20 crc kubenswrapper[4799]: I1010 17:59:20.929998 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4bkxk" Oct 10 17:59:20 crc kubenswrapper[4799]: I1010 17:59:20.940020 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4bkxk"] Oct 10 17:59:21 crc kubenswrapper[4799]: I1010 17:59:21.002589 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/942b352e-7117-44f8-9e42-47eae36e7159-catalog-content\") pod \"certified-operators-4bkxk\" (UID: \"942b352e-7117-44f8-9e42-47eae36e7159\") " pod="openshift-marketplace/certified-operators-4bkxk" Oct 10 17:59:21 crc kubenswrapper[4799]: I1010 17:59:21.002893 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/942b352e-7117-44f8-9e42-47eae36e7159-utilities\") pod \"certified-operators-4bkxk\" (UID: \"942b352e-7117-44f8-9e42-47eae36e7159\") " pod="openshift-marketplace/certified-operators-4bkxk" Oct 10 17:59:21 crc kubenswrapper[4799]: I1010 17:59:21.003100 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gh45v\" (UniqueName: \"kubernetes.io/projected/942b352e-7117-44f8-9e42-47eae36e7159-kube-api-access-gh45v\") pod \"certified-operators-4bkxk\" (UID: \"942b352e-7117-44f8-9e42-47eae36e7159\") " pod="openshift-marketplace/certified-operators-4bkxk" Oct 10 17:59:21 crc kubenswrapper[4799]: I1010 17:59:21.104532 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/942b352e-7117-44f8-9e42-47eae36e7159-catalog-content\") pod \"certified-operators-4bkxk\" (UID: \"942b352e-7117-44f8-9e42-47eae36e7159\") " pod="openshift-marketplace/certified-operators-4bkxk" Oct 10 17:59:21 crc kubenswrapper[4799]: I1010 17:59:21.104626 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/942b352e-7117-44f8-9e42-47eae36e7159-utilities\") pod \"certified-operators-4bkxk\" (UID: \"942b352e-7117-44f8-9e42-47eae36e7159\") " pod="openshift-marketplace/certified-operators-4bkxk" Oct 10 17:59:21 crc kubenswrapper[4799]: I1010 17:59:21.104690 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gh45v\" (UniqueName: \"kubernetes.io/projected/942b352e-7117-44f8-9e42-47eae36e7159-kube-api-access-gh45v\") pod \"certified-operators-4bkxk\" (UID: \"942b352e-7117-44f8-9e42-47eae36e7159\") " pod="openshift-marketplace/certified-operators-4bkxk" Oct 10 17:59:21 crc kubenswrapper[4799]: I1010 17:59:21.105478 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/942b352e-7117-44f8-9e42-47eae36e7159-catalog-content\") pod \"certified-operators-4bkxk\" (UID: \"942b352e-7117-44f8-9e42-47eae36e7159\") " pod="openshift-marketplace/certified-operators-4bkxk" Oct 10 17:59:21 crc kubenswrapper[4799]: I1010 17:59:21.105689 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/942b352e-7117-44f8-9e42-47eae36e7159-utilities\") pod \"certified-operators-4bkxk\" (UID: \"942b352e-7117-44f8-9e42-47eae36e7159\") " pod="openshift-marketplace/certified-operators-4bkxk" Oct 10 17:59:21 crc kubenswrapper[4799]: I1010 17:59:21.130129 4799 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-gh45v\" (UniqueName: \"kubernetes.io/projected/942b352e-7117-44f8-9e42-47eae36e7159-kube-api-access-gh45v\") pod \"certified-operators-4bkxk\" (UID: \"942b352e-7117-44f8-9e42-47eae36e7159\") " pod="openshift-marketplace/certified-operators-4bkxk" Oct 10 17:59:21 crc kubenswrapper[4799]: I1010 17:59:21.250125 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4bkxk" Oct 10 17:59:21 crc kubenswrapper[4799]: I1010 17:59:21.402125 4799 scope.go:117] "RemoveContainer" containerID="5e973f476cb0655a6e33e886e2a59fc6754febf3bf5a4718abcef307858985dd" Oct 10 17:59:21 crc kubenswrapper[4799]: I1010 17:59:21.791870 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4bkxk"] Oct 10 17:59:21 crc kubenswrapper[4799]: W1010 17:59:21.800721 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod942b352e_7117_44f8_9e42_47eae36e7159.slice/crio-2970e9bbba2acbef70914ccb232bfb6c82e4bb96a52523adea05c0d2add3f216 WatchSource:0}: Error finding container 2970e9bbba2acbef70914ccb232bfb6c82e4bb96a52523adea05c0d2add3f216: Status 404 returned error can't find the container with id 2970e9bbba2acbef70914ccb232bfb6c82e4bb96a52523adea05c0d2add3f216 Oct 10 17:59:21 crc kubenswrapper[4799]: I1010 17:59:21.843373 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"7d93aa50210daf6f851b294fa12ece2a292bc34c273170b02b29c3a462fa4bf5"} Oct 10 17:59:21 crc kubenswrapper[4799]: I1010 17:59:21.845917 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4bkxk" event={"ID":"942b352e-7117-44f8-9e42-47eae36e7159","Type":"ContainerStarted","Data":"2970e9bbba2acbef70914ccb232bfb6c82e4bb96a52523adea05c0d2add3f216"} Oct 10 17:59:22 crc kubenswrapper[4799]: I1010 17:59:22.854712 4799 generic.go:334] "Generic (PLEG): container finished" podID="942b352e-7117-44f8-9e42-47eae36e7159" containerID="88bf7126e9a56f16e38808946ca5bde98328b1bc5f3653b7bf6409a7d7d6793f" exitCode=0 Oct 10 17:59:22 crc kubenswrapper[4799]: I1010 17:59:22.854777 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4bkxk" event={"ID":"942b352e-7117-44f8-9e42-47eae36e7159","Type":"ContainerDied","Data":"88bf7126e9a56f16e38808946ca5bde98328b1bc5f3653b7bf6409a7d7d6793f"} Oct 10 17:59:23 crc kubenswrapper[4799]: I1010 17:59:23.870639 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4bkxk" event={"ID":"942b352e-7117-44f8-9e42-47eae36e7159","Type":"ContainerStarted","Data":"ba265914c16e719454b40751fdade020bf24629b3abd508fa51830a793bdbfed"} Oct 10 17:59:24 crc kubenswrapper[4799]: I1010 17:59:24.886147 4799 generic.go:334] "Generic (PLEG): container finished" podID="942b352e-7117-44f8-9e42-47eae36e7159" containerID="ba265914c16e719454b40751fdade020bf24629b3abd508fa51830a793bdbfed" exitCode=0 Oct 10 17:59:24 crc kubenswrapper[4799]: I1010 17:59:24.886438 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4bkxk" event={"ID":"942b352e-7117-44f8-9e42-47eae36e7159","Type":"ContainerDied","Data":"ba265914c16e719454b40751fdade020bf24629b3abd508fa51830a793bdbfed"} Oct 10 17:59:25 crc 
kubenswrapper[4799]: I1010 17:59:25.901000 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4bkxk" event={"ID":"942b352e-7117-44f8-9e42-47eae36e7159","Type":"ContainerStarted","Data":"2a8fc746018756113e7fec80b22adecd1ff7852d9d0907832a7b4c83dc08e5dc"} Oct 10 17:59:25 crc kubenswrapper[4799]: I1010 17:59:25.938059 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4bkxk" podStartSLOduration=3.44748448 podStartE2EDuration="5.938029964s" podCreationTimestamp="2025-10-10 17:59:20 +0000 UTC" firstStartedPulling="2025-10-10 17:59:22.857296336 +0000 UTC m=+5256.365620441" lastFinishedPulling="2025-10-10 17:59:25.34784178 +0000 UTC m=+5258.856165925" observedRunningTime="2025-10-10 17:59:25.933850163 +0000 UTC m=+5259.442174338" watchObservedRunningTime="2025-10-10 17:59:25.938029964 +0000 UTC m=+5259.446354119" Oct 10 17:59:31 crc kubenswrapper[4799]: I1010 17:59:31.250458 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4bkxk" Oct 10 17:59:31 crc kubenswrapper[4799]: I1010 17:59:31.251078 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4bkxk" Oct 10 17:59:31 crc kubenswrapper[4799]: I1010 17:59:31.328488 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4bkxk" Oct 10 17:59:32 crc kubenswrapper[4799]: I1010 17:59:32.050978 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4bkxk" Oct 10 17:59:32 crc kubenswrapper[4799]: I1010 17:59:32.110904 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4bkxk"] Oct 10 17:59:33 crc kubenswrapper[4799]: I1010 17:59:33.994796 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4bkxk" podUID="942b352e-7117-44f8-9e42-47eae36e7159" containerName="registry-server" containerID="cri-o://2a8fc746018756113e7fec80b22adecd1ff7852d9d0907832a7b4c83dc08e5dc" gracePeriod=2 Oct 10 17:59:34 crc kubenswrapper[4799]: I1010 17:59:34.510879 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4bkxk" Oct 10 17:59:34 crc kubenswrapper[4799]: I1010 17:59:34.673630 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/942b352e-7117-44f8-9e42-47eae36e7159-utilities" (OuterVolumeSpecName: "utilities") pod "942b352e-7117-44f8-9e42-47eae36e7159" (UID: "942b352e-7117-44f8-9e42-47eae36e7159"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:59:34 crc kubenswrapper[4799]: I1010 17:59:34.671523 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/942b352e-7117-44f8-9e42-47eae36e7159-utilities\") pod \"942b352e-7117-44f8-9e42-47eae36e7159\" (UID: \"942b352e-7117-44f8-9e42-47eae36e7159\") " Oct 10 17:59:34 crc kubenswrapper[4799]: I1010 17:59:34.674000 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gh45v\" (UniqueName: \"kubernetes.io/projected/942b352e-7117-44f8-9e42-47eae36e7159-kube-api-access-gh45v\") pod \"942b352e-7117-44f8-9e42-47eae36e7159\" (UID: \"942b352e-7117-44f8-9e42-47eae36e7159\") " Oct 10 17:59:34 crc kubenswrapper[4799]: I1010 17:59:34.675524 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/942b352e-7117-44f8-9e42-47eae36e7159-catalog-content\") pod \"942b352e-7117-44f8-9e42-47eae36e7159\" (UID: \"942b352e-7117-44f8-9e42-47eae36e7159\") " Oct 10 17:59:34 crc kubenswrapper[4799]: I1010 17:59:34.676642 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/942b352e-7117-44f8-9e42-47eae36e7159-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 17:59:34 crc kubenswrapper[4799]: I1010 17:59:34.682654 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/942b352e-7117-44f8-9e42-47eae36e7159-kube-api-access-gh45v" (OuterVolumeSpecName: "kube-api-access-gh45v") pod "942b352e-7117-44f8-9e42-47eae36e7159" (UID: "942b352e-7117-44f8-9e42-47eae36e7159"). InnerVolumeSpecName "kube-api-access-gh45v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 17:59:34 crc kubenswrapper[4799]: I1010 17:59:34.778315 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gh45v\" (UniqueName: \"kubernetes.io/projected/942b352e-7117-44f8-9e42-47eae36e7159-kube-api-access-gh45v\") on node \"crc\" DevicePath \"\"" Oct 10 17:59:35 crc kubenswrapper[4799]: I1010 17:59:35.016737 4799 generic.go:334] "Generic (PLEG): container finished" podID="942b352e-7117-44f8-9e42-47eae36e7159" containerID="2a8fc746018756113e7fec80b22adecd1ff7852d9d0907832a7b4c83dc08e5dc" exitCode=0 Oct 10 17:59:35 crc kubenswrapper[4799]: I1010 17:59:35.016836 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4bkxk" event={"ID":"942b352e-7117-44f8-9e42-47eae36e7159","Type":"ContainerDied","Data":"2a8fc746018756113e7fec80b22adecd1ff7852d9d0907832a7b4c83dc08e5dc"} Oct 10 17:59:35 crc kubenswrapper[4799]: I1010 17:59:35.016912 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4bkxk" event={"ID":"942b352e-7117-44f8-9e42-47eae36e7159","Type":"ContainerDied","Data":"2970e9bbba2acbef70914ccb232bfb6c82e4bb96a52523adea05c0d2add3f216"} Oct 10 17:59:35 crc kubenswrapper[4799]: I1010 17:59:35.016999 4799 scope.go:117] "RemoveContainer" containerID="2a8fc746018756113e7fec80b22adecd1ff7852d9d0907832a7b4c83dc08e5dc" Oct 10 17:59:35 crc kubenswrapper[4799]: I1010 17:59:35.016869 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4bkxk" Oct 10 17:59:35 crc kubenswrapper[4799]: I1010 17:59:35.049073 4799 scope.go:117] "RemoveContainer" containerID="ba265914c16e719454b40751fdade020bf24629b3abd508fa51830a793bdbfed" Oct 10 17:59:35 crc kubenswrapper[4799]: I1010 17:59:35.069232 4799 scope.go:117] "RemoveContainer" containerID="88bf7126e9a56f16e38808946ca5bde98328b1bc5f3653b7bf6409a7d7d6793f" Oct 10 17:59:35 crc kubenswrapper[4799]: I1010 17:59:35.109088 4799 scope.go:117] "RemoveContainer" containerID="2a8fc746018756113e7fec80b22adecd1ff7852d9d0907832a7b4c83dc08e5dc" Oct 10 17:59:35 crc kubenswrapper[4799]: E1010 17:59:35.109612 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a8fc746018756113e7fec80b22adecd1ff7852d9d0907832a7b4c83dc08e5dc\": container with ID starting with 2a8fc746018756113e7fec80b22adecd1ff7852d9d0907832a7b4c83dc08e5dc not found: ID does not exist" containerID="2a8fc746018756113e7fec80b22adecd1ff7852d9d0907832a7b4c83dc08e5dc" Oct 10 17:59:35 crc kubenswrapper[4799]: I1010 17:59:35.109650 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a8fc746018756113e7fec80b22adecd1ff7852d9d0907832a7b4c83dc08e5dc"} err="failed to get container status \"2a8fc746018756113e7fec80b22adecd1ff7852d9d0907832a7b4c83dc08e5dc\": rpc error: code = NotFound desc = could not find container \"2a8fc746018756113e7fec80b22adecd1ff7852d9d0907832a7b4c83dc08e5dc\": container with ID starting with 2a8fc746018756113e7fec80b22adecd1ff7852d9d0907832a7b4c83dc08e5dc not found: ID does not exist" Oct 10 17:59:35 crc kubenswrapper[4799]: I1010 17:59:35.109673 4799 scope.go:117] "RemoveContainer" containerID="ba265914c16e719454b40751fdade020bf24629b3abd508fa51830a793bdbfed" Oct 10 17:59:35 crc kubenswrapper[4799]: E1010 17:59:35.109990 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba265914c16e719454b40751fdade020bf24629b3abd508fa51830a793bdbfed\": container with ID starting with ba265914c16e719454b40751fdade020bf24629b3abd508fa51830a793bdbfed not found: ID does not exist" containerID="ba265914c16e719454b40751fdade020bf24629b3abd508fa51830a793bdbfed" Oct 10 17:59:35 crc kubenswrapper[4799]: I1010 17:59:35.110067 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba265914c16e719454b40751fdade020bf24629b3abd508fa51830a793bdbfed"} err="failed to get container status \"ba265914c16e719454b40751fdade020bf24629b3abd508fa51830a793bdbfed\": rpc error: code = NotFound desc = could not find container \"ba265914c16e719454b40751fdade020bf24629b3abd508fa51830a793bdbfed\": container with ID starting with ba265914c16e719454b40751fdade020bf24629b3abd508fa51830a793bdbfed not found: ID does not exist" Oct 10 17:59:35 crc kubenswrapper[4799]: I1010 17:59:35.110097 4799 scope.go:117] "RemoveContainer" containerID="88bf7126e9a56f16e38808946ca5bde98328b1bc5f3653b7bf6409a7d7d6793f" Oct 10 17:59:35 crc kubenswrapper[4799]: E1010 17:59:35.110408 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88bf7126e9a56f16e38808946ca5bde98328b1bc5f3653b7bf6409a7d7d6793f\": container with ID starting with 88bf7126e9a56f16e38808946ca5bde98328b1bc5f3653b7bf6409a7d7d6793f not found: ID does not exist" containerID="88bf7126e9a56f16e38808946ca5bde98328b1bc5f3653b7bf6409a7d7d6793f" 
Oct 10 17:59:35 crc kubenswrapper[4799]: I1010 17:59:35.110433 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88bf7126e9a56f16e38808946ca5bde98328b1bc5f3653b7bf6409a7d7d6793f"} err="failed to get container status \"88bf7126e9a56f16e38808946ca5bde98328b1bc5f3653b7bf6409a7d7d6793f\": rpc error: code = NotFound desc = could not find container \"88bf7126e9a56f16e38808946ca5bde98328b1bc5f3653b7bf6409a7d7d6793f\": container with ID starting with 88bf7126e9a56f16e38808946ca5bde98328b1bc5f3653b7bf6409a7d7d6793f not found: ID does not exist" Oct 10 17:59:35 crc kubenswrapper[4799]: I1010 17:59:35.133967 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/942b352e-7117-44f8-9e42-47eae36e7159-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "942b352e-7117-44f8-9e42-47eae36e7159" (UID: "942b352e-7117-44f8-9e42-47eae36e7159"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 17:59:35 crc kubenswrapper[4799]: I1010 17:59:35.185215 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/942b352e-7117-44f8-9e42-47eae36e7159-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 17:59:35 crc kubenswrapper[4799]: I1010 17:59:35.391811 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4bkxk"] Oct 10 17:59:35 crc kubenswrapper[4799]: I1010 17:59:35.419070 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4bkxk"] Oct 10 17:59:37 crc kubenswrapper[4799]: I1010 17:59:37.416363 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="942b352e-7117-44f8-9e42-47eae36e7159" path="/var/lib/kubelet/pods/942b352e-7117-44f8-9e42-47eae36e7159/volumes" Oct 10 18:00:00 crc kubenswrapper[4799]: I1010 18:00:00.152038 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335320-vfrdw"] Oct 10 18:00:00 crc kubenswrapper[4799]: E1010 18:00:00.152742 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="942b352e-7117-44f8-9e42-47eae36e7159" containerName="extract-utilities" Oct 10 18:00:00 crc kubenswrapper[4799]: I1010 18:00:00.152774 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="942b352e-7117-44f8-9e42-47eae36e7159" containerName="extract-utilities" Oct 10 18:00:00 crc kubenswrapper[4799]: E1010 18:00:00.152806 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="942b352e-7117-44f8-9e42-47eae36e7159" containerName="registry-server" Oct 10 18:00:00 crc kubenswrapper[4799]: I1010 18:00:00.152814 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="942b352e-7117-44f8-9e42-47eae36e7159" containerName="registry-server" Oct 10 18:00:00 crc kubenswrapper[4799]: E1010 18:00:00.152825 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="942b352e-7117-44f8-9e42-47eae36e7159" containerName="extract-content" Oct 10 18:00:00 crc kubenswrapper[4799]: I1010 18:00:00.152831 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="942b352e-7117-44f8-9e42-47eae36e7159" containerName="extract-content" Oct 10 18:00:00 crc kubenswrapper[4799]: I1010 18:00:00.153010 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="942b352e-7117-44f8-9e42-47eae36e7159" containerName="registry-server" Oct 10 18:00:00 crc kubenswrapper[4799]: I1010 
18:00:00.153584 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335320-vfrdw" Oct 10 18:00:00 crc kubenswrapper[4799]: I1010 18:00:00.157348 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 10 18:00:00 crc kubenswrapper[4799]: I1010 18:00:00.158057 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 10 18:00:00 crc kubenswrapper[4799]: I1010 18:00:00.169408 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335320-vfrdw"] Oct 10 18:00:00 crc kubenswrapper[4799]: I1010 18:00:00.252322 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/24939b83-2fd0-4568-9515-10d4d88f116d-config-volume\") pod \"collect-profiles-29335320-vfrdw\" (UID: \"24939b83-2fd0-4568-9515-10d4d88f116d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335320-vfrdw" Oct 10 18:00:00 crc kubenswrapper[4799]: I1010 18:00:00.252408 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/24939b83-2fd0-4568-9515-10d4d88f116d-secret-volume\") pod \"collect-profiles-29335320-vfrdw\" (UID: \"24939b83-2fd0-4568-9515-10d4d88f116d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335320-vfrdw" Oct 10 18:00:00 crc kubenswrapper[4799]: I1010 18:00:00.252678 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sh6rh\" (UniqueName: \"kubernetes.io/projected/24939b83-2fd0-4568-9515-10d4d88f116d-kube-api-access-sh6rh\") pod \"collect-profiles-29335320-vfrdw\" (UID: \"24939b83-2fd0-4568-9515-10d4d88f116d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335320-vfrdw" Oct 10 18:00:00 crc kubenswrapper[4799]: I1010 18:00:00.356693 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/24939b83-2fd0-4568-9515-10d4d88f116d-config-volume\") pod \"collect-profiles-29335320-vfrdw\" (UID: \"24939b83-2fd0-4568-9515-10d4d88f116d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335320-vfrdw" Oct 10 18:00:00 crc kubenswrapper[4799]: I1010 18:00:00.358628 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/24939b83-2fd0-4568-9515-10d4d88f116d-secret-volume\") pod \"collect-profiles-29335320-vfrdw\" (UID: \"24939b83-2fd0-4568-9515-10d4d88f116d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335320-vfrdw" Oct 10 18:00:00 crc kubenswrapper[4799]: I1010 18:00:00.357915 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/24939b83-2fd0-4568-9515-10d4d88f116d-config-volume\") pod \"collect-profiles-29335320-vfrdw\" (UID: \"24939b83-2fd0-4568-9515-10d4d88f116d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335320-vfrdw" Oct 10 18:00:00 crc kubenswrapper[4799]: I1010 18:00:00.358817 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sh6rh\" (UniqueName: 
\"kubernetes.io/projected/24939b83-2fd0-4568-9515-10d4d88f116d-kube-api-access-sh6rh\") pod \"collect-profiles-29335320-vfrdw\" (UID: \"24939b83-2fd0-4568-9515-10d4d88f116d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335320-vfrdw" Oct 10 18:00:00 crc kubenswrapper[4799]: I1010 18:00:00.368594 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/24939b83-2fd0-4568-9515-10d4d88f116d-secret-volume\") pod \"collect-profiles-29335320-vfrdw\" (UID: \"24939b83-2fd0-4568-9515-10d4d88f116d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335320-vfrdw" Oct 10 18:00:00 crc kubenswrapper[4799]: I1010 18:00:00.386115 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sh6rh\" (UniqueName: \"kubernetes.io/projected/24939b83-2fd0-4568-9515-10d4d88f116d-kube-api-access-sh6rh\") pod \"collect-profiles-29335320-vfrdw\" (UID: \"24939b83-2fd0-4568-9515-10d4d88f116d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335320-vfrdw" Oct 10 18:00:00 crc kubenswrapper[4799]: I1010 18:00:00.491274 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335320-vfrdw" Oct 10 18:00:01 crc kubenswrapper[4799]: I1010 18:00:01.019665 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335320-vfrdw"] Oct 10 18:00:01 crc kubenswrapper[4799]: I1010 18:00:01.272168 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335320-vfrdw" event={"ID":"24939b83-2fd0-4568-9515-10d4d88f116d","Type":"ContainerStarted","Data":"707419f8a9ace0477f83d66e9fa0e461e9a31db43033bdaec034ae2e7a918417"} Oct 10 18:00:01 crc kubenswrapper[4799]: I1010 18:00:01.272521 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335320-vfrdw" event={"ID":"24939b83-2fd0-4568-9515-10d4d88f116d","Type":"ContainerStarted","Data":"3175d0288784e3bb05e0c6187a6385ffcbca27710f13bbadd47ecee8e514e214"} Oct 10 18:00:02 crc kubenswrapper[4799]: I1010 18:00:02.285101 4799 generic.go:334] "Generic (PLEG): container finished" podID="24939b83-2fd0-4568-9515-10d4d88f116d" containerID="707419f8a9ace0477f83d66e9fa0e461e9a31db43033bdaec034ae2e7a918417" exitCode=0 Oct 10 18:00:02 crc kubenswrapper[4799]: I1010 18:00:02.285185 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335320-vfrdw" event={"ID":"24939b83-2fd0-4568-9515-10d4d88f116d","Type":"ContainerDied","Data":"707419f8a9ace0477f83d66e9fa0e461e9a31db43033bdaec034ae2e7a918417"} Oct 10 18:00:03 crc kubenswrapper[4799]: I1010 18:00:03.711294 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335320-vfrdw" Oct 10 18:00:03 crc kubenswrapper[4799]: I1010 18:00:03.829219 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/24939b83-2fd0-4568-9515-10d4d88f116d-config-volume\") pod \"24939b83-2fd0-4568-9515-10d4d88f116d\" (UID: \"24939b83-2fd0-4568-9515-10d4d88f116d\") " Oct 10 18:00:03 crc kubenswrapper[4799]: I1010 18:00:03.829322 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sh6rh\" (UniqueName: \"kubernetes.io/projected/24939b83-2fd0-4568-9515-10d4d88f116d-kube-api-access-sh6rh\") pod \"24939b83-2fd0-4568-9515-10d4d88f116d\" (UID: \"24939b83-2fd0-4568-9515-10d4d88f116d\") " Oct 10 18:00:03 crc kubenswrapper[4799]: I1010 18:00:03.829456 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/24939b83-2fd0-4568-9515-10d4d88f116d-secret-volume\") pod \"24939b83-2fd0-4568-9515-10d4d88f116d\" (UID: \"24939b83-2fd0-4568-9515-10d4d88f116d\") " Oct 10 18:00:03 crc kubenswrapper[4799]: I1010 18:00:03.832371 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/24939b83-2fd0-4568-9515-10d4d88f116d-config-volume" (OuterVolumeSpecName: "config-volume") pod "24939b83-2fd0-4568-9515-10d4d88f116d" (UID: "24939b83-2fd0-4568-9515-10d4d88f116d"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:00:03 crc kubenswrapper[4799]: I1010 18:00:03.839052 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24939b83-2fd0-4568-9515-10d4d88f116d-kube-api-access-sh6rh" (OuterVolumeSpecName: "kube-api-access-sh6rh") pod "24939b83-2fd0-4568-9515-10d4d88f116d" (UID: "24939b83-2fd0-4568-9515-10d4d88f116d"). InnerVolumeSpecName "kube-api-access-sh6rh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:00:03 crc kubenswrapper[4799]: I1010 18:00:03.854125 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24939b83-2fd0-4568-9515-10d4d88f116d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "24939b83-2fd0-4568-9515-10d4d88f116d" (UID: "24939b83-2fd0-4568-9515-10d4d88f116d"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:00:03 crc kubenswrapper[4799]: I1010 18:00:03.931495 4799 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/24939b83-2fd0-4568-9515-10d4d88f116d-config-volume\") on node \"crc\" DevicePath \"\"" Oct 10 18:00:03 crc kubenswrapper[4799]: I1010 18:00:03.931528 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sh6rh\" (UniqueName: \"kubernetes.io/projected/24939b83-2fd0-4568-9515-10d4d88f116d-kube-api-access-sh6rh\") on node \"crc\" DevicePath \"\"" Oct 10 18:00:03 crc kubenswrapper[4799]: I1010 18:00:03.931538 4799 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/24939b83-2fd0-4568-9515-10d4d88f116d-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 10 18:00:04 crc kubenswrapper[4799]: I1010 18:00:04.307157 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335320-vfrdw" event={"ID":"24939b83-2fd0-4568-9515-10d4d88f116d","Type":"ContainerDied","Data":"3175d0288784e3bb05e0c6187a6385ffcbca27710f13bbadd47ecee8e514e214"} Oct 10 18:00:04 crc kubenswrapper[4799]: I1010 18:00:04.307474 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3175d0288784e3bb05e0c6187a6385ffcbca27710f13bbadd47ecee8e514e214" Oct 10 18:00:04 crc kubenswrapper[4799]: I1010 18:00:04.307211 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335320-vfrdw" Oct 10 18:00:04 crc kubenswrapper[4799]: I1010 18:00:04.402421 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335275-rm27g"] Oct 10 18:00:04 crc kubenswrapper[4799]: I1010 18:00:04.413894 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335275-rm27g"] Oct 10 18:00:05 crc kubenswrapper[4799]: I1010 18:00:05.426669 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a14b809d-d163-4d78-9062-534b9025aa9b" path="/var/lib/kubelet/pods/a14b809d-d163-4d78-9062-534b9025aa9b/volumes" Oct 10 18:00:07 crc kubenswrapper[4799]: I1010 18:00:07.292289 4799 scope.go:117] "RemoveContainer" containerID="daa4de57417be4d272cf2abbb626de4933c43b4ced1fcd17caffb6f8bdc941a6" Oct 10 18:00:33 crc kubenswrapper[4799]: I1010 18:00:33.174997 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-fvlgw"] Oct 10 18:00:33 crc kubenswrapper[4799]: E1010 18:00:33.175929 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24939b83-2fd0-4568-9515-10d4d88f116d" containerName="collect-profiles" Oct 10 18:00:33 crc kubenswrapper[4799]: I1010 18:00:33.175944 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="24939b83-2fd0-4568-9515-10d4d88f116d" containerName="collect-profiles" Oct 10 18:00:33 crc kubenswrapper[4799]: I1010 18:00:33.176134 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="24939b83-2fd0-4568-9515-10d4d88f116d" containerName="collect-profiles" Oct 10 18:00:33 crc kubenswrapper[4799]: I1010 18:00:33.176784 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-fvlgw" Oct 10 18:00:33 crc kubenswrapper[4799]: I1010 18:00:33.190968 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-fvlgw"] Oct 10 18:00:33 crc kubenswrapper[4799]: I1010 18:00:33.325886 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-clcd2\" (UniqueName: \"kubernetes.io/projected/33875892-5c9e-4315-98bd-cc799f670b18-kube-api-access-clcd2\") pod \"barbican-db-create-fvlgw\" (UID: \"33875892-5c9e-4315-98bd-cc799f670b18\") " pod="openstack/barbican-db-create-fvlgw" Oct 10 18:00:33 crc kubenswrapper[4799]: I1010 18:00:33.428342 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clcd2\" (UniqueName: \"kubernetes.io/projected/33875892-5c9e-4315-98bd-cc799f670b18-kube-api-access-clcd2\") pod \"barbican-db-create-fvlgw\" (UID: \"33875892-5c9e-4315-98bd-cc799f670b18\") " pod="openstack/barbican-db-create-fvlgw" Oct 10 18:00:33 crc kubenswrapper[4799]: I1010 18:00:33.454803 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-clcd2\" (UniqueName: \"kubernetes.io/projected/33875892-5c9e-4315-98bd-cc799f670b18-kube-api-access-clcd2\") pod \"barbican-db-create-fvlgw\" (UID: \"33875892-5c9e-4315-98bd-cc799f670b18\") " pod="openstack/barbican-db-create-fvlgw" Oct 10 18:00:33 crc kubenswrapper[4799]: I1010 18:00:33.544866 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-fvlgw" Oct 10 18:00:33 crc kubenswrapper[4799]: I1010 18:00:33.988745 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-fvlgw"] Oct 10 18:00:34 crc kubenswrapper[4799]: I1010 18:00:34.622907 4799 generic.go:334] "Generic (PLEG): container finished" podID="33875892-5c9e-4315-98bd-cc799f670b18" containerID="df2a19a9c85cf31c0420e54e60b29aa4ff7853062bd390bbcee8763c5b47d818" exitCode=0 Oct 10 18:00:34 crc kubenswrapper[4799]: I1010 18:00:34.622968 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-fvlgw" event={"ID":"33875892-5c9e-4315-98bd-cc799f670b18","Type":"ContainerDied","Data":"df2a19a9c85cf31c0420e54e60b29aa4ff7853062bd390bbcee8763c5b47d818"} Oct 10 18:00:34 crc kubenswrapper[4799]: I1010 18:00:34.623010 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-fvlgw" event={"ID":"33875892-5c9e-4315-98bd-cc799f670b18","Type":"ContainerStarted","Data":"7c0d8f2c59e9c2c4ec8cb3f22f2ce2743ef86c797a6406e9b00e6582a693e2a5"} Oct 10 18:00:36 crc kubenswrapper[4799]: I1010 18:00:36.104501 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-fvlgw" Oct 10 18:00:36 crc kubenswrapper[4799]: I1010 18:00:36.184269 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-clcd2\" (UniqueName: \"kubernetes.io/projected/33875892-5c9e-4315-98bd-cc799f670b18-kube-api-access-clcd2\") pod \"33875892-5c9e-4315-98bd-cc799f670b18\" (UID: \"33875892-5c9e-4315-98bd-cc799f670b18\") " Oct 10 18:00:36 crc kubenswrapper[4799]: I1010 18:00:36.192007 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33875892-5c9e-4315-98bd-cc799f670b18-kube-api-access-clcd2" (OuterVolumeSpecName: "kube-api-access-clcd2") pod "33875892-5c9e-4315-98bd-cc799f670b18" (UID: "33875892-5c9e-4315-98bd-cc799f670b18"). 
InnerVolumeSpecName "kube-api-access-clcd2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:00:36 crc kubenswrapper[4799]: I1010 18:00:36.285905 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-clcd2\" (UniqueName: \"kubernetes.io/projected/33875892-5c9e-4315-98bd-cc799f670b18-kube-api-access-clcd2\") on node \"crc\" DevicePath \"\"" Oct 10 18:00:36 crc kubenswrapper[4799]: I1010 18:00:36.643474 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-fvlgw" event={"ID":"33875892-5c9e-4315-98bd-cc799f670b18","Type":"ContainerDied","Data":"7c0d8f2c59e9c2c4ec8cb3f22f2ce2743ef86c797a6406e9b00e6582a693e2a5"} Oct 10 18:00:36 crc kubenswrapper[4799]: I1010 18:00:36.643536 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7c0d8f2c59e9c2c4ec8cb3f22f2ce2743ef86c797a6406e9b00e6582a693e2a5" Oct 10 18:00:36 crc kubenswrapper[4799]: I1010 18:00:36.643666 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-fvlgw" Oct 10 18:00:43 crc kubenswrapper[4799]: I1010 18:00:43.317578 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-f77e-account-create-4lxhb"] Oct 10 18:00:43 crc kubenswrapper[4799]: E1010 18:00:43.318878 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33875892-5c9e-4315-98bd-cc799f670b18" containerName="mariadb-database-create" Oct 10 18:00:43 crc kubenswrapper[4799]: I1010 18:00:43.318915 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="33875892-5c9e-4315-98bd-cc799f670b18" containerName="mariadb-database-create" Oct 10 18:00:43 crc kubenswrapper[4799]: I1010 18:00:43.319272 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="33875892-5c9e-4315-98bd-cc799f670b18" containerName="mariadb-database-create" Oct 10 18:00:43 crc kubenswrapper[4799]: I1010 18:00:43.320561 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-f77e-account-create-4lxhb" Oct 10 18:00:43 crc kubenswrapper[4799]: I1010 18:00:43.324312 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Oct 10 18:00:43 crc kubenswrapper[4799]: I1010 18:00:43.335233 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-f77e-account-create-4lxhb"] Oct 10 18:00:43 crc kubenswrapper[4799]: I1010 18:00:43.420189 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mg2dl\" (UniqueName: \"kubernetes.io/projected/e8a40d86-64a7-4f0f-91bc-82a5c67754c0-kube-api-access-mg2dl\") pod \"barbican-f77e-account-create-4lxhb\" (UID: \"e8a40d86-64a7-4f0f-91bc-82a5c67754c0\") " pod="openstack/barbican-f77e-account-create-4lxhb" Oct 10 18:00:43 crc kubenswrapper[4799]: I1010 18:00:43.521456 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mg2dl\" (UniqueName: \"kubernetes.io/projected/e8a40d86-64a7-4f0f-91bc-82a5c67754c0-kube-api-access-mg2dl\") pod \"barbican-f77e-account-create-4lxhb\" (UID: \"e8a40d86-64a7-4f0f-91bc-82a5c67754c0\") " pod="openstack/barbican-f77e-account-create-4lxhb" Oct 10 18:00:43 crc kubenswrapper[4799]: I1010 18:00:43.555344 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mg2dl\" (UniqueName: \"kubernetes.io/projected/e8a40d86-64a7-4f0f-91bc-82a5c67754c0-kube-api-access-mg2dl\") pod \"barbican-f77e-account-create-4lxhb\" (UID: \"e8a40d86-64a7-4f0f-91bc-82a5c67754c0\") " pod="openstack/barbican-f77e-account-create-4lxhb" Oct 10 18:00:43 crc kubenswrapper[4799]: I1010 18:00:43.658000 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-f77e-account-create-4lxhb" Oct 10 18:00:44 crc kubenswrapper[4799]: I1010 18:00:44.181110 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-f77e-account-create-4lxhb"] Oct 10 18:00:44 crc kubenswrapper[4799]: I1010 18:00:44.745669 4799 generic.go:334] "Generic (PLEG): container finished" podID="e8a40d86-64a7-4f0f-91bc-82a5c67754c0" containerID="670226d31cdc3ffda0f0631a1acdd07742a6dd714a8d0468414e3727487ec1ce" exitCode=0 Oct 10 18:00:44 crc kubenswrapper[4799]: I1010 18:00:44.745737 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-f77e-account-create-4lxhb" event={"ID":"e8a40d86-64a7-4f0f-91bc-82a5c67754c0","Type":"ContainerDied","Data":"670226d31cdc3ffda0f0631a1acdd07742a6dd714a8d0468414e3727487ec1ce"} Oct 10 18:00:44 crc kubenswrapper[4799]: I1010 18:00:44.745863 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-f77e-account-create-4lxhb" event={"ID":"e8a40d86-64a7-4f0f-91bc-82a5c67754c0","Type":"ContainerStarted","Data":"eb91282ff5551d897cd13dbf2e01687bad5816a4540d9323335eff470a520835"} Oct 10 18:00:46 crc kubenswrapper[4799]: I1010 18:00:46.161253 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-f77e-account-create-4lxhb" Oct 10 18:00:46 crc kubenswrapper[4799]: I1010 18:00:46.173146 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg2dl\" (UniqueName: \"kubernetes.io/projected/e8a40d86-64a7-4f0f-91bc-82a5c67754c0-kube-api-access-mg2dl\") pod \"e8a40d86-64a7-4f0f-91bc-82a5c67754c0\" (UID: \"e8a40d86-64a7-4f0f-91bc-82a5c67754c0\") " Oct 10 18:00:46 crc kubenswrapper[4799]: I1010 18:00:46.183351 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8a40d86-64a7-4f0f-91bc-82a5c67754c0-kube-api-access-mg2dl" (OuterVolumeSpecName: "kube-api-access-mg2dl") pod "e8a40d86-64a7-4f0f-91bc-82a5c67754c0" (UID: "e8a40d86-64a7-4f0f-91bc-82a5c67754c0"). InnerVolumeSpecName "kube-api-access-mg2dl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:00:46 crc kubenswrapper[4799]: I1010 18:00:46.275247 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg2dl\" (UniqueName: \"kubernetes.io/projected/e8a40d86-64a7-4f0f-91bc-82a5c67754c0-kube-api-access-mg2dl\") on node \"crc\" DevicePath \"\"" Oct 10 18:00:46 crc kubenswrapper[4799]: I1010 18:00:46.771737 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-f77e-account-create-4lxhb" event={"ID":"e8a40d86-64a7-4f0f-91bc-82a5c67754c0","Type":"ContainerDied","Data":"eb91282ff5551d897cd13dbf2e01687bad5816a4540d9323335eff470a520835"} Oct 10 18:00:46 crc kubenswrapper[4799]: I1010 18:00:46.771812 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eb91282ff5551d897cd13dbf2e01687bad5816a4540d9323335eff470a520835" Oct 10 18:00:46 crc kubenswrapper[4799]: I1010 18:00:46.771971 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-f77e-account-create-4lxhb" Oct 10 18:00:48 crc kubenswrapper[4799]: I1010 18:00:48.534263 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-9259b"] Oct 10 18:00:48 crc kubenswrapper[4799]: E1010 18:00:48.534963 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8a40d86-64a7-4f0f-91bc-82a5c67754c0" containerName="mariadb-account-create" Oct 10 18:00:48 crc kubenswrapper[4799]: I1010 18:00:48.534981 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8a40d86-64a7-4f0f-91bc-82a5c67754c0" containerName="mariadb-account-create" Oct 10 18:00:48 crc kubenswrapper[4799]: I1010 18:00:48.535216 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8a40d86-64a7-4f0f-91bc-82a5c67754c0" containerName="mariadb-account-create" Oct 10 18:00:48 crc kubenswrapper[4799]: I1010 18:00:48.537011 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-9259b" Oct 10 18:00:48 crc kubenswrapper[4799]: I1010 18:00:48.540085 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-rtkcl" Oct 10 18:00:48 crc kubenswrapper[4799]: I1010 18:00:48.541990 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 10 18:00:48 crc kubenswrapper[4799]: I1010 18:00:48.549054 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-9259b"] Oct 10 18:00:48 crc kubenswrapper[4799]: I1010 18:00:48.720532 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/09cb6e81-46ef-4b47-a9a6-33dacfd5f400-db-sync-config-data\") pod \"barbican-db-sync-9259b\" (UID: \"09cb6e81-46ef-4b47-a9a6-33dacfd5f400\") " pod="openstack/barbican-db-sync-9259b" Oct 10 18:00:48 crc kubenswrapper[4799]: I1010 18:00:48.720602 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09cb6e81-46ef-4b47-a9a6-33dacfd5f400-combined-ca-bundle\") pod \"barbican-db-sync-9259b\" (UID: \"09cb6e81-46ef-4b47-a9a6-33dacfd5f400\") " pod="openstack/barbican-db-sync-9259b" Oct 10 18:00:48 crc kubenswrapper[4799]: I1010 18:00:48.721002 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4kpxf\" (UniqueName: \"kubernetes.io/projected/09cb6e81-46ef-4b47-a9a6-33dacfd5f400-kube-api-access-4kpxf\") pod \"barbican-db-sync-9259b\" (UID: \"09cb6e81-46ef-4b47-a9a6-33dacfd5f400\") " pod="openstack/barbican-db-sync-9259b" Oct 10 18:00:48 crc kubenswrapper[4799]: I1010 18:00:48.823154 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09cb6e81-46ef-4b47-a9a6-33dacfd5f400-combined-ca-bundle\") pod \"barbican-db-sync-9259b\" (UID: \"09cb6e81-46ef-4b47-a9a6-33dacfd5f400\") " pod="openstack/barbican-db-sync-9259b" Oct 10 18:00:48 crc kubenswrapper[4799]: I1010 18:00:48.823408 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4kpxf\" (UniqueName: \"kubernetes.io/projected/09cb6e81-46ef-4b47-a9a6-33dacfd5f400-kube-api-access-4kpxf\") pod \"barbican-db-sync-9259b\" (UID: \"09cb6e81-46ef-4b47-a9a6-33dacfd5f400\") " pod="openstack/barbican-db-sync-9259b" Oct 10 18:00:48 crc kubenswrapper[4799]: I1010 18:00:48.823659 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/09cb6e81-46ef-4b47-a9a6-33dacfd5f400-db-sync-config-data\") pod \"barbican-db-sync-9259b\" (UID: \"09cb6e81-46ef-4b47-a9a6-33dacfd5f400\") " pod="openstack/barbican-db-sync-9259b" Oct 10 18:00:48 crc kubenswrapper[4799]: I1010 18:00:48.831260 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09cb6e81-46ef-4b47-a9a6-33dacfd5f400-combined-ca-bundle\") pod \"barbican-db-sync-9259b\" (UID: \"09cb6e81-46ef-4b47-a9a6-33dacfd5f400\") " pod="openstack/barbican-db-sync-9259b" Oct 10 18:00:48 crc kubenswrapper[4799]: I1010 18:00:48.843743 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: 
\"kubernetes.io/secret/09cb6e81-46ef-4b47-a9a6-33dacfd5f400-db-sync-config-data\") pod \"barbican-db-sync-9259b\" (UID: \"09cb6e81-46ef-4b47-a9a6-33dacfd5f400\") " pod="openstack/barbican-db-sync-9259b" Oct 10 18:00:48 crc kubenswrapper[4799]: I1010 18:00:48.861390 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4kpxf\" (UniqueName: \"kubernetes.io/projected/09cb6e81-46ef-4b47-a9a6-33dacfd5f400-kube-api-access-4kpxf\") pod \"barbican-db-sync-9259b\" (UID: \"09cb6e81-46ef-4b47-a9a6-33dacfd5f400\") " pod="openstack/barbican-db-sync-9259b" Oct 10 18:00:48 crc kubenswrapper[4799]: I1010 18:00:48.866412 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-9259b" Oct 10 18:00:49 crc kubenswrapper[4799]: I1010 18:00:49.151565 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-9259b"] Oct 10 18:00:49 crc kubenswrapper[4799]: W1010 18:00:49.157932 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09cb6e81_46ef_4b47_a9a6_33dacfd5f400.slice/crio-cdc1fc0813fe75f5e319e949c0669bc53788958dd2afd5648eeb2582ccb7b917 WatchSource:0}: Error finding container cdc1fc0813fe75f5e319e949c0669bc53788958dd2afd5648eeb2582ccb7b917: Status 404 returned error can't find the container with id cdc1fc0813fe75f5e319e949c0669bc53788958dd2afd5648eeb2582ccb7b917 Oct 10 18:00:49 crc kubenswrapper[4799]: I1010 18:00:49.803417 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-9259b" event={"ID":"09cb6e81-46ef-4b47-a9a6-33dacfd5f400","Type":"ContainerStarted","Data":"965948bd2fdcb674ea5c9f30d8aa5523ac6ca7b995603cfcf1e0370703ae0769"} Oct 10 18:00:49 crc kubenswrapper[4799]: I1010 18:00:49.803797 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-9259b" event={"ID":"09cb6e81-46ef-4b47-a9a6-33dacfd5f400","Type":"ContainerStarted","Data":"cdc1fc0813fe75f5e319e949c0669bc53788958dd2afd5648eeb2582ccb7b917"} Oct 10 18:00:49 crc kubenswrapper[4799]: I1010 18:00:49.830316 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-9259b" podStartSLOduration=1.830286089 podStartE2EDuration="1.830286089s" podCreationTimestamp="2025-10-10 18:00:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:00:49.821153776 +0000 UTC m=+5343.329477931" watchObservedRunningTime="2025-10-10 18:00:49.830286089 +0000 UTC m=+5343.338610244" Oct 10 18:00:50 crc kubenswrapper[4799]: I1010 18:00:50.816427 4799 generic.go:334] "Generic (PLEG): container finished" podID="09cb6e81-46ef-4b47-a9a6-33dacfd5f400" containerID="965948bd2fdcb674ea5c9f30d8aa5523ac6ca7b995603cfcf1e0370703ae0769" exitCode=0 Oct 10 18:00:50 crc kubenswrapper[4799]: I1010 18:00:50.816570 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-9259b" event={"ID":"09cb6e81-46ef-4b47-a9a6-33dacfd5f400","Type":"ContainerDied","Data":"965948bd2fdcb674ea5c9f30d8aa5523ac6ca7b995603cfcf1e0370703ae0769"} Oct 10 18:00:52 crc kubenswrapper[4799]: I1010 18:00:52.219567 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-9259b" Oct 10 18:00:52 crc kubenswrapper[4799]: I1010 18:00:52.288681 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/09cb6e81-46ef-4b47-a9a6-33dacfd5f400-db-sync-config-data\") pod \"09cb6e81-46ef-4b47-a9a6-33dacfd5f400\" (UID: \"09cb6e81-46ef-4b47-a9a6-33dacfd5f400\") " Oct 10 18:00:52 crc kubenswrapper[4799]: I1010 18:00:52.288750 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4kpxf\" (UniqueName: \"kubernetes.io/projected/09cb6e81-46ef-4b47-a9a6-33dacfd5f400-kube-api-access-4kpxf\") pod \"09cb6e81-46ef-4b47-a9a6-33dacfd5f400\" (UID: \"09cb6e81-46ef-4b47-a9a6-33dacfd5f400\") " Oct 10 18:00:52 crc kubenswrapper[4799]: I1010 18:00:52.288948 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09cb6e81-46ef-4b47-a9a6-33dacfd5f400-combined-ca-bundle\") pod \"09cb6e81-46ef-4b47-a9a6-33dacfd5f400\" (UID: \"09cb6e81-46ef-4b47-a9a6-33dacfd5f400\") " Oct 10 18:00:52 crc kubenswrapper[4799]: I1010 18:00:52.295736 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09cb6e81-46ef-4b47-a9a6-33dacfd5f400-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "09cb6e81-46ef-4b47-a9a6-33dacfd5f400" (UID: "09cb6e81-46ef-4b47-a9a6-33dacfd5f400"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:00:52 crc kubenswrapper[4799]: I1010 18:00:52.295903 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09cb6e81-46ef-4b47-a9a6-33dacfd5f400-kube-api-access-4kpxf" (OuterVolumeSpecName: "kube-api-access-4kpxf") pod "09cb6e81-46ef-4b47-a9a6-33dacfd5f400" (UID: "09cb6e81-46ef-4b47-a9a6-33dacfd5f400"). InnerVolumeSpecName "kube-api-access-4kpxf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:00:52 crc kubenswrapper[4799]: I1010 18:00:52.315660 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09cb6e81-46ef-4b47-a9a6-33dacfd5f400-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "09cb6e81-46ef-4b47-a9a6-33dacfd5f400" (UID: "09cb6e81-46ef-4b47-a9a6-33dacfd5f400"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:00:52 crc kubenswrapper[4799]: I1010 18:00:52.390940 4799 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/09cb6e81-46ef-4b47-a9a6-33dacfd5f400-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 18:00:52 crc kubenswrapper[4799]: I1010 18:00:52.390987 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4kpxf\" (UniqueName: \"kubernetes.io/projected/09cb6e81-46ef-4b47-a9a6-33dacfd5f400-kube-api-access-4kpxf\") on node \"crc\" DevicePath \"\"" Oct 10 18:00:52 crc kubenswrapper[4799]: I1010 18:00:52.391006 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09cb6e81-46ef-4b47-a9a6-33dacfd5f400-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:00:52 crc kubenswrapper[4799]: I1010 18:00:52.839909 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-9259b" event={"ID":"09cb6e81-46ef-4b47-a9a6-33dacfd5f400","Type":"ContainerDied","Data":"cdc1fc0813fe75f5e319e949c0669bc53788958dd2afd5648eeb2582ccb7b917"} Oct 10 18:00:52 crc kubenswrapper[4799]: I1010 18:00:52.840335 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cdc1fc0813fe75f5e319e949c0669bc53788958dd2afd5648eeb2582ccb7b917" Oct 10 18:00:52 crc kubenswrapper[4799]: I1010 18:00:52.840048 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-9259b" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.078177 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-7bb467888f-4bvnt"] Oct 10 18:00:53 crc kubenswrapper[4799]: E1010 18:00:53.078729 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09cb6e81-46ef-4b47-a9a6-33dacfd5f400" containerName="barbican-db-sync" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.078783 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="09cb6e81-46ef-4b47-a9a6-33dacfd5f400" containerName="barbican-db-sync" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.079074 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="09cb6e81-46ef-4b47-a9a6-33dacfd5f400" containerName="barbican-db-sync" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.081806 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-7bb467888f-4bvnt" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.085548 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.085941 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.086143 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-rtkcl" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.092635 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-7bb467888f-4bvnt"] Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.106599 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f46ef205-6289-4443-893c-ea3e3c7728a9-config-data-custom\") pod \"barbican-worker-7bb467888f-4bvnt\" (UID: \"f46ef205-6289-4443-893c-ea3e3c7728a9\") " pod="openstack/barbican-worker-7bb467888f-4bvnt" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.106654 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbjkb\" (UniqueName: \"kubernetes.io/projected/f46ef205-6289-4443-893c-ea3e3c7728a9-kube-api-access-fbjkb\") pod \"barbican-worker-7bb467888f-4bvnt\" (UID: \"f46ef205-6289-4443-893c-ea3e3c7728a9\") " pod="openstack/barbican-worker-7bb467888f-4bvnt" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.106681 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f46ef205-6289-4443-893c-ea3e3c7728a9-logs\") pod \"barbican-worker-7bb467888f-4bvnt\" (UID: \"f46ef205-6289-4443-893c-ea3e3c7728a9\") " pod="openstack/barbican-worker-7bb467888f-4bvnt" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.106789 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f46ef205-6289-4443-893c-ea3e3c7728a9-combined-ca-bundle\") pod \"barbican-worker-7bb467888f-4bvnt\" (UID: \"f46ef205-6289-4443-893c-ea3e3c7728a9\") " pod="openstack/barbican-worker-7bb467888f-4bvnt" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.106831 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f46ef205-6289-4443-893c-ea3e3c7728a9-config-data\") pod \"barbican-worker-7bb467888f-4bvnt\" (UID: \"f46ef205-6289-4443-893c-ea3e3c7728a9\") " pod="openstack/barbican-worker-7bb467888f-4bvnt" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.117123 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-58d7bfb486-ptqg5"] Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.118693 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-58d7bfb486-ptqg5" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.122903 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.145258 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-58d7bfb486-ptqg5"] Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.171015 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-669997949f-vlg8j"] Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.172271 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-669997949f-vlg8j" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.188740 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-669997949f-vlg8j"] Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.208598 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f46ef205-6289-4443-893c-ea3e3c7728a9-combined-ca-bundle\") pod \"barbican-worker-7bb467888f-4bvnt\" (UID: \"f46ef205-6289-4443-893c-ea3e3c7728a9\") " pod="openstack/barbican-worker-7bb467888f-4bvnt" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.208662 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f46ef205-6289-4443-893c-ea3e3c7728a9-config-data\") pod \"barbican-worker-7bb467888f-4bvnt\" (UID: \"f46ef205-6289-4443-893c-ea3e3c7728a9\") " pod="openstack/barbican-worker-7bb467888f-4bvnt" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.208719 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f46ef205-6289-4443-893c-ea3e3c7728a9-config-data-custom\") pod \"barbican-worker-7bb467888f-4bvnt\" (UID: \"f46ef205-6289-4443-893c-ea3e3c7728a9\") " pod="openstack/barbican-worker-7bb467888f-4bvnt" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.208746 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbjkb\" (UniqueName: \"kubernetes.io/projected/f46ef205-6289-4443-893c-ea3e3c7728a9-kube-api-access-fbjkb\") pod \"barbican-worker-7bb467888f-4bvnt\" (UID: \"f46ef205-6289-4443-893c-ea3e3c7728a9\") " pod="openstack/barbican-worker-7bb467888f-4bvnt" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.208785 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f46ef205-6289-4443-893c-ea3e3c7728a9-logs\") pod \"barbican-worker-7bb467888f-4bvnt\" (UID: \"f46ef205-6289-4443-893c-ea3e3c7728a9\") " pod="openstack/barbican-worker-7bb467888f-4bvnt" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.209207 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f46ef205-6289-4443-893c-ea3e3c7728a9-logs\") pod \"barbican-worker-7bb467888f-4bvnt\" (UID: \"f46ef205-6289-4443-893c-ea3e3c7728a9\") " pod="openstack/barbican-worker-7bb467888f-4bvnt" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.217396 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f46ef205-6289-4443-893c-ea3e3c7728a9-config-data\") pod 
\"barbican-worker-7bb467888f-4bvnt\" (UID: \"f46ef205-6289-4443-893c-ea3e3c7728a9\") " pod="openstack/barbican-worker-7bb467888f-4bvnt" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.229727 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f46ef205-6289-4443-893c-ea3e3c7728a9-config-data-custom\") pod \"barbican-worker-7bb467888f-4bvnt\" (UID: \"f46ef205-6289-4443-893c-ea3e3c7728a9\") " pod="openstack/barbican-worker-7bb467888f-4bvnt" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.231386 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbjkb\" (UniqueName: \"kubernetes.io/projected/f46ef205-6289-4443-893c-ea3e3c7728a9-kube-api-access-fbjkb\") pod \"barbican-worker-7bb467888f-4bvnt\" (UID: \"f46ef205-6289-4443-893c-ea3e3c7728a9\") " pod="openstack/barbican-worker-7bb467888f-4bvnt" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.234696 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f46ef205-6289-4443-893c-ea3e3c7728a9-combined-ca-bundle\") pod \"barbican-worker-7bb467888f-4bvnt\" (UID: \"f46ef205-6289-4443-893c-ea3e3c7728a9\") " pod="openstack/barbican-worker-7bb467888f-4bvnt" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.281181 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-596f5c78cd-pnssg"] Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.287177 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-596f5c78cd-pnssg" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.292645 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.299088 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-596f5c78cd-pnssg"] Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.312669 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/203afbc9-83b5-49dc-a989-d12e6185fa4c-config-data-custom\") pod \"barbican-api-596f5c78cd-pnssg\" (UID: \"203afbc9-83b5-49dc-a989-d12e6185fa4c\") " pod="openstack/barbican-api-596f5c78cd-pnssg" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.312746 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/041e991f-3439-4111-99db-5cec8f163e97-ovsdbserver-sb\") pod \"dnsmasq-dns-669997949f-vlg8j\" (UID: \"041e991f-3439-4111-99db-5cec8f163e97\") " pod="openstack/dnsmasq-dns-669997949f-vlg8j" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.312815 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qwd9k\" (UniqueName: \"kubernetes.io/projected/ede7ea54-dc54-4eba-8a41-9c8fcb73f481-kube-api-access-qwd9k\") pod \"barbican-keystone-listener-58d7bfb486-ptqg5\" (UID: \"ede7ea54-dc54-4eba-8a41-9c8fcb73f481\") " pod="openstack/barbican-keystone-listener-58d7bfb486-ptqg5" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.312854 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmm9x\" (UniqueName: 
\"kubernetes.io/projected/203afbc9-83b5-49dc-a989-d12e6185fa4c-kube-api-access-mmm9x\") pod \"barbican-api-596f5c78cd-pnssg\" (UID: \"203afbc9-83b5-49dc-a989-d12e6185fa4c\") " pod="openstack/barbican-api-596f5c78cd-pnssg" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.312890 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ede7ea54-dc54-4eba-8a41-9c8fcb73f481-config-data-custom\") pod \"barbican-keystone-listener-58d7bfb486-ptqg5\" (UID: \"ede7ea54-dc54-4eba-8a41-9c8fcb73f481\") " pod="openstack/barbican-keystone-listener-58d7bfb486-ptqg5" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.312907 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/041e991f-3439-4111-99db-5cec8f163e97-ovsdbserver-nb\") pod \"dnsmasq-dns-669997949f-vlg8j\" (UID: \"041e991f-3439-4111-99db-5cec8f163e97\") " pod="openstack/dnsmasq-dns-669997949f-vlg8j" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.312927 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/041e991f-3439-4111-99db-5cec8f163e97-dns-svc\") pod \"dnsmasq-dns-669997949f-vlg8j\" (UID: \"041e991f-3439-4111-99db-5cec8f163e97\") " pod="openstack/dnsmasq-dns-669997949f-vlg8j" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.312970 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nrsnx\" (UniqueName: \"kubernetes.io/projected/041e991f-3439-4111-99db-5cec8f163e97-kube-api-access-nrsnx\") pod \"dnsmasq-dns-669997949f-vlg8j\" (UID: \"041e991f-3439-4111-99db-5cec8f163e97\") " pod="openstack/dnsmasq-dns-669997949f-vlg8j" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.312990 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ede7ea54-dc54-4eba-8a41-9c8fcb73f481-logs\") pod \"barbican-keystone-listener-58d7bfb486-ptqg5\" (UID: \"ede7ea54-dc54-4eba-8a41-9c8fcb73f481\") " pod="openstack/barbican-keystone-listener-58d7bfb486-ptqg5" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.313031 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ede7ea54-dc54-4eba-8a41-9c8fcb73f481-config-data\") pod \"barbican-keystone-listener-58d7bfb486-ptqg5\" (UID: \"ede7ea54-dc54-4eba-8a41-9c8fcb73f481\") " pod="openstack/barbican-keystone-listener-58d7bfb486-ptqg5" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.313051 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/041e991f-3439-4111-99db-5cec8f163e97-config\") pod \"dnsmasq-dns-669997949f-vlg8j\" (UID: \"041e991f-3439-4111-99db-5cec8f163e97\") " pod="openstack/dnsmasq-dns-669997949f-vlg8j" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.313085 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ede7ea54-dc54-4eba-8a41-9c8fcb73f481-combined-ca-bundle\") pod \"barbican-keystone-listener-58d7bfb486-ptqg5\" (UID: \"ede7ea54-dc54-4eba-8a41-9c8fcb73f481\") " 
pod="openstack/barbican-keystone-listener-58d7bfb486-ptqg5" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.313125 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/203afbc9-83b5-49dc-a989-d12e6185fa4c-combined-ca-bundle\") pod \"barbican-api-596f5c78cd-pnssg\" (UID: \"203afbc9-83b5-49dc-a989-d12e6185fa4c\") " pod="openstack/barbican-api-596f5c78cd-pnssg" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.313142 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/203afbc9-83b5-49dc-a989-d12e6185fa4c-logs\") pod \"barbican-api-596f5c78cd-pnssg\" (UID: \"203afbc9-83b5-49dc-a989-d12e6185fa4c\") " pod="openstack/barbican-api-596f5c78cd-pnssg" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.313160 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/203afbc9-83b5-49dc-a989-d12e6185fa4c-config-data\") pod \"barbican-api-596f5c78cd-pnssg\" (UID: \"203afbc9-83b5-49dc-a989-d12e6185fa4c\") " pod="openstack/barbican-api-596f5c78cd-pnssg" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.413964 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-7bb467888f-4bvnt" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.414206 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ede7ea54-dc54-4eba-8a41-9c8fcb73f481-combined-ca-bundle\") pod \"barbican-keystone-listener-58d7bfb486-ptqg5\" (UID: \"ede7ea54-dc54-4eba-8a41-9c8fcb73f481\") " pod="openstack/barbican-keystone-listener-58d7bfb486-ptqg5" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.414845 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/203afbc9-83b5-49dc-a989-d12e6185fa4c-combined-ca-bundle\") pod \"barbican-api-596f5c78cd-pnssg\" (UID: \"203afbc9-83b5-49dc-a989-d12e6185fa4c\") " pod="openstack/barbican-api-596f5c78cd-pnssg" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.414953 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/203afbc9-83b5-49dc-a989-d12e6185fa4c-logs\") pod \"barbican-api-596f5c78cd-pnssg\" (UID: \"203afbc9-83b5-49dc-a989-d12e6185fa4c\") " pod="openstack/barbican-api-596f5c78cd-pnssg" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.415057 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/203afbc9-83b5-49dc-a989-d12e6185fa4c-config-data\") pod \"barbican-api-596f5c78cd-pnssg\" (UID: \"203afbc9-83b5-49dc-a989-d12e6185fa4c\") " pod="openstack/barbican-api-596f5c78cd-pnssg" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.415150 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/203afbc9-83b5-49dc-a989-d12e6185fa4c-config-data-custom\") pod \"barbican-api-596f5c78cd-pnssg\" (UID: \"203afbc9-83b5-49dc-a989-d12e6185fa4c\") " pod="openstack/barbican-api-596f5c78cd-pnssg" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.415285 4799 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/041e991f-3439-4111-99db-5cec8f163e97-ovsdbserver-sb\") pod \"dnsmasq-dns-669997949f-vlg8j\" (UID: \"041e991f-3439-4111-99db-5cec8f163e97\") " pod="openstack/dnsmasq-dns-669997949f-vlg8j" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.415382 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qwd9k\" (UniqueName: \"kubernetes.io/projected/ede7ea54-dc54-4eba-8a41-9c8fcb73f481-kube-api-access-qwd9k\") pod \"barbican-keystone-listener-58d7bfb486-ptqg5\" (UID: \"ede7ea54-dc54-4eba-8a41-9c8fcb73f481\") " pod="openstack/barbican-keystone-listener-58d7bfb486-ptqg5" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.415514 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmm9x\" (UniqueName: \"kubernetes.io/projected/203afbc9-83b5-49dc-a989-d12e6185fa4c-kube-api-access-mmm9x\") pod \"barbican-api-596f5c78cd-pnssg\" (UID: \"203afbc9-83b5-49dc-a989-d12e6185fa4c\") " pod="openstack/barbican-api-596f5c78cd-pnssg" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.415613 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ede7ea54-dc54-4eba-8a41-9c8fcb73f481-config-data-custom\") pod \"barbican-keystone-listener-58d7bfb486-ptqg5\" (UID: \"ede7ea54-dc54-4eba-8a41-9c8fcb73f481\") " pod="openstack/barbican-keystone-listener-58d7bfb486-ptqg5" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.415703 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/041e991f-3439-4111-99db-5cec8f163e97-ovsdbserver-nb\") pod \"dnsmasq-dns-669997949f-vlg8j\" (UID: \"041e991f-3439-4111-99db-5cec8f163e97\") " pod="openstack/dnsmasq-dns-669997949f-vlg8j" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.415834 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/041e991f-3439-4111-99db-5cec8f163e97-dns-svc\") pod \"dnsmasq-dns-669997949f-vlg8j\" (UID: \"041e991f-3439-4111-99db-5cec8f163e97\") " pod="openstack/dnsmasq-dns-669997949f-vlg8j" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.415957 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nrsnx\" (UniqueName: \"kubernetes.io/projected/041e991f-3439-4111-99db-5cec8f163e97-kube-api-access-nrsnx\") pod \"dnsmasq-dns-669997949f-vlg8j\" (UID: \"041e991f-3439-4111-99db-5cec8f163e97\") " pod="openstack/dnsmasq-dns-669997949f-vlg8j" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.416049 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ede7ea54-dc54-4eba-8a41-9c8fcb73f481-logs\") pod \"barbican-keystone-listener-58d7bfb486-ptqg5\" (UID: \"ede7ea54-dc54-4eba-8a41-9c8fcb73f481\") " pod="openstack/barbican-keystone-listener-58d7bfb486-ptqg5" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.416154 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ede7ea54-dc54-4eba-8a41-9c8fcb73f481-config-data\") pod \"barbican-keystone-listener-58d7bfb486-ptqg5\" (UID: \"ede7ea54-dc54-4eba-8a41-9c8fcb73f481\") " pod="openstack/barbican-keystone-listener-58d7bfb486-ptqg5" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 
18:00:53.416247 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/041e991f-3439-4111-99db-5cec8f163e97-config\") pod \"dnsmasq-dns-669997949f-vlg8j\" (UID: \"041e991f-3439-4111-99db-5cec8f163e97\") " pod="openstack/dnsmasq-dns-669997949f-vlg8j" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.417213 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ede7ea54-dc54-4eba-8a41-9c8fcb73f481-combined-ca-bundle\") pod \"barbican-keystone-listener-58d7bfb486-ptqg5\" (UID: \"ede7ea54-dc54-4eba-8a41-9c8fcb73f481\") " pod="openstack/barbican-keystone-listener-58d7bfb486-ptqg5" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.417349 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/041e991f-3439-4111-99db-5cec8f163e97-config\") pod \"dnsmasq-dns-669997949f-vlg8j\" (UID: \"041e991f-3439-4111-99db-5cec8f163e97\") " pod="openstack/dnsmasq-dns-669997949f-vlg8j" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.418563 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/041e991f-3439-4111-99db-5cec8f163e97-dns-svc\") pod \"dnsmasq-dns-669997949f-vlg8j\" (UID: \"041e991f-3439-4111-99db-5cec8f163e97\") " pod="openstack/dnsmasq-dns-669997949f-vlg8j" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.418733 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/041e991f-3439-4111-99db-5cec8f163e97-ovsdbserver-sb\") pod \"dnsmasq-dns-669997949f-vlg8j\" (UID: \"041e991f-3439-4111-99db-5cec8f163e97\") " pod="openstack/dnsmasq-dns-669997949f-vlg8j" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.419539 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/041e991f-3439-4111-99db-5cec8f163e97-ovsdbserver-nb\") pod \"dnsmasq-dns-669997949f-vlg8j\" (UID: \"041e991f-3439-4111-99db-5cec8f163e97\") " pod="openstack/dnsmasq-dns-669997949f-vlg8j" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.419851 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ede7ea54-dc54-4eba-8a41-9c8fcb73f481-logs\") pod \"barbican-keystone-listener-58d7bfb486-ptqg5\" (UID: \"ede7ea54-dc54-4eba-8a41-9c8fcb73f481\") " pod="openstack/barbican-keystone-listener-58d7bfb486-ptqg5" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.420559 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/203afbc9-83b5-49dc-a989-d12e6185fa4c-logs\") pod \"barbican-api-596f5c78cd-pnssg\" (UID: \"203afbc9-83b5-49dc-a989-d12e6185fa4c\") " pod="openstack/barbican-api-596f5c78cd-pnssg" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.421152 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/203afbc9-83b5-49dc-a989-d12e6185fa4c-combined-ca-bundle\") pod \"barbican-api-596f5c78cd-pnssg\" (UID: \"203afbc9-83b5-49dc-a989-d12e6185fa4c\") " pod="openstack/barbican-api-596f5c78cd-pnssg" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.423674 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/203afbc9-83b5-49dc-a989-d12e6185fa4c-config-data\") pod \"barbican-api-596f5c78cd-pnssg\" (UID: \"203afbc9-83b5-49dc-a989-d12e6185fa4c\") " pod="openstack/barbican-api-596f5c78cd-pnssg" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.428741 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ede7ea54-dc54-4eba-8a41-9c8fcb73f481-config-data-custom\") pod \"barbican-keystone-listener-58d7bfb486-ptqg5\" (UID: \"ede7ea54-dc54-4eba-8a41-9c8fcb73f481\") " pod="openstack/barbican-keystone-listener-58d7bfb486-ptqg5" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.431397 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/203afbc9-83b5-49dc-a989-d12e6185fa4c-config-data-custom\") pod \"barbican-api-596f5c78cd-pnssg\" (UID: \"203afbc9-83b5-49dc-a989-d12e6185fa4c\") " pod="openstack/barbican-api-596f5c78cd-pnssg" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.435403 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ede7ea54-dc54-4eba-8a41-9c8fcb73f481-config-data\") pod \"barbican-keystone-listener-58d7bfb486-ptqg5\" (UID: \"ede7ea54-dc54-4eba-8a41-9c8fcb73f481\") " pod="openstack/barbican-keystone-listener-58d7bfb486-ptqg5" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.439317 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmm9x\" (UniqueName: \"kubernetes.io/projected/203afbc9-83b5-49dc-a989-d12e6185fa4c-kube-api-access-mmm9x\") pod \"barbican-api-596f5c78cd-pnssg\" (UID: \"203afbc9-83b5-49dc-a989-d12e6185fa4c\") " pod="openstack/barbican-api-596f5c78cd-pnssg" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.441892 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nrsnx\" (UniqueName: \"kubernetes.io/projected/041e991f-3439-4111-99db-5cec8f163e97-kube-api-access-nrsnx\") pod \"dnsmasq-dns-669997949f-vlg8j\" (UID: \"041e991f-3439-4111-99db-5cec8f163e97\") " pod="openstack/dnsmasq-dns-669997949f-vlg8j" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.444661 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qwd9k\" (UniqueName: \"kubernetes.io/projected/ede7ea54-dc54-4eba-8a41-9c8fcb73f481-kube-api-access-qwd9k\") pod \"barbican-keystone-listener-58d7bfb486-ptqg5\" (UID: \"ede7ea54-dc54-4eba-8a41-9c8fcb73f481\") " pod="openstack/barbican-keystone-listener-58d7bfb486-ptqg5" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.495528 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-669997949f-vlg8j" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.614157 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-596f5c78cd-pnssg" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.739328 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-58d7bfb486-ptqg5" Oct 10 18:00:53 crc kubenswrapper[4799]: I1010 18:00:53.874120 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-7bb467888f-4bvnt"] Oct 10 18:00:54 crc kubenswrapper[4799]: I1010 18:00:54.021827 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-669997949f-vlg8j"] Oct 10 18:00:54 crc kubenswrapper[4799]: W1010 18:00:54.025789 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod041e991f_3439_4111_99db_5cec8f163e97.slice/crio-6e445e149258491c3ea817b2a1e8d4d12c711631219d9c1fd778ed1bf302fd39 WatchSource:0}: Error finding container 6e445e149258491c3ea817b2a1e8d4d12c711631219d9c1fd778ed1bf302fd39: Status 404 returned error can't find the container with id 6e445e149258491c3ea817b2a1e8d4d12c711631219d9c1fd778ed1bf302fd39 Oct 10 18:00:54 crc kubenswrapper[4799]: I1010 18:00:54.113459 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-596f5c78cd-pnssg"] Oct 10 18:00:54 crc kubenswrapper[4799]: W1010 18:00:54.119814 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod203afbc9_83b5_49dc_a989_d12e6185fa4c.slice/crio-d18a4f24418d5f9ccc9c29fc0094a9549ba950cfe4d79ff86397b5959ac48829 WatchSource:0}: Error finding container d18a4f24418d5f9ccc9c29fc0094a9549ba950cfe4d79ff86397b5959ac48829: Status 404 returned error can't find the container with id d18a4f24418d5f9ccc9c29fc0094a9549ba950cfe4d79ff86397b5959ac48829 Oct 10 18:00:54 crc kubenswrapper[4799]: I1010 18:00:54.186235 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-58d7bfb486-ptqg5"] Oct 10 18:00:54 crc kubenswrapper[4799]: W1010 18:00:54.205660 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podede7ea54_dc54_4eba_8a41_9c8fcb73f481.slice/crio-368d8a12f3bef467b3ba18bef6a6b80d7dd2ad19a837bdd4d45c4f219fe44ed6 WatchSource:0}: Error finding container 368d8a12f3bef467b3ba18bef6a6b80d7dd2ad19a837bdd4d45c4f219fe44ed6: Status 404 returned error can't find the container with id 368d8a12f3bef467b3ba18bef6a6b80d7dd2ad19a837bdd4d45c4f219fe44ed6 Oct 10 18:00:54 crc kubenswrapper[4799]: I1010 18:00:54.876117 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-58d7bfb486-ptqg5" event={"ID":"ede7ea54-dc54-4eba-8a41-9c8fcb73f481","Type":"ContainerStarted","Data":"c2f51426a05c15f52d289448e54b6394fadad8af7078d398fc0a2d459bdb05e7"} Oct 10 18:00:54 crc kubenswrapper[4799]: I1010 18:00:54.876187 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-58d7bfb486-ptqg5" event={"ID":"ede7ea54-dc54-4eba-8a41-9c8fcb73f481","Type":"ContainerStarted","Data":"14256de7defdaaf00e62632a30b1ecb992ca85396c86d7cdd39e547490d04cd6"} Oct 10 18:00:54 crc kubenswrapper[4799]: I1010 18:00:54.876201 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-58d7bfb486-ptqg5" event={"ID":"ede7ea54-dc54-4eba-8a41-9c8fcb73f481","Type":"ContainerStarted","Data":"368d8a12f3bef467b3ba18bef6a6b80d7dd2ad19a837bdd4d45c4f219fe44ed6"} Oct 10 18:00:54 crc kubenswrapper[4799]: I1010 18:00:54.879018 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-596f5c78cd-pnssg" 
event={"ID":"203afbc9-83b5-49dc-a989-d12e6185fa4c","Type":"ContainerStarted","Data":"acbf0d4b5bac4a09c5a0f95592058a3316a6b6592046b39f2c9fa1012a3de343"} Oct 10 18:00:54 crc kubenswrapper[4799]: I1010 18:00:54.879059 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-596f5c78cd-pnssg" event={"ID":"203afbc9-83b5-49dc-a989-d12e6185fa4c","Type":"ContainerStarted","Data":"f674f95bec42e24f6e4c91672529d26710fea821a8ab2ae1e6211f494fd1476f"} Oct 10 18:00:54 crc kubenswrapper[4799]: I1010 18:00:54.879074 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-596f5c78cd-pnssg" event={"ID":"203afbc9-83b5-49dc-a989-d12e6185fa4c","Type":"ContainerStarted","Data":"d18a4f24418d5f9ccc9c29fc0094a9549ba950cfe4d79ff86397b5959ac48829"} Oct 10 18:00:54 crc kubenswrapper[4799]: I1010 18:00:54.879136 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-596f5c78cd-pnssg" Oct 10 18:00:54 crc kubenswrapper[4799]: I1010 18:00:54.879164 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-596f5c78cd-pnssg" Oct 10 18:00:54 crc kubenswrapper[4799]: I1010 18:00:54.880319 4799 generic.go:334] "Generic (PLEG): container finished" podID="041e991f-3439-4111-99db-5cec8f163e97" containerID="3b3a6d3d5cb4a51f81495e6423502db3502e3dcf02cb6af9b677fbe91fcdde02" exitCode=0 Oct 10 18:00:54 crc kubenswrapper[4799]: I1010 18:00:54.880371 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-669997949f-vlg8j" event={"ID":"041e991f-3439-4111-99db-5cec8f163e97","Type":"ContainerDied","Data":"3b3a6d3d5cb4a51f81495e6423502db3502e3dcf02cb6af9b677fbe91fcdde02"} Oct 10 18:00:54 crc kubenswrapper[4799]: I1010 18:00:54.880394 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-669997949f-vlg8j" event={"ID":"041e991f-3439-4111-99db-5cec8f163e97","Type":"ContainerStarted","Data":"6e445e149258491c3ea817b2a1e8d4d12c711631219d9c1fd778ed1bf302fd39"} Oct 10 18:00:54 crc kubenswrapper[4799]: I1010 18:00:54.887596 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7bb467888f-4bvnt" event={"ID":"f46ef205-6289-4443-893c-ea3e3c7728a9","Type":"ContainerStarted","Data":"8ef6acab4858cc40a96957babf2e363075221aac4d3f7b3bdffae178c3fc9ade"} Oct 10 18:00:54 crc kubenswrapper[4799]: I1010 18:00:54.887647 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7bb467888f-4bvnt" event={"ID":"f46ef205-6289-4443-893c-ea3e3c7728a9","Type":"ContainerStarted","Data":"df0aa29b1efd3292b4b11d025230fc723eccbd8054378c9cc8aba4b8fc5f0ea1"} Oct 10 18:00:54 crc kubenswrapper[4799]: I1010 18:00:54.887661 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7bb467888f-4bvnt" event={"ID":"f46ef205-6289-4443-893c-ea3e3c7728a9","Type":"ContainerStarted","Data":"133dba16e90301dbae05bd89c47c24d664058f98f996cf31f78e04082b1d6ee7"} Oct 10 18:00:54 crc kubenswrapper[4799]: I1010 18:00:54.899093 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-58d7bfb486-ptqg5" podStartSLOduration=1.899066594 podStartE2EDuration="1.899066594s" podCreationTimestamp="2025-10-10 18:00:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:00:54.890073935 +0000 UTC m=+5348.398398050" watchObservedRunningTime="2025-10-10 18:00:54.899066594 +0000 UTC 
m=+5348.407390709" Oct 10 18:00:54 crc kubenswrapper[4799]: I1010 18:00:54.916671 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-7bb467888f-4bvnt" podStartSLOduration=1.916646163 podStartE2EDuration="1.916646163s" podCreationTimestamp="2025-10-10 18:00:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:00:54.915589137 +0000 UTC m=+5348.423913262" watchObservedRunningTime="2025-10-10 18:00:54.916646163 +0000 UTC m=+5348.424970278" Oct 10 18:00:54 crc kubenswrapper[4799]: I1010 18:00:54.968263 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-596f5c78cd-pnssg" podStartSLOduration=1.9682358899999999 podStartE2EDuration="1.96823589s" podCreationTimestamp="2025-10-10 18:00:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:00:54.956518005 +0000 UTC m=+5348.464842120" watchObservedRunningTime="2025-10-10 18:00:54.96823589 +0000 UTC m=+5348.476560005" Oct 10 18:00:55 crc kubenswrapper[4799]: I1010 18:00:55.906895 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-669997949f-vlg8j" event={"ID":"041e991f-3439-4111-99db-5cec8f163e97","Type":"ContainerStarted","Data":"c33afc9fd6624a6dae355503387b9c4d1b22621561a0bbb735dde7e57dab7fe9"} Oct 10 18:00:55 crc kubenswrapper[4799]: I1010 18:00:55.933047 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-669997949f-vlg8j" podStartSLOduration=2.933028015 podStartE2EDuration="2.933028015s" podCreationTimestamp="2025-10-10 18:00:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:00:55.928254649 +0000 UTC m=+5349.436578784" watchObservedRunningTime="2025-10-10 18:00:55.933028015 +0000 UTC m=+5349.441352130" Oct 10 18:00:56 crc kubenswrapper[4799]: I1010 18:00:56.916868 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-669997949f-vlg8j" Oct 10 18:01:00 crc kubenswrapper[4799]: I1010 18:01:00.150262 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29335321-x447z"] Oct 10 18:01:00 crc kubenswrapper[4799]: I1010 18:01:00.151636 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29335321-x447z" Oct 10 18:01:00 crc kubenswrapper[4799]: I1010 18:01:00.179023 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29335321-x447z"] Oct 10 18:01:00 crc kubenswrapper[4799]: I1010 18:01:00.183854 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jjgt7\" (UniqueName: \"kubernetes.io/projected/386b8891-6eca-4986-b808-0e7ac3ec3339-kube-api-access-jjgt7\") pod \"keystone-cron-29335321-x447z\" (UID: \"386b8891-6eca-4986-b808-0e7ac3ec3339\") " pod="openstack/keystone-cron-29335321-x447z" Oct 10 18:01:00 crc kubenswrapper[4799]: I1010 18:01:00.183962 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/386b8891-6eca-4986-b808-0e7ac3ec3339-combined-ca-bundle\") pod \"keystone-cron-29335321-x447z\" (UID: \"386b8891-6eca-4986-b808-0e7ac3ec3339\") " pod="openstack/keystone-cron-29335321-x447z" Oct 10 18:01:00 crc kubenswrapper[4799]: I1010 18:01:00.184063 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/386b8891-6eca-4986-b808-0e7ac3ec3339-fernet-keys\") pod \"keystone-cron-29335321-x447z\" (UID: \"386b8891-6eca-4986-b808-0e7ac3ec3339\") " pod="openstack/keystone-cron-29335321-x447z" Oct 10 18:01:00 crc kubenswrapper[4799]: I1010 18:01:00.184429 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/386b8891-6eca-4986-b808-0e7ac3ec3339-config-data\") pod \"keystone-cron-29335321-x447z\" (UID: \"386b8891-6eca-4986-b808-0e7ac3ec3339\") " pod="openstack/keystone-cron-29335321-x447z" Oct 10 18:01:00 crc kubenswrapper[4799]: I1010 18:01:00.285463 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/386b8891-6eca-4986-b808-0e7ac3ec3339-config-data\") pod \"keystone-cron-29335321-x447z\" (UID: \"386b8891-6eca-4986-b808-0e7ac3ec3339\") " pod="openstack/keystone-cron-29335321-x447z" Oct 10 18:01:00 crc kubenswrapper[4799]: I1010 18:01:00.285541 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jjgt7\" (UniqueName: \"kubernetes.io/projected/386b8891-6eca-4986-b808-0e7ac3ec3339-kube-api-access-jjgt7\") pod \"keystone-cron-29335321-x447z\" (UID: \"386b8891-6eca-4986-b808-0e7ac3ec3339\") " pod="openstack/keystone-cron-29335321-x447z" Oct 10 18:01:00 crc kubenswrapper[4799]: I1010 18:01:00.285571 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/386b8891-6eca-4986-b808-0e7ac3ec3339-combined-ca-bundle\") pod \"keystone-cron-29335321-x447z\" (UID: \"386b8891-6eca-4986-b808-0e7ac3ec3339\") " pod="openstack/keystone-cron-29335321-x447z" Oct 10 18:01:00 crc kubenswrapper[4799]: I1010 18:01:00.285623 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/386b8891-6eca-4986-b808-0e7ac3ec3339-fernet-keys\") pod \"keystone-cron-29335321-x447z\" (UID: \"386b8891-6eca-4986-b808-0e7ac3ec3339\") " pod="openstack/keystone-cron-29335321-x447z" Oct 10 18:01:00 crc kubenswrapper[4799]: I1010 18:01:00.295546 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/386b8891-6eca-4986-b808-0e7ac3ec3339-config-data\") pod \"keystone-cron-29335321-x447z\" (UID: \"386b8891-6eca-4986-b808-0e7ac3ec3339\") " pod="openstack/keystone-cron-29335321-x447z" Oct 10 18:01:00 crc kubenswrapper[4799]: I1010 18:01:00.301478 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/386b8891-6eca-4986-b808-0e7ac3ec3339-fernet-keys\") pod \"keystone-cron-29335321-x447z\" (UID: \"386b8891-6eca-4986-b808-0e7ac3ec3339\") " pod="openstack/keystone-cron-29335321-x447z" Oct 10 18:01:00 crc kubenswrapper[4799]: I1010 18:01:00.307181 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jjgt7\" (UniqueName: \"kubernetes.io/projected/386b8891-6eca-4986-b808-0e7ac3ec3339-kube-api-access-jjgt7\") pod \"keystone-cron-29335321-x447z\" (UID: \"386b8891-6eca-4986-b808-0e7ac3ec3339\") " pod="openstack/keystone-cron-29335321-x447z" Oct 10 18:01:00 crc kubenswrapper[4799]: I1010 18:01:00.315810 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/386b8891-6eca-4986-b808-0e7ac3ec3339-combined-ca-bundle\") pod \"keystone-cron-29335321-x447z\" (UID: \"386b8891-6eca-4986-b808-0e7ac3ec3339\") " pod="openstack/keystone-cron-29335321-x447z" Oct 10 18:01:00 crc kubenswrapper[4799]: I1010 18:01:00.472172 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29335321-x447z" Oct 10 18:01:00 crc kubenswrapper[4799]: I1010 18:01:00.945634 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29335321-x447z"] Oct 10 18:01:01 crc kubenswrapper[4799]: I1010 18:01:01.969008 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29335321-x447z" event={"ID":"386b8891-6eca-4986-b808-0e7ac3ec3339","Type":"ContainerStarted","Data":"9fb4ca9d2e227cf92d9d6018d62c605fe64161b69ba62c38a9f24a17524793bd"} Oct 10 18:01:01 crc kubenswrapper[4799]: I1010 18:01:01.969376 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29335321-x447z" event={"ID":"386b8891-6eca-4986-b808-0e7ac3ec3339","Type":"ContainerStarted","Data":"1c2feb5416705fcab6c29ea608d6c7b70ffd811ac8783c652204133cba536131"} Oct 10 18:01:02 crc kubenswrapper[4799]: I1010 18:01:02.000386 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29335321-x447z" podStartSLOduration=2.000356548 podStartE2EDuration="2.000356548s" podCreationTimestamp="2025-10-10 18:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:01:01.988421007 +0000 UTC m=+5355.496745132" watchObservedRunningTime="2025-10-10 18:01:02.000356548 +0000 UTC m=+5355.508680703" Oct 10 18:01:02 crc kubenswrapper[4799]: I1010 18:01:02.981056 4799 generic.go:334] "Generic (PLEG): container finished" podID="386b8891-6eca-4986-b808-0e7ac3ec3339" containerID="9fb4ca9d2e227cf92d9d6018d62c605fe64161b69ba62c38a9f24a17524793bd" exitCode=0 Oct 10 18:01:02 crc kubenswrapper[4799]: I1010 18:01:02.981115 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29335321-x447z" event={"ID":"386b8891-6eca-4986-b808-0e7ac3ec3339","Type":"ContainerDied","Data":"9fb4ca9d2e227cf92d9d6018d62c605fe64161b69ba62c38a9f24a17524793bd"} Oct 10 18:01:03 crc kubenswrapper[4799]: 
I1010 18:01:03.496994 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-669997949f-vlg8j" Oct 10 18:01:03 crc kubenswrapper[4799]: I1010 18:01:03.588864 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b7d9cf495-24s9n"] Oct 10 18:01:03 crc kubenswrapper[4799]: I1010 18:01:03.589448 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6b7d9cf495-24s9n" podUID="81c07d06-a6c1-43c5-ada3-2e734679d614" containerName="dnsmasq-dns" containerID="cri-o://24180e9f457afe208f0537ac29319dfe498db54c3fa76609183cd46286653bb7" gracePeriod=10 Oct 10 18:01:03 crc kubenswrapper[4799]: I1010 18:01:03.998453 4799 generic.go:334] "Generic (PLEG): container finished" podID="81c07d06-a6c1-43c5-ada3-2e734679d614" containerID="24180e9f457afe208f0537ac29319dfe498db54c3fa76609183cd46286653bb7" exitCode=0 Oct 10 18:01:03 crc kubenswrapper[4799]: I1010 18:01:03.998632 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7d9cf495-24s9n" event={"ID":"81c07d06-a6c1-43c5-ada3-2e734679d614","Type":"ContainerDied","Data":"24180e9f457afe208f0537ac29319dfe498db54c3fa76609183cd46286653bb7"} Oct 10 18:01:04 crc kubenswrapper[4799]: I1010 18:01:04.103121 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b7d9cf495-24s9n" Oct 10 18:01:04 crc kubenswrapper[4799]: I1010 18:01:04.276545 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/81c07d06-a6c1-43c5-ada3-2e734679d614-ovsdbserver-sb\") pod \"81c07d06-a6c1-43c5-ada3-2e734679d614\" (UID: \"81c07d06-a6c1-43c5-ada3-2e734679d614\") " Oct 10 18:01:04 crc kubenswrapper[4799]: I1010 18:01:04.276618 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/81c07d06-a6c1-43c5-ada3-2e734679d614-dns-svc\") pod \"81c07d06-a6c1-43c5-ada3-2e734679d614\" (UID: \"81c07d06-a6c1-43c5-ada3-2e734679d614\") " Oct 10 18:01:04 crc kubenswrapper[4799]: I1010 18:01:04.276684 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/81c07d06-a6c1-43c5-ada3-2e734679d614-ovsdbserver-nb\") pod \"81c07d06-a6c1-43c5-ada3-2e734679d614\" (UID: \"81c07d06-a6c1-43c5-ada3-2e734679d614\") " Oct 10 18:01:04 crc kubenswrapper[4799]: I1010 18:01:04.276768 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81c07d06-a6c1-43c5-ada3-2e734679d614-config\") pod \"81c07d06-a6c1-43c5-ada3-2e734679d614\" (UID: \"81c07d06-a6c1-43c5-ada3-2e734679d614\") " Oct 10 18:01:04 crc kubenswrapper[4799]: I1010 18:01:04.276797 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tsnf2\" (UniqueName: \"kubernetes.io/projected/81c07d06-a6c1-43c5-ada3-2e734679d614-kube-api-access-tsnf2\") pod \"81c07d06-a6c1-43c5-ada3-2e734679d614\" (UID: \"81c07d06-a6c1-43c5-ada3-2e734679d614\") " Oct 10 18:01:04 crc kubenswrapper[4799]: I1010 18:01:04.281683 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81c07d06-a6c1-43c5-ada3-2e734679d614-kube-api-access-tsnf2" (OuterVolumeSpecName: "kube-api-access-tsnf2") pod "81c07d06-a6c1-43c5-ada3-2e734679d614" (UID: "81c07d06-a6c1-43c5-ada3-2e734679d614"). 
InnerVolumeSpecName "kube-api-access-tsnf2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:01:04 crc kubenswrapper[4799]: I1010 18:01:04.316244 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81c07d06-a6c1-43c5-ada3-2e734679d614-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "81c07d06-a6c1-43c5-ada3-2e734679d614" (UID: "81c07d06-a6c1-43c5-ada3-2e734679d614"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:01:04 crc kubenswrapper[4799]: I1010 18:01:04.318142 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81c07d06-a6c1-43c5-ada3-2e734679d614-config" (OuterVolumeSpecName: "config") pod "81c07d06-a6c1-43c5-ada3-2e734679d614" (UID: "81c07d06-a6c1-43c5-ada3-2e734679d614"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:01:04 crc kubenswrapper[4799]: I1010 18:01:04.322864 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81c07d06-a6c1-43c5-ada3-2e734679d614-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "81c07d06-a6c1-43c5-ada3-2e734679d614" (UID: "81c07d06-a6c1-43c5-ada3-2e734679d614"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:01:04 crc kubenswrapper[4799]: I1010 18:01:04.323384 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81c07d06-a6c1-43c5-ada3-2e734679d614-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "81c07d06-a6c1-43c5-ada3-2e734679d614" (UID: "81c07d06-a6c1-43c5-ada3-2e734679d614"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:01:04 crc kubenswrapper[4799]: I1010 18:01:04.353634 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29335321-x447z" Oct 10 18:01:04 crc kubenswrapper[4799]: I1010 18:01:04.379864 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tsnf2\" (UniqueName: \"kubernetes.io/projected/81c07d06-a6c1-43c5-ada3-2e734679d614-kube-api-access-tsnf2\") on node \"crc\" DevicePath \"\"" Oct 10 18:01:04 crc kubenswrapper[4799]: I1010 18:01:04.379896 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/81c07d06-a6c1-43c5-ada3-2e734679d614-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 10 18:01:04 crc kubenswrapper[4799]: I1010 18:01:04.379910 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/81c07d06-a6c1-43c5-ada3-2e734679d614-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 10 18:01:04 crc kubenswrapper[4799]: I1010 18:01:04.379923 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/81c07d06-a6c1-43c5-ada3-2e734679d614-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 10 18:01:04 crc kubenswrapper[4799]: I1010 18:01:04.379938 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81c07d06-a6c1-43c5-ada3-2e734679d614-config\") on node \"crc\" DevicePath \"\"" Oct 10 18:01:04 crc kubenswrapper[4799]: I1010 18:01:04.480918 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/386b8891-6eca-4986-b808-0e7ac3ec3339-combined-ca-bundle\") pod \"386b8891-6eca-4986-b808-0e7ac3ec3339\" (UID: \"386b8891-6eca-4986-b808-0e7ac3ec3339\") " Oct 10 18:01:04 crc kubenswrapper[4799]: I1010 18:01:04.481533 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jjgt7\" (UniqueName: \"kubernetes.io/projected/386b8891-6eca-4986-b808-0e7ac3ec3339-kube-api-access-jjgt7\") pod \"386b8891-6eca-4986-b808-0e7ac3ec3339\" (UID: \"386b8891-6eca-4986-b808-0e7ac3ec3339\") " Oct 10 18:01:04 crc kubenswrapper[4799]: I1010 18:01:04.482318 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/386b8891-6eca-4986-b808-0e7ac3ec3339-fernet-keys\") pod \"386b8891-6eca-4986-b808-0e7ac3ec3339\" (UID: \"386b8891-6eca-4986-b808-0e7ac3ec3339\") " Oct 10 18:01:04 crc kubenswrapper[4799]: I1010 18:01:04.482367 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/386b8891-6eca-4986-b808-0e7ac3ec3339-config-data\") pod \"386b8891-6eca-4986-b808-0e7ac3ec3339\" (UID: \"386b8891-6eca-4986-b808-0e7ac3ec3339\") " Oct 10 18:01:04 crc kubenswrapper[4799]: I1010 18:01:04.483963 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/386b8891-6eca-4986-b808-0e7ac3ec3339-kube-api-access-jjgt7" (OuterVolumeSpecName: "kube-api-access-jjgt7") pod "386b8891-6eca-4986-b808-0e7ac3ec3339" (UID: "386b8891-6eca-4986-b808-0e7ac3ec3339"). InnerVolumeSpecName "kube-api-access-jjgt7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:01:04 crc kubenswrapper[4799]: I1010 18:01:04.486699 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/386b8891-6eca-4986-b808-0e7ac3ec3339-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "386b8891-6eca-4986-b808-0e7ac3ec3339" (UID: "386b8891-6eca-4986-b808-0e7ac3ec3339"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:01:04 crc kubenswrapper[4799]: I1010 18:01:04.500196 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/386b8891-6eca-4986-b808-0e7ac3ec3339-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "386b8891-6eca-4986-b808-0e7ac3ec3339" (UID: "386b8891-6eca-4986-b808-0e7ac3ec3339"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:01:04 crc kubenswrapper[4799]: I1010 18:01:04.521811 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/386b8891-6eca-4986-b808-0e7ac3ec3339-config-data" (OuterVolumeSpecName: "config-data") pod "386b8891-6eca-4986-b808-0e7ac3ec3339" (UID: "386b8891-6eca-4986-b808-0e7ac3ec3339"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:01:04 crc kubenswrapper[4799]: I1010 18:01:04.585205 4799 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/386b8891-6eca-4986-b808-0e7ac3ec3339-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 10 18:01:04 crc kubenswrapper[4799]: I1010 18:01:04.585269 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/386b8891-6eca-4986-b808-0e7ac3ec3339-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 18:01:04 crc kubenswrapper[4799]: I1010 18:01:04.585289 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/386b8891-6eca-4986-b808-0e7ac3ec3339-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:01:04 crc kubenswrapper[4799]: I1010 18:01:04.585312 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jjgt7\" (UniqueName: \"kubernetes.io/projected/386b8891-6eca-4986-b808-0e7ac3ec3339-kube-api-access-jjgt7\") on node \"crc\" DevicePath \"\"" Oct 10 18:01:04 crc kubenswrapper[4799]: I1010 18:01:04.961223 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-596f5c78cd-pnssg" Oct 10 18:01:05 crc kubenswrapper[4799]: I1010 18:01:05.015620 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7d9cf495-24s9n" event={"ID":"81c07d06-a6c1-43c5-ada3-2e734679d614","Type":"ContainerDied","Data":"2945e01367df4b23af1cb90d0599cae75cc8f6b8bbe4be128e91bd9f31375e97"} Oct 10 18:01:05 crc kubenswrapper[4799]: I1010 18:01:05.015717 4799 scope.go:117] "RemoveContainer" containerID="24180e9f457afe208f0537ac29319dfe498db54c3fa76609183cd46286653bb7" Oct 10 18:01:05 crc kubenswrapper[4799]: I1010 18:01:05.015745 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b7d9cf495-24s9n" Oct 10 18:01:05 crc kubenswrapper[4799]: I1010 18:01:05.020592 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-596f5c78cd-pnssg" Oct 10 18:01:05 crc kubenswrapper[4799]: I1010 18:01:05.022889 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29335321-x447z" Oct 10 18:01:05 crc kubenswrapper[4799]: I1010 18:01:05.022879 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29335321-x447z" event={"ID":"386b8891-6eca-4986-b808-0e7ac3ec3339","Type":"ContainerDied","Data":"1c2feb5416705fcab6c29ea608d6c7b70ffd811ac8783c652204133cba536131"} Oct 10 18:01:05 crc kubenswrapper[4799]: I1010 18:01:05.022967 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1c2feb5416705fcab6c29ea608d6c7b70ffd811ac8783c652204133cba536131" Oct 10 18:01:05 crc kubenswrapper[4799]: I1010 18:01:05.095505 4799 scope.go:117] "RemoveContainer" containerID="1db11762b0dbaf9129fcd7afe1282bc73b3b0a2d9349c8cf28b69be8f32ac84d" Oct 10 18:01:05 crc kubenswrapper[4799]: I1010 18:01:05.103817 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b7d9cf495-24s9n"] Oct 10 18:01:05 crc kubenswrapper[4799]: I1010 18:01:05.110400 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6b7d9cf495-24s9n"] Oct 10 18:01:05 crc kubenswrapper[4799]: I1010 18:01:05.417729 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81c07d06-a6c1-43c5-ada3-2e734679d614" path="/var/lib/kubelet/pods/81c07d06-a6c1-43c5-ada3-2e734679d614/volumes" Oct 10 18:01:19 crc kubenswrapper[4799]: I1010 18:01:19.637374 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-rljmt"] Oct 10 18:01:19 crc kubenswrapper[4799]: E1010 18:01:19.638319 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="386b8891-6eca-4986-b808-0e7ac3ec3339" containerName="keystone-cron" Oct 10 18:01:19 crc kubenswrapper[4799]: I1010 18:01:19.638336 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="386b8891-6eca-4986-b808-0e7ac3ec3339" containerName="keystone-cron" Oct 10 18:01:19 crc kubenswrapper[4799]: E1010 18:01:19.638357 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81c07d06-a6c1-43c5-ada3-2e734679d614" containerName="init" Oct 10 18:01:19 crc kubenswrapper[4799]: I1010 18:01:19.638364 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="81c07d06-a6c1-43c5-ada3-2e734679d614" containerName="init" Oct 10 18:01:19 crc kubenswrapper[4799]: E1010 18:01:19.638380 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81c07d06-a6c1-43c5-ada3-2e734679d614" containerName="dnsmasq-dns" Oct 10 18:01:19 crc kubenswrapper[4799]: I1010 18:01:19.638386 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="81c07d06-a6c1-43c5-ada3-2e734679d614" containerName="dnsmasq-dns" Oct 10 18:01:19 crc kubenswrapper[4799]: I1010 18:01:19.638536 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="81c07d06-a6c1-43c5-ada3-2e734679d614" containerName="dnsmasq-dns" Oct 10 18:01:19 crc kubenswrapper[4799]: I1010 18:01:19.638553 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="386b8891-6eca-4986-b808-0e7ac3ec3339" containerName="keystone-cron" Oct 10 18:01:19 crc kubenswrapper[4799]: I1010 18:01:19.639214 4799 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/neutron-db-create-rljmt" Oct 10 18:01:19 crc kubenswrapper[4799]: I1010 18:01:19.644657 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-rljmt"] Oct 10 18:01:19 crc kubenswrapper[4799]: I1010 18:01:19.758457 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tnn54\" (UniqueName: \"kubernetes.io/projected/0bb189b2-d065-4e80-921f-f9fd38382e9f-kube-api-access-tnn54\") pod \"neutron-db-create-rljmt\" (UID: \"0bb189b2-d065-4e80-921f-f9fd38382e9f\") " pod="openstack/neutron-db-create-rljmt" Oct 10 18:01:19 crc kubenswrapper[4799]: I1010 18:01:19.859827 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tnn54\" (UniqueName: \"kubernetes.io/projected/0bb189b2-d065-4e80-921f-f9fd38382e9f-kube-api-access-tnn54\") pod \"neutron-db-create-rljmt\" (UID: \"0bb189b2-d065-4e80-921f-f9fd38382e9f\") " pod="openstack/neutron-db-create-rljmt" Oct 10 18:01:19 crc kubenswrapper[4799]: I1010 18:01:19.882215 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tnn54\" (UniqueName: \"kubernetes.io/projected/0bb189b2-d065-4e80-921f-f9fd38382e9f-kube-api-access-tnn54\") pod \"neutron-db-create-rljmt\" (UID: \"0bb189b2-d065-4e80-921f-f9fd38382e9f\") " pod="openstack/neutron-db-create-rljmt" Oct 10 18:01:20 crc kubenswrapper[4799]: I1010 18:01:20.010207 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-rljmt" Oct 10 18:01:20 crc kubenswrapper[4799]: I1010 18:01:20.444161 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-rljmt"] Oct 10 18:01:21 crc kubenswrapper[4799]: I1010 18:01:21.219542 4799 generic.go:334] "Generic (PLEG): container finished" podID="0bb189b2-d065-4e80-921f-f9fd38382e9f" containerID="36d3ee12f53c9548c92cbe67328cdde1224408eb4d01d226e182cb955ade830e" exitCode=0 Oct 10 18:01:21 crc kubenswrapper[4799]: I1010 18:01:21.219636 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-rljmt" event={"ID":"0bb189b2-d065-4e80-921f-f9fd38382e9f","Type":"ContainerDied","Data":"36d3ee12f53c9548c92cbe67328cdde1224408eb4d01d226e182cb955ade830e"} Oct 10 18:01:21 crc kubenswrapper[4799]: I1010 18:01:21.220089 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-rljmt" event={"ID":"0bb189b2-d065-4e80-921f-f9fd38382e9f","Type":"ContainerStarted","Data":"57cf677ebf0407001dfaccecf938c015294b3843023e466965ce518f117c6fad"} Oct 10 18:01:22 crc kubenswrapper[4799]: I1010 18:01:22.607490 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-rljmt" Oct 10 18:01:22 crc kubenswrapper[4799]: I1010 18:01:22.708451 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tnn54\" (UniqueName: \"kubernetes.io/projected/0bb189b2-d065-4e80-921f-f9fd38382e9f-kube-api-access-tnn54\") pod \"0bb189b2-d065-4e80-921f-f9fd38382e9f\" (UID: \"0bb189b2-d065-4e80-921f-f9fd38382e9f\") " Oct 10 18:01:22 crc kubenswrapper[4799]: I1010 18:01:22.714078 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0bb189b2-d065-4e80-921f-f9fd38382e9f-kube-api-access-tnn54" (OuterVolumeSpecName: "kube-api-access-tnn54") pod "0bb189b2-d065-4e80-921f-f9fd38382e9f" (UID: "0bb189b2-d065-4e80-921f-f9fd38382e9f"). 
InnerVolumeSpecName "kube-api-access-tnn54". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:01:22 crc kubenswrapper[4799]: I1010 18:01:22.810302 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tnn54\" (UniqueName: \"kubernetes.io/projected/0bb189b2-d065-4e80-921f-f9fd38382e9f-kube-api-access-tnn54\") on node \"crc\" DevicePath \"\"" Oct 10 18:01:23 crc kubenswrapper[4799]: I1010 18:01:23.248064 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-rljmt" event={"ID":"0bb189b2-d065-4e80-921f-f9fd38382e9f","Type":"ContainerDied","Data":"57cf677ebf0407001dfaccecf938c015294b3843023e466965ce518f117c6fad"} Oct 10 18:01:23 crc kubenswrapper[4799]: I1010 18:01:23.248113 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="57cf677ebf0407001dfaccecf938c015294b3843023e466965ce518f117c6fad" Oct 10 18:01:23 crc kubenswrapper[4799]: I1010 18:01:23.248125 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-rljmt" Oct 10 18:01:29 crc kubenswrapper[4799]: I1010 18:01:29.758084 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5ea6-account-create-6fwkh"] Oct 10 18:01:29 crc kubenswrapper[4799]: E1010 18:01:29.761040 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bb189b2-d065-4e80-921f-f9fd38382e9f" containerName="mariadb-database-create" Oct 10 18:01:29 crc kubenswrapper[4799]: I1010 18:01:29.761077 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bb189b2-d065-4e80-921f-f9fd38382e9f" containerName="mariadb-database-create" Oct 10 18:01:29 crc kubenswrapper[4799]: I1010 18:01:29.762064 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="0bb189b2-d065-4e80-921f-f9fd38382e9f" containerName="mariadb-database-create" Oct 10 18:01:29 crc kubenswrapper[4799]: I1010 18:01:29.764182 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5ea6-account-create-6fwkh" Oct 10 18:01:29 crc kubenswrapper[4799]: I1010 18:01:29.767480 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Oct 10 18:01:29 crc kubenswrapper[4799]: I1010 18:01:29.785645 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5ea6-account-create-6fwkh"] Oct 10 18:01:29 crc kubenswrapper[4799]: I1010 18:01:29.844936 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5sv7\" (UniqueName: \"kubernetes.io/projected/b0bb367f-695f-493a-8cc2-04c336682c1f-kube-api-access-g5sv7\") pod \"neutron-5ea6-account-create-6fwkh\" (UID: \"b0bb367f-695f-493a-8cc2-04c336682c1f\") " pod="openstack/neutron-5ea6-account-create-6fwkh" Oct 10 18:01:29 crc kubenswrapper[4799]: I1010 18:01:29.947021 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5sv7\" (UniqueName: \"kubernetes.io/projected/b0bb367f-695f-493a-8cc2-04c336682c1f-kube-api-access-g5sv7\") pod \"neutron-5ea6-account-create-6fwkh\" (UID: \"b0bb367f-695f-493a-8cc2-04c336682c1f\") " pod="openstack/neutron-5ea6-account-create-6fwkh" Oct 10 18:01:29 crc kubenswrapper[4799]: I1010 18:01:29.981067 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5sv7\" (UniqueName: \"kubernetes.io/projected/b0bb367f-695f-493a-8cc2-04c336682c1f-kube-api-access-g5sv7\") pod \"neutron-5ea6-account-create-6fwkh\" (UID: \"b0bb367f-695f-493a-8cc2-04c336682c1f\") " pod="openstack/neutron-5ea6-account-create-6fwkh" Oct 10 18:01:30 crc kubenswrapper[4799]: I1010 18:01:30.100470 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5ea6-account-create-6fwkh" Oct 10 18:01:30 crc kubenswrapper[4799]: I1010 18:01:30.522328 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5ea6-account-create-6fwkh"] Oct 10 18:01:31 crc kubenswrapper[4799]: I1010 18:01:31.327802 4799 generic.go:334] "Generic (PLEG): container finished" podID="b0bb367f-695f-493a-8cc2-04c336682c1f" containerID="9df01443b9762abb687a9d8999266dc191394bd7b698a1a0506582d6606d989f" exitCode=0 Oct 10 18:01:31 crc kubenswrapper[4799]: I1010 18:01:31.327932 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5ea6-account-create-6fwkh" event={"ID":"b0bb367f-695f-493a-8cc2-04c336682c1f","Type":"ContainerDied","Data":"9df01443b9762abb687a9d8999266dc191394bd7b698a1a0506582d6606d989f"} Oct 10 18:01:31 crc kubenswrapper[4799]: I1010 18:01:31.328201 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5ea6-account-create-6fwkh" event={"ID":"b0bb367f-695f-493a-8cc2-04c336682c1f","Type":"ContainerStarted","Data":"0d09aa81a56280d8b2e1ca0b09b8b788855fd8ceba699fb36e88a1d5e5c27689"} Oct 10 18:01:32 crc kubenswrapper[4799]: I1010 18:01:32.666621 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5ea6-account-create-6fwkh" Oct 10 18:01:32 crc kubenswrapper[4799]: I1010 18:01:32.695779 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g5sv7\" (UniqueName: \"kubernetes.io/projected/b0bb367f-695f-493a-8cc2-04c336682c1f-kube-api-access-g5sv7\") pod \"b0bb367f-695f-493a-8cc2-04c336682c1f\" (UID: \"b0bb367f-695f-493a-8cc2-04c336682c1f\") " Oct 10 18:01:32 crc kubenswrapper[4799]: I1010 18:01:32.701328 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0bb367f-695f-493a-8cc2-04c336682c1f-kube-api-access-g5sv7" (OuterVolumeSpecName: "kube-api-access-g5sv7") pod "b0bb367f-695f-493a-8cc2-04c336682c1f" (UID: "b0bb367f-695f-493a-8cc2-04c336682c1f"). InnerVolumeSpecName "kube-api-access-g5sv7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:01:32 crc kubenswrapper[4799]: I1010 18:01:32.798288 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g5sv7\" (UniqueName: \"kubernetes.io/projected/b0bb367f-695f-493a-8cc2-04c336682c1f-kube-api-access-g5sv7\") on node \"crc\" DevicePath \"\"" Oct 10 18:01:33 crc kubenswrapper[4799]: I1010 18:01:33.353374 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5ea6-account-create-6fwkh" event={"ID":"b0bb367f-695f-493a-8cc2-04c336682c1f","Type":"ContainerDied","Data":"0d09aa81a56280d8b2e1ca0b09b8b788855fd8ceba699fb36e88a1d5e5c27689"} Oct 10 18:01:33 crc kubenswrapper[4799]: I1010 18:01:33.353893 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0d09aa81a56280d8b2e1ca0b09b8b788855fd8ceba699fb36e88a1d5e5c27689" Oct 10 18:01:33 crc kubenswrapper[4799]: I1010 18:01:33.353465 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5ea6-account-create-6fwkh" Oct 10 18:01:34 crc kubenswrapper[4799]: I1010 18:01:34.983060 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-hq6ns"] Oct 10 18:01:34 crc kubenswrapper[4799]: E1010 18:01:34.983680 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0bb367f-695f-493a-8cc2-04c336682c1f" containerName="mariadb-account-create" Oct 10 18:01:34 crc kubenswrapper[4799]: I1010 18:01:34.983705 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0bb367f-695f-493a-8cc2-04c336682c1f" containerName="mariadb-account-create" Oct 10 18:01:34 crc kubenswrapper[4799]: I1010 18:01:34.984132 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0bb367f-695f-493a-8cc2-04c336682c1f" containerName="mariadb-account-create" Oct 10 18:01:34 crc kubenswrapper[4799]: I1010 18:01:34.985184 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-hq6ns" Oct 10 18:01:34 crc kubenswrapper[4799]: I1010 18:01:34.988496 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 10 18:01:34 crc kubenswrapper[4799]: I1010 18:01:34.989519 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 10 18:01:34 crc kubenswrapper[4799]: I1010 18:01:34.989774 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-l49lq" Oct 10 18:01:35 crc kubenswrapper[4799]: I1010 18:01:35.008916 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-hq6ns"] Oct 10 18:01:35 crc kubenswrapper[4799]: I1010 18:01:35.039463 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a61c870f-1ead-4ccb-b226-7326e146af42-combined-ca-bundle\") pod \"neutron-db-sync-hq6ns\" (UID: \"a61c870f-1ead-4ccb-b226-7326e146af42\") " pod="openstack/neutron-db-sync-hq6ns" Oct 10 18:01:35 crc kubenswrapper[4799]: I1010 18:01:35.039608 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a61c870f-1ead-4ccb-b226-7326e146af42-config\") pod \"neutron-db-sync-hq6ns\" (UID: \"a61c870f-1ead-4ccb-b226-7326e146af42\") " pod="openstack/neutron-db-sync-hq6ns" Oct 10 18:01:35 crc kubenswrapper[4799]: I1010 18:01:35.039731 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tmrhn\" (UniqueName: \"kubernetes.io/projected/a61c870f-1ead-4ccb-b226-7326e146af42-kube-api-access-tmrhn\") pod \"neutron-db-sync-hq6ns\" (UID: \"a61c870f-1ead-4ccb-b226-7326e146af42\") " pod="openstack/neutron-db-sync-hq6ns" Oct 10 18:01:35 crc kubenswrapper[4799]: I1010 18:01:35.142221 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a61c870f-1ead-4ccb-b226-7326e146af42-config\") pod \"neutron-db-sync-hq6ns\" (UID: \"a61c870f-1ead-4ccb-b226-7326e146af42\") " pod="openstack/neutron-db-sync-hq6ns" Oct 10 18:01:35 crc kubenswrapper[4799]: I1010 18:01:35.142308 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tmrhn\" (UniqueName: \"kubernetes.io/projected/a61c870f-1ead-4ccb-b226-7326e146af42-kube-api-access-tmrhn\") pod \"neutron-db-sync-hq6ns\" (UID: \"a61c870f-1ead-4ccb-b226-7326e146af42\") " pod="openstack/neutron-db-sync-hq6ns" Oct 10 18:01:35 crc kubenswrapper[4799]: I1010 18:01:35.142462 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a61c870f-1ead-4ccb-b226-7326e146af42-combined-ca-bundle\") pod \"neutron-db-sync-hq6ns\" (UID: \"a61c870f-1ead-4ccb-b226-7326e146af42\") " pod="openstack/neutron-db-sync-hq6ns" Oct 10 18:01:35 crc kubenswrapper[4799]: I1010 18:01:35.150568 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a61c870f-1ead-4ccb-b226-7326e146af42-combined-ca-bundle\") pod \"neutron-db-sync-hq6ns\" (UID: \"a61c870f-1ead-4ccb-b226-7326e146af42\") " pod="openstack/neutron-db-sync-hq6ns" Oct 10 18:01:35 crc kubenswrapper[4799]: I1010 18:01:35.154360 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" 
(UniqueName: \"kubernetes.io/secret/a61c870f-1ead-4ccb-b226-7326e146af42-config\") pod \"neutron-db-sync-hq6ns\" (UID: \"a61c870f-1ead-4ccb-b226-7326e146af42\") " pod="openstack/neutron-db-sync-hq6ns" Oct 10 18:01:35 crc kubenswrapper[4799]: I1010 18:01:35.177154 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tmrhn\" (UniqueName: \"kubernetes.io/projected/a61c870f-1ead-4ccb-b226-7326e146af42-kube-api-access-tmrhn\") pod \"neutron-db-sync-hq6ns\" (UID: \"a61c870f-1ead-4ccb-b226-7326e146af42\") " pod="openstack/neutron-db-sync-hq6ns" Oct 10 18:01:35 crc kubenswrapper[4799]: I1010 18:01:35.308403 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-hq6ns" Oct 10 18:01:35 crc kubenswrapper[4799]: I1010 18:01:35.642003 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-hq6ns"] Oct 10 18:01:35 crc kubenswrapper[4799]: W1010 18:01:35.647297 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda61c870f_1ead_4ccb_b226_7326e146af42.slice/crio-ad8356145dfa71ad9ab65ad0f5efaf48419de9617b3dc3f5eff0e0475e565d20 WatchSource:0}: Error finding container ad8356145dfa71ad9ab65ad0f5efaf48419de9617b3dc3f5eff0e0475e565d20: Status 404 returned error can't find the container with id ad8356145dfa71ad9ab65ad0f5efaf48419de9617b3dc3f5eff0e0475e565d20 Oct 10 18:01:36 crc kubenswrapper[4799]: I1010 18:01:36.387020 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-hq6ns" event={"ID":"a61c870f-1ead-4ccb-b226-7326e146af42","Type":"ContainerStarted","Data":"445fba41263834141280af4408a74bb8f56e3cae6baa7003c2d82f0939b59ca0"} Oct 10 18:01:36 crc kubenswrapper[4799]: I1010 18:01:36.387394 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-hq6ns" event={"ID":"a61c870f-1ead-4ccb-b226-7326e146af42","Type":"ContainerStarted","Data":"ad8356145dfa71ad9ab65ad0f5efaf48419de9617b3dc3f5eff0e0475e565d20"} Oct 10 18:01:36 crc kubenswrapper[4799]: I1010 18:01:36.405411 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-hq6ns" podStartSLOduration=2.405373535 podStartE2EDuration="2.405373535s" podCreationTimestamp="2025-10-10 18:01:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:01:36.401014079 +0000 UTC m=+5389.909338254" watchObservedRunningTime="2025-10-10 18:01:36.405373535 +0000 UTC m=+5389.913697680" Oct 10 18:01:40 crc kubenswrapper[4799]: I1010 18:01:40.437291 4799 generic.go:334] "Generic (PLEG): container finished" podID="a61c870f-1ead-4ccb-b226-7326e146af42" containerID="445fba41263834141280af4408a74bb8f56e3cae6baa7003c2d82f0939b59ca0" exitCode=0 Oct 10 18:01:40 crc kubenswrapper[4799]: I1010 18:01:40.437390 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-hq6ns" event={"ID":"a61c870f-1ead-4ccb-b226-7326e146af42","Type":"ContainerDied","Data":"445fba41263834141280af4408a74bb8f56e3cae6baa7003c2d82f0939b59ca0"} Oct 10 18:01:41 crc kubenswrapper[4799]: I1010 18:01:41.809985 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-hq6ns" Oct 10 18:01:41 crc kubenswrapper[4799]: I1010 18:01:41.909590 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a61c870f-1ead-4ccb-b226-7326e146af42-combined-ca-bundle\") pod \"a61c870f-1ead-4ccb-b226-7326e146af42\" (UID: \"a61c870f-1ead-4ccb-b226-7326e146af42\") " Oct 10 18:01:41 crc kubenswrapper[4799]: I1010 18:01:41.910052 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a61c870f-1ead-4ccb-b226-7326e146af42-config\") pod \"a61c870f-1ead-4ccb-b226-7326e146af42\" (UID: \"a61c870f-1ead-4ccb-b226-7326e146af42\") " Oct 10 18:01:41 crc kubenswrapper[4799]: I1010 18:01:41.910271 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tmrhn\" (UniqueName: \"kubernetes.io/projected/a61c870f-1ead-4ccb-b226-7326e146af42-kube-api-access-tmrhn\") pod \"a61c870f-1ead-4ccb-b226-7326e146af42\" (UID: \"a61c870f-1ead-4ccb-b226-7326e146af42\") " Oct 10 18:01:41 crc kubenswrapper[4799]: I1010 18:01:41.918304 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a61c870f-1ead-4ccb-b226-7326e146af42-kube-api-access-tmrhn" (OuterVolumeSpecName: "kube-api-access-tmrhn") pod "a61c870f-1ead-4ccb-b226-7326e146af42" (UID: "a61c870f-1ead-4ccb-b226-7326e146af42"). InnerVolumeSpecName "kube-api-access-tmrhn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:01:41 crc kubenswrapper[4799]: I1010 18:01:41.955390 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a61c870f-1ead-4ccb-b226-7326e146af42-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a61c870f-1ead-4ccb-b226-7326e146af42" (UID: "a61c870f-1ead-4ccb-b226-7326e146af42"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:01:41 crc kubenswrapper[4799]: I1010 18:01:41.958727 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a61c870f-1ead-4ccb-b226-7326e146af42-config" (OuterVolumeSpecName: "config") pod "a61c870f-1ead-4ccb-b226-7326e146af42" (UID: "a61c870f-1ead-4ccb-b226-7326e146af42"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.012281 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tmrhn\" (UniqueName: \"kubernetes.io/projected/a61c870f-1ead-4ccb-b226-7326e146af42-kube-api-access-tmrhn\") on node \"crc\" DevicePath \"\"" Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.012319 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a61c870f-1ead-4ccb-b226-7326e146af42-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.012331 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/a61c870f-1ead-4ccb-b226-7326e146af42-config\") on node \"crc\" DevicePath \"\"" Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.463179 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-hq6ns" event={"ID":"a61c870f-1ead-4ccb-b226-7326e146af42","Type":"ContainerDied","Data":"ad8356145dfa71ad9ab65ad0f5efaf48419de9617b3dc3f5eff0e0475e565d20"} Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.463237 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ad8356145dfa71ad9ab65ad0f5efaf48419de9617b3dc3f5eff0e0475e565d20" Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.463287 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-hq6ns" Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.733036 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7b68bbc6b9-qpjxb"] Oct 10 18:01:42 crc kubenswrapper[4799]: E1010 18:01:42.733522 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a61c870f-1ead-4ccb-b226-7326e146af42" containerName="neutron-db-sync" Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.733568 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a61c870f-1ead-4ccb-b226-7326e146af42" containerName="neutron-db-sync" Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.733855 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="a61c870f-1ead-4ccb-b226-7326e146af42" containerName="neutron-db-sync" Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.735401 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7b68bbc6b9-qpjxb" Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.755716 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7b68bbc6b9-qpjxb"] Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.834684 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9x2tv\" (UniqueName: \"kubernetes.io/projected/7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d-kube-api-access-9x2tv\") pod \"dnsmasq-dns-7b68bbc6b9-qpjxb\" (UID: \"7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d\") " pod="openstack/dnsmasq-dns-7b68bbc6b9-qpjxb" Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.834887 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d-ovsdbserver-nb\") pod \"dnsmasq-dns-7b68bbc6b9-qpjxb\" (UID: \"7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d\") " pod="openstack/dnsmasq-dns-7b68bbc6b9-qpjxb" Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.834922 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d-ovsdbserver-sb\") pod \"dnsmasq-dns-7b68bbc6b9-qpjxb\" (UID: \"7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d\") " pod="openstack/dnsmasq-dns-7b68bbc6b9-qpjxb" Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.834952 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d-config\") pod \"dnsmasq-dns-7b68bbc6b9-qpjxb\" (UID: \"7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d\") " pod="openstack/dnsmasq-dns-7b68bbc6b9-qpjxb" Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.834999 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d-dns-svc\") pod \"dnsmasq-dns-7b68bbc6b9-qpjxb\" (UID: \"7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d\") " pod="openstack/dnsmasq-dns-7b68bbc6b9-qpjxb" Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.843621 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-6f5b986d77-bn68m"] Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.845364 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6f5b986d77-bn68m" Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.847827 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.848976 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.849211 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-l49lq" Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.859992 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6f5b986d77-bn68m"] Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.936175 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d-ovsdbserver-nb\") pod \"dnsmasq-dns-7b68bbc6b9-qpjxb\" (UID: \"7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d\") " pod="openstack/dnsmasq-dns-7b68bbc6b9-qpjxb" Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.936240 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d-ovsdbserver-sb\") pod \"dnsmasq-dns-7b68bbc6b9-qpjxb\" (UID: \"7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d\") " pod="openstack/dnsmasq-dns-7b68bbc6b9-qpjxb" Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.936269 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d-config\") pod \"dnsmasq-dns-7b68bbc6b9-qpjxb\" (UID: \"7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d\") " pod="openstack/dnsmasq-dns-7b68bbc6b9-qpjxb" Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.936305 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d-dns-svc\") pod \"dnsmasq-dns-7b68bbc6b9-qpjxb\" (UID: \"7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d\") " pod="openstack/dnsmasq-dns-7b68bbc6b9-qpjxb" Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.936334 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/57db1912-1efa-4d2a-ba44-c55a62f3929b-config\") pod \"neutron-6f5b986d77-bn68m\" (UID: \"57db1912-1efa-4d2a-ba44-c55a62f3929b\") " pod="openstack/neutron-6f5b986d77-bn68m" Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.936365 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9x2tv\" (UniqueName: \"kubernetes.io/projected/7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d-kube-api-access-9x2tv\") pod \"dnsmasq-dns-7b68bbc6b9-qpjxb\" (UID: \"7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d\") " pod="openstack/dnsmasq-dns-7b68bbc6b9-qpjxb" Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.936423 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wbvhs\" (UniqueName: \"kubernetes.io/projected/57db1912-1efa-4d2a-ba44-c55a62f3929b-kube-api-access-wbvhs\") pod \"neutron-6f5b986d77-bn68m\" (UID: \"57db1912-1efa-4d2a-ba44-c55a62f3929b\") " pod="openstack/neutron-6f5b986d77-bn68m" Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.936474 4799 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57db1912-1efa-4d2a-ba44-c55a62f3929b-combined-ca-bundle\") pod \"neutron-6f5b986d77-bn68m\" (UID: \"57db1912-1efa-4d2a-ba44-c55a62f3929b\") " pod="openstack/neutron-6f5b986d77-bn68m" Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.936513 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/57db1912-1efa-4d2a-ba44-c55a62f3929b-httpd-config\") pod \"neutron-6f5b986d77-bn68m\" (UID: \"57db1912-1efa-4d2a-ba44-c55a62f3929b\") " pod="openstack/neutron-6f5b986d77-bn68m" Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.937786 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d-ovsdbserver-nb\") pod \"dnsmasq-dns-7b68bbc6b9-qpjxb\" (UID: \"7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d\") " pod="openstack/dnsmasq-dns-7b68bbc6b9-qpjxb" Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.938631 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d-ovsdbserver-sb\") pod \"dnsmasq-dns-7b68bbc6b9-qpjxb\" (UID: \"7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d\") " pod="openstack/dnsmasq-dns-7b68bbc6b9-qpjxb" Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.939588 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d-config\") pod \"dnsmasq-dns-7b68bbc6b9-qpjxb\" (UID: \"7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d\") " pod="openstack/dnsmasq-dns-7b68bbc6b9-qpjxb" Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.940257 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d-dns-svc\") pod \"dnsmasq-dns-7b68bbc6b9-qpjxb\" (UID: \"7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d\") " pod="openstack/dnsmasq-dns-7b68bbc6b9-qpjxb" Oct 10 18:01:42 crc kubenswrapper[4799]: I1010 18:01:42.961669 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9x2tv\" (UniqueName: \"kubernetes.io/projected/7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d-kube-api-access-9x2tv\") pod \"dnsmasq-dns-7b68bbc6b9-qpjxb\" (UID: \"7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d\") " pod="openstack/dnsmasq-dns-7b68bbc6b9-qpjxb" Oct 10 18:01:43 crc kubenswrapper[4799]: I1010 18:01:43.037729 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57db1912-1efa-4d2a-ba44-c55a62f3929b-combined-ca-bundle\") pod \"neutron-6f5b986d77-bn68m\" (UID: \"57db1912-1efa-4d2a-ba44-c55a62f3929b\") " pod="openstack/neutron-6f5b986d77-bn68m" Oct 10 18:01:43 crc kubenswrapper[4799]: I1010 18:01:43.037836 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/57db1912-1efa-4d2a-ba44-c55a62f3929b-httpd-config\") pod \"neutron-6f5b986d77-bn68m\" (UID: \"57db1912-1efa-4d2a-ba44-c55a62f3929b\") " pod="openstack/neutron-6f5b986d77-bn68m" Oct 10 18:01:43 crc kubenswrapper[4799]: I1010 18:01:43.037960 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/secret/57db1912-1efa-4d2a-ba44-c55a62f3929b-config\") pod \"neutron-6f5b986d77-bn68m\" (UID: \"57db1912-1efa-4d2a-ba44-c55a62f3929b\") " pod="openstack/neutron-6f5b986d77-bn68m" Oct 10 18:01:43 crc kubenswrapper[4799]: I1010 18:01:43.038738 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbvhs\" (UniqueName: \"kubernetes.io/projected/57db1912-1efa-4d2a-ba44-c55a62f3929b-kube-api-access-wbvhs\") pod \"neutron-6f5b986d77-bn68m\" (UID: \"57db1912-1efa-4d2a-ba44-c55a62f3929b\") " pod="openstack/neutron-6f5b986d77-bn68m" Oct 10 18:01:43 crc kubenswrapper[4799]: I1010 18:01:43.042065 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/57db1912-1efa-4d2a-ba44-c55a62f3929b-httpd-config\") pod \"neutron-6f5b986d77-bn68m\" (UID: \"57db1912-1efa-4d2a-ba44-c55a62f3929b\") " pod="openstack/neutron-6f5b986d77-bn68m" Oct 10 18:01:43 crc kubenswrapper[4799]: I1010 18:01:43.042241 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/57db1912-1efa-4d2a-ba44-c55a62f3929b-config\") pod \"neutron-6f5b986d77-bn68m\" (UID: \"57db1912-1efa-4d2a-ba44-c55a62f3929b\") " pod="openstack/neutron-6f5b986d77-bn68m" Oct 10 18:01:43 crc kubenswrapper[4799]: I1010 18:01:43.042964 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57db1912-1efa-4d2a-ba44-c55a62f3929b-combined-ca-bundle\") pod \"neutron-6f5b986d77-bn68m\" (UID: \"57db1912-1efa-4d2a-ba44-c55a62f3929b\") " pod="openstack/neutron-6f5b986d77-bn68m" Oct 10 18:01:43 crc kubenswrapper[4799]: I1010 18:01:43.051874 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7b68bbc6b9-qpjxb" Oct 10 18:01:43 crc kubenswrapper[4799]: I1010 18:01:43.061014 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wbvhs\" (UniqueName: \"kubernetes.io/projected/57db1912-1efa-4d2a-ba44-c55a62f3929b-kube-api-access-wbvhs\") pod \"neutron-6f5b986d77-bn68m\" (UID: \"57db1912-1efa-4d2a-ba44-c55a62f3929b\") " pod="openstack/neutron-6f5b986d77-bn68m" Oct 10 18:01:43 crc kubenswrapper[4799]: I1010 18:01:43.182676 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6f5b986d77-bn68m" Oct 10 18:01:43 crc kubenswrapper[4799]: I1010 18:01:43.603139 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7b68bbc6b9-qpjxb"] Oct 10 18:01:43 crc kubenswrapper[4799]: I1010 18:01:43.742842 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6f5b986d77-bn68m"] Oct 10 18:01:43 crc kubenswrapper[4799]: W1010 18:01:43.748937 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod57db1912_1efa_4d2a_ba44_c55a62f3929b.slice/crio-2b60aa21f70437404eff66fa438796012b69362eabfce273d919b808fa6b6f17 WatchSource:0}: Error finding container 2b60aa21f70437404eff66fa438796012b69362eabfce273d919b808fa6b6f17: Status 404 returned error can't find the container with id 2b60aa21f70437404eff66fa438796012b69362eabfce273d919b808fa6b6f17 Oct 10 18:01:44 crc kubenswrapper[4799]: I1010 18:01:44.478406 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6f5b986d77-bn68m" event={"ID":"57db1912-1efa-4d2a-ba44-c55a62f3929b","Type":"ContainerStarted","Data":"7c5f048fcb4bfa7ca70a7116c8574b5d1e3f8cd5c83cf78dbd444341fd22a5aa"} Oct 10 18:01:44 crc kubenswrapper[4799]: I1010 18:01:44.478869 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-6f5b986d77-bn68m" Oct 10 18:01:44 crc kubenswrapper[4799]: I1010 18:01:44.478895 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6f5b986d77-bn68m" event={"ID":"57db1912-1efa-4d2a-ba44-c55a62f3929b","Type":"ContainerStarted","Data":"705e36e38d5abd8071e3b28270fee74aa838f76fc4f2bfbe1ab998f6b94dff3d"} Oct 10 18:01:44 crc kubenswrapper[4799]: I1010 18:01:44.478908 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6f5b986d77-bn68m" event={"ID":"57db1912-1efa-4d2a-ba44-c55a62f3929b","Type":"ContainerStarted","Data":"2b60aa21f70437404eff66fa438796012b69362eabfce273d919b808fa6b6f17"} Oct 10 18:01:44 crc kubenswrapper[4799]: I1010 18:01:44.480515 4799 generic.go:334] "Generic (PLEG): container finished" podID="7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d" containerID="ed3cc9eb0d83ceb7e79cd0ebe9f4e944a766d679b34554a501940899e8219444" exitCode=0 Oct 10 18:01:44 crc kubenswrapper[4799]: I1010 18:01:44.480556 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b68bbc6b9-qpjxb" event={"ID":"7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d","Type":"ContainerDied","Data":"ed3cc9eb0d83ceb7e79cd0ebe9f4e944a766d679b34554a501940899e8219444"} Oct 10 18:01:44 crc kubenswrapper[4799]: I1010 18:01:44.480581 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b68bbc6b9-qpjxb" event={"ID":"7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d","Type":"ContainerStarted","Data":"f9a43b6e5a392e7c9f52cf2f718892f969fd34ca23cfc9213b4f98e504887ca5"} Oct 10 18:01:44 crc kubenswrapper[4799]: I1010 18:01:44.499309 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-6f5b986d77-bn68m" podStartSLOduration=2.498735652 podStartE2EDuration="2.498735652s" podCreationTimestamp="2025-10-10 18:01:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:01:44.493968436 +0000 UTC m=+5398.002292581" watchObservedRunningTime="2025-10-10 18:01:44.498735652 +0000 UTC m=+5398.007059777" Oct 10 18:01:45 crc kubenswrapper[4799]: I1010 18:01:45.249171 4799 
patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 18:01:45 crc kubenswrapper[4799]: I1010 18:01:45.249268 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 18:01:45 crc kubenswrapper[4799]: I1010 18:01:45.494875 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b68bbc6b9-qpjxb" event={"ID":"7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d","Type":"ContainerStarted","Data":"bc46fd7d53c0a6a08ee38b93473d5d11e5b90a7596cd1f37676a77be9561e7f9"} Oct 10 18:01:45 crc kubenswrapper[4799]: I1010 18:01:45.495565 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7b68bbc6b9-qpjxb" Oct 10 18:01:45 crc kubenswrapper[4799]: I1010 18:01:45.529333 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7b68bbc6b9-qpjxb" podStartSLOduration=3.52931226 podStartE2EDuration="3.52931226s" podCreationTimestamp="2025-10-10 18:01:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:01:45.518826685 +0000 UTC m=+5399.027150810" watchObservedRunningTime="2025-10-10 18:01:45.52931226 +0000 UTC m=+5399.037636385" Oct 10 18:01:53 crc kubenswrapper[4799]: I1010 18:01:53.054006 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7b68bbc6b9-qpjxb" Oct 10 18:01:53 crc kubenswrapper[4799]: I1010 18:01:53.115697 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-669997949f-vlg8j"] Oct 10 18:01:53 crc kubenswrapper[4799]: I1010 18:01:53.116018 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-669997949f-vlg8j" podUID="041e991f-3439-4111-99db-5cec8f163e97" containerName="dnsmasq-dns" containerID="cri-o://c33afc9fd6624a6dae355503387b9c4d1b22621561a0bbb735dde7e57dab7fe9" gracePeriod=10 Oct 10 18:01:53 crc kubenswrapper[4799]: I1010 18:01:53.578028 4799 generic.go:334] "Generic (PLEG): container finished" podID="041e991f-3439-4111-99db-5cec8f163e97" containerID="c33afc9fd6624a6dae355503387b9c4d1b22621561a0bbb735dde7e57dab7fe9" exitCode=0 Oct 10 18:01:53 crc kubenswrapper[4799]: I1010 18:01:53.578193 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-669997949f-vlg8j" event={"ID":"041e991f-3439-4111-99db-5cec8f163e97","Type":"ContainerDied","Data":"c33afc9fd6624a6dae355503387b9c4d1b22621561a0bbb735dde7e57dab7fe9"} Oct 10 18:01:53 crc kubenswrapper[4799]: I1010 18:01:53.723104 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-669997949f-vlg8j" Oct 10 18:01:53 crc kubenswrapper[4799]: I1010 18:01:53.848919 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/041e991f-3439-4111-99db-5cec8f163e97-ovsdbserver-sb\") pod \"041e991f-3439-4111-99db-5cec8f163e97\" (UID: \"041e991f-3439-4111-99db-5cec8f163e97\") " Oct 10 18:01:53 crc kubenswrapper[4799]: I1010 18:01:53.849022 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/041e991f-3439-4111-99db-5cec8f163e97-ovsdbserver-nb\") pod \"041e991f-3439-4111-99db-5cec8f163e97\" (UID: \"041e991f-3439-4111-99db-5cec8f163e97\") " Oct 10 18:01:53 crc kubenswrapper[4799]: I1010 18:01:53.849112 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nrsnx\" (UniqueName: \"kubernetes.io/projected/041e991f-3439-4111-99db-5cec8f163e97-kube-api-access-nrsnx\") pod \"041e991f-3439-4111-99db-5cec8f163e97\" (UID: \"041e991f-3439-4111-99db-5cec8f163e97\") " Oct 10 18:01:53 crc kubenswrapper[4799]: I1010 18:01:53.849158 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/041e991f-3439-4111-99db-5cec8f163e97-config\") pod \"041e991f-3439-4111-99db-5cec8f163e97\" (UID: \"041e991f-3439-4111-99db-5cec8f163e97\") " Oct 10 18:01:53 crc kubenswrapper[4799]: I1010 18:01:53.849226 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/041e991f-3439-4111-99db-5cec8f163e97-dns-svc\") pod \"041e991f-3439-4111-99db-5cec8f163e97\" (UID: \"041e991f-3439-4111-99db-5cec8f163e97\") " Oct 10 18:01:53 crc kubenswrapper[4799]: I1010 18:01:53.868583 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/041e991f-3439-4111-99db-5cec8f163e97-kube-api-access-nrsnx" (OuterVolumeSpecName: "kube-api-access-nrsnx") pod "041e991f-3439-4111-99db-5cec8f163e97" (UID: "041e991f-3439-4111-99db-5cec8f163e97"). InnerVolumeSpecName "kube-api-access-nrsnx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:01:53 crc kubenswrapper[4799]: I1010 18:01:53.898458 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/041e991f-3439-4111-99db-5cec8f163e97-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "041e991f-3439-4111-99db-5cec8f163e97" (UID: "041e991f-3439-4111-99db-5cec8f163e97"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:01:53 crc kubenswrapper[4799]: I1010 18:01:53.900962 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/041e991f-3439-4111-99db-5cec8f163e97-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "041e991f-3439-4111-99db-5cec8f163e97" (UID: "041e991f-3439-4111-99db-5cec8f163e97"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:01:53 crc kubenswrapper[4799]: I1010 18:01:53.901000 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/041e991f-3439-4111-99db-5cec8f163e97-config" (OuterVolumeSpecName: "config") pod "041e991f-3439-4111-99db-5cec8f163e97" (UID: "041e991f-3439-4111-99db-5cec8f163e97"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:01:53 crc kubenswrapper[4799]: I1010 18:01:53.910421 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/041e991f-3439-4111-99db-5cec8f163e97-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "041e991f-3439-4111-99db-5cec8f163e97" (UID: "041e991f-3439-4111-99db-5cec8f163e97"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:01:53 crc kubenswrapper[4799]: I1010 18:01:53.951186 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nrsnx\" (UniqueName: \"kubernetes.io/projected/041e991f-3439-4111-99db-5cec8f163e97-kube-api-access-nrsnx\") on node \"crc\" DevicePath \"\"" Oct 10 18:01:53 crc kubenswrapper[4799]: I1010 18:01:53.951234 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/041e991f-3439-4111-99db-5cec8f163e97-config\") on node \"crc\" DevicePath \"\"" Oct 10 18:01:53 crc kubenswrapper[4799]: I1010 18:01:53.951244 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/041e991f-3439-4111-99db-5cec8f163e97-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 10 18:01:53 crc kubenswrapper[4799]: I1010 18:01:53.951253 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/041e991f-3439-4111-99db-5cec8f163e97-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 10 18:01:53 crc kubenswrapper[4799]: I1010 18:01:53.951261 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/041e991f-3439-4111-99db-5cec8f163e97-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 10 18:01:54 crc kubenswrapper[4799]: I1010 18:01:54.587804 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-669997949f-vlg8j" event={"ID":"041e991f-3439-4111-99db-5cec8f163e97","Type":"ContainerDied","Data":"6e445e149258491c3ea817b2a1e8d4d12c711631219d9c1fd778ed1bf302fd39"} Oct 10 18:01:54 crc kubenswrapper[4799]: I1010 18:01:54.588179 4799 scope.go:117] "RemoveContainer" containerID="c33afc9fd6624a6dae355503387b9c4d1b22621561a0bbb735dde7e57dab7fe9" Oct 10 18:01:54 crc kubenswrapper[4799]: I1010 18:01:54.588309 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-669997949f-vlg8j" Oct 10 18:01:54 crc kubenswrapper[4799]: I1010 18:01:54.621363 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-669997949f-vlg8j"] Oct 10 18:01:54 crc kubenswrapper[4799]: I1010 18:01:54.628738 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-669997949f-vlg8j"] Oct 10 18:01:54 crc kubenswrapper[4799]: I1010 18:01:54.638460 4799 scope.go:117] "RemoveContainer" containerID="3b3a6d3d5cb4a51f81495e6423502db3502e3dcf02cb6af9b677fbe91fcdde02" Oct 10 18:01:55 crc kubenswrapper[4799]: I1010 18:01:55.421919 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="041e991f-3439-4111-99db-5cec8f163e97" path="/var/lib/kubelet/pods/041e991f-3439-4111-99db-5cec8f163e97/volumes" Oct 10 18:01:58 crc kubenswrapper[4799]: I1010 18:01:58.496133 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-669997949f-vlg8j" podUID="041e991f-3439-4111-99db-5cec8f163e97" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.1.41:5353: i/o timeout" Oct 10 18:02:13 crc kubenswrapper[4799]: I1010 18:02:13.198117 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-6f5b986d77-bn68m" Oct 10 18:02:15 crc kubenswrapper[4799]: I1010 18:02:15.248871 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 18:02:15 crc kubenswrapper[4799]: I1010 18:02:15.249304 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 18:02:21 crc kubenswrapper[4799]: I1010 18:02:21.363669 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-w9z7v"] Oct 10 18:02:21 crc kubenswrapper[4799]: E1010 18:02:21.365285 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="041e991f-3439-4111-99db-5cec8f163e97" containerName="dnsmasq-dns" Oct 10 18:02:21 crc kubenswrapper[4799]: I1010 18:02:21.365374 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="041e991f-3439-4111-99db-5cec8f163e97" containerName="dnsmasq-dns" Oct 10 18:02:21 crc kubenswrapper[4799]: E1010 18:02:21.365463 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="041e991f-3439-4111-99db-5cec8f163e97" containerName="init" Oct 10 18:02:21 crc kubenswrapper[4799]: I1010 18:02:21.365527 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="041e991f-3439-4111-99db-5cec8f163e97" containerName="init" Oct 10 18:02:21 crc kubenswrapper[4799]: I1010 18:02:21.365743 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="041e991f-3439-4111-99db-5cec8f163e97" containerName="dnsmasq-dns" Oct 10 18:02:21 crc kubenswrapper[4799]: I1010 18:02:21.366420 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-w9z7v" Oct 10 18:02:21 crc kubenswrapper[4799]: I1010 18:02:21.386155 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-w9z7v"] Oct 10 18:02:21 crc kubenswrapper[4799]: I1010 18:02:21.539426 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7p79v\" (UniqueName: \"kubernetes.io/projected/4b682ece-9af6-4f90-ab20-0251369b6791-kube-api-access-7p79v\") pod \"glance-db-create-w9z7v\" (UID: \"4b682ece-9af6-4f90-ab20-0251369b6791\") " pod="openstack/glance-db-create-w9z7v" Oct 10 18:02:21 crc kubenswrapper[4799]: I1010 18:02:21.641473 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7p79v\" (UniqueName: \"kubernetes.io/projected/4b682ece-9af6-4f90-ab20-0251369b6791-kube-api-access-7p79v\") pod \"glance-db-create-w9z7v\" (UID: \"4b682ece-9af6-4f90-ab20-0251369b6791\") " pod="openstack/glance-db-create-w9z7v" Oct 10 18:02:21 crc kubenswrapper[4799]: I1010 18:02:21.672959 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7p79v\" (UniqueName: \"kubernetes.io/projected/4b682ece-9af6-4f90-ab20-0251369b6791-kube-api-access-7p79v\") pod \"glance-db-create-w9z7v\" (UID: \"4b682ece-9af6-4f90-ab20-0251369b6791\") " pod="openstack/glance-db-create-w9z7v" Oct 10 18:02:21 crc kubenswrapper[4799]: I1010 18:02:21.699922 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-w9z7v" Oct 10 18:02:22 crc kubenswrapper[4799]: I1010 18:02:22.170231 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-w9z7v"] Oct 10 18:02:22 crc kubenswrapper[4799]: I1010 18:02:22.906128 4799 generic.go:334] "Generic (PLEG): container finished" podID="4b682ece-9af6-4f90-ab20-0251369b6791" containerID="a72a10290936cdfe6e78e4f33f1f5c5189c99ba95b2770b0072d0dd686258f10" exitCode=0 Oct 10 18:02:22 crc kubenswrapper[4799]: I1010 18:02:22.906207 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-w9z7v" event={"ID":"4b682ece-9af6-4f90-ab20-0251369b6791","Type":"ContainerDied","Data":"a72a10290936cdfe6e78e4f33f1f5c5189c99ba95b2770b0072d0dd686258f10"} Oct 10 18:02:22 crc kubenswrapper[4799]: I1010 18:02:22.906506 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-w9z7v" event={"ID":"4b682ece-9af6-4f90-ab20-0251369b6791","Type":"ContainerStarted","Data":"eeca716cf777c3a56426ce1f4434d2fcbc7e906dfaa3a74a0c0e040e24112c63"} Oct 10 18:02:24 crc kubenswrapper[4799]: I1010 18:02:24.298070 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-w9z7v" Oct 10 18:02:24 crc kubenswrapper[4799]: I1010 18:02:24.390234 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7p79v\" (UniqueName: \"kubernetes.io/projected/4b682ece-9af6-4f90-ab20-0251369b6791-kube-api-access-7p79v\") pod \"4b682ece-9af6-4f90-ab20-0251369b6791\" (UID: \"4b682ece-9af6-4f90-ab20-0251369b6791\") " Oct 10 18:02:24 crc kubenswrapper[4799]: I1010 18:02:24.398589 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b682ece-9af6-4f90-ab20-0251369b6791-kube-api-access-7p79v" (OuterVolumeSpecName: "kube-api-access-7p79v") pod "4b682ece-9af6-4f90-ab20-0251369b6791" (UID: "4b682ece-9af6-4f90-ab20-0251369b6791"). 
InnerVolumeSpecName "kube-api-access-7p79v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:02:24 crc kubenswrapper[4799]: I1010 18:02:24.493598 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7p79v\" (UniqueName: \"kubernetes.io/projected/4b682ece-9af6-4f90-ab20-0251369b6791-kube-api-access-7p79v\") on node \"crc\" DevicePath \"\"" Oct 10 18:02:24 crc kubenswrapper[4799]: I1010 18:02:24.928033 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-w9z7v" event={"ID":"4b682ece-9af6-4f90-ab20-0251369b6791","Type":"ContainerDied","Data":"eeca716cf777c3a56426ce1f4434d2fcbc7e906dfaa3a74a0c0e040e24112c63"} Oct 10 18:02:24 crc kubenswrapper[4799]: I1010 18:02:24.928085 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eeca716cf777c3a56426ce1f4434d2fcbc7e906dfaa3a74a0c0e040e24112c63" Oct 10 18:02:24 crc kubenswrapper[4799]: I1010 18:02:24.928093 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-w9z7v" Oct 10 18:02:31 crc kubenswrapper[4799]: I1010 18:02:31.484489 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-f6c2-account-create-6ngzk"] Oct 10 18:02:31 crc kubenswrapper[4799]: E1010 18:02:31.485611 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b682ece-9af6-4f90-ab20-0251369b6791" containerName="mariadb-database-create" Oct 10 18:02:31 crc kubenswrapper[4799]: I1010 18:02:31.485629 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b682ece-9af6-4f90-ab20-0251369b6791" containerName="mariadb-database-create" Oct 10 18:02:31 crc kubenswrapper[4799]: I1010 18:02:31.485872 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b682ece-9af6-4f90-ab20-0251369b6791" containerName="mariadb-database-create" Oct 10 18:02:31 crc kubenswrapper[4799]: I1010 18:02:31.486671 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-f6c2-account-create-6ngzk" Oct 10 18:02:31 crc kubenswrapper[4799]: I1010 18:02:31.490808 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Oct 10 18:02:31 crc kubenswrapper[4799]: I1010 18:02:31.498645 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-f6c2-account-create-6ngzk"] Oct 10 18:02:31 crc kubenswrapper[4799]: I1010 18:02:31.536707 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7rrxr\" (UniqueName: \"kubernetes.io/projected/af686f10-f6a4-4f7b-96f0-1dc80aa26f65-kube-api-access-7rrxr\") pod \"glance-f6c2-account-create-6ngzk\" (UID: \"af686f10-f6a4-4f7b-96f0-1dc80aa26f65\") " pod="openstack/glance-f6c2-account-create-6ngzk" Oct 10 18:02:31 crc kubenswrapper[4799]: I1010 18:02:31.638517 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7rrxr\" (UniqueName: \"kubernetes.io/projected/af686f10-f6a4-4f7b-96f0-1dc80aa26f65-kube-api-access-7rrxr\") pod \"glance-f6c2-account-create-6ngzk\" (UID: \"af686f10-f6a4-4f7b-96f0-1dc80aa26f65\") " pod="openstack/glance-f6c2-account-create-6ngzk" Oct 10 18:02:31 crc kubenswrapper[4799]: I1010 18:02:31.666602 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7rrxr\" (UniqueName: \"kubernetes.io/projected/af686f10-f6a4-4f7b-96f0-1dc80aa26f65-kube-api-access-7rrxr\") pod \"glance-f6c2-account-create-6ngzk\" (UID: \"af686f10-f6a4-4f7b-96f0-1dc80aa26f65\") " pod="openstack/glance-f6c2-account-create-6ngzk" Oct 10 18:02:31 crc kubenswrapper[4799]: I1010 18:02:31.820970 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-f6c2-account-create-6ngzk" Oct 10 18:02:32 crc kubenswrapper[4799]: I1010 18:02:32.324639 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-f6c2-account-create-6ngzk"] Oct 10 18:02:32 crc kubenswrapper[4799]: W1010 18:02:32.327346 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaf686f10_f6a4_4f7b_96f0_1dc80aa26f65.slice/crio-0aa314dfbeca685ac1f0fcf3d4f58cc30b4d21fd596ec99189517c2874deedaf WatchSource:0}: Error finding container 0aa314dfbeca685ac1f0fcf3d4f58cc30b4d21fd596ec99189517c2874deedaf: Status 404 returned error can't find the container with id 0aa314dfbeca685ac1f0fcf3d4f58cc30b4d21fd596ec99189517c2874deedaf Oct 10 18:02:33 crc kubenswrapper[4799]: I1010 18:02:33.018288 4799 generic.go:334] "Generic (PLEG): container finished" podID="af686f10-f6a4-4f7b-96f0-1dc80aa26f65" containerID="ebafbe89a2a75b4b3527b5913fd4e2a1fc5d7a4886ac2a3f624e74497e2056df" exitCode=0 Oct 10 18:02:33 crc kubenswrapper[4799]: I1010 18:02:33.018366 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-f6c2-account-create-6ngzk" event={"ID":"af686f10-f6a4-4f7b-96f0-1dc80aa26f65","Type":"ContainerDied","Data":"ebafbe89a2a75b4b3527b5913fd4e2a1fc5d7a4886ac2a3f624e74497e2056df"} Oct 10 18:02:33 crc kubenswrapper[4799]: I1010 18:02:33.018416 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-f6c2-account-create-6ngzk" event={"ID":"af686f10-f6a4-4f7b-96f0-1dc80aa26f65","Type":"ContainerStarted","Data":"0aa314dfbeca685ac1f0fcf3d4f58cc30b4d21fd596ec99189517c2874deedaf"} Oct 10 18:02:34 crc kubenswrapper[4799]: I1010 18:02:34.373227 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-f6c2-account-create-6ngzk" Oct 10 18:02:34 crc kubenswrapper[4799]: I1010 18:02:34.395405 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7rrxr\" (UniqueName: \"kubernetes.io/projected/af686f10-f6a4-4f7b-96f0-1dc80aa26f65-kube-api-access-7rrxr\") pod \"af686f10-f6a4-4f7b-96f0-1dc80aa26f65\" (UID: \"af686f10-f6a4-4f7b-96f0-1dc80aa26f65\") " Oct 10 18:02:34 crc kubenswrapper[4799]: I1010 18:02:34.402009 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af686f10-f6a4-4f7b-96f0-1dc80aa26f65-kube-api-access-7rrxr" (OuterVolumeSpecName: "kube-api-access-7rrxr") pod "af686f10-f6a4-4f7b-96f0-1dc80aa26f65" (UID: "af686f10-f6a4-4f7b-96f0-1dc80aa26f65"). InnerVolumeSpecName "kube-api-access-7rrxr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:02:34 crc kubenswrapper[4799]: I1010 18:02:34.498288 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7rrxr\" (UniqueName: \"kubernetes.io/projected/af686f10-f6a4-4f7b-96f0-1dc80aa26f65-kube-api-access-7rrxr\") on node \"crc\" DevicePath \"\"" Oct 10 18:02:35 crc kubenswrapper[4799]: I1010 18:02:35.042201 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-f6c2-account-create-6ngzk" event={"ID":"af686f10-f6a4-4f7b-96f0-1dc80aa26f65","Type":"ContainerDied","Data":"0aa314dfbeca685ac1f0fcf3d4f58cc30b4d21fd596ec99189517c2874deedaf"} Oct 10 18:02:35 crc kubenswrapper[4799]: I1010 18:02:35.042250 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0aa314dfbeca685ac1f0fcf3d4f58cc30b4d21fd596ec99189517c2874deedaf" Oct 10 18:02:35 crc kubenswrapper[4799]: I1010 18:02:35.042281 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-f6c2-account-create-6ngzk" Oct 10 18:02:36 crc kubenswrapper[4799]: I1010 18:02:36.734108 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-jq7rj"] Oct 10 18:02:36 crc kubenswrapper[4799]: E1010 18:02:36.735352 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af686f10-f6a4-4f7b-96f0-1dc80aa26f65" containerName="mariadb-account-create" Oct 10 18:02:36 crc kubenswrapper[4799]: I1010 18:02:36.735389 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="af686f10-f6a4-4f7b-96f0-1dc80aa26f65" containerName="mariadb-account-create" Oct 10 18:02:36 crc kubenswrapper[4799]: I1010 18:02:36.735934 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="af686f10-f6a4-4f7b-96f0-1dc80aa26f65" containerName="mariadb-account-create" Oct 10 18:02:36 crc kubenswrapper[4799]: I1010 18:02:36.737336 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-jq7rj" Oct 10 18:02:36 crc kubenswrapper[4799]: I1010 18:02:36.740953 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Oct 10 18:02:36 crc kubenswrapper[4799]: I1010 18:02:36.741816 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-4l9wb" Oct 10 18:02:36 crc kubenswrapper[4799]: I1010 18:02:36.753289 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-jq7rj"] Oct 10 18:02:36 crc kubenswrapper[4799]: I1010 18:02:36.843740 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfbf2401-767f-4a5c-a4a3-c12ac0833631-combined-ca-bundle\") pod \"glance-db-sync-jq7rj\" (UID: \"bfbf2401-767f-4a5c-a4a3-c12ac0833631\") " pod="openstack/glance-db-sync-jq7rj" Oct 10 18:02:36 crc kubenswrapper[4799]: I1010 18:02:36.843888 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bfbf2401-767f-4a5c-a4a3-c12ac0833631-db-sync-config-data\") pod \"glance-db-sync-jq7rj\" (UID: \"bfbf2401-767f-4a5c-a4a3-c12ac0833631\") " pod="openstack/glance-db-sync-jq7rj" Oct 10 18:02:36 crc kubenswrapper[4799]: I1010 18:02:36.843942 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bfbf2401-767f-4a5c-a4a3-c12ac0833631-config-data\") pod \"glance-db-sync-jq7rj\" (UID: \"bfbf2401-767f-4a5c-a4a3-c12ac0833631\") " pod="openstack/glance-db-sync-jq7rj" Oct 10 18:02:36 crc kubenswrapper[4799]: I1010 18:02:36.843980 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdcnb\" (UniqueName: \"kubernetes.io/projected/bfbf2401-767f-4a5c-a4a3-c12ac0833631-kube-api-access-hdcnb\") pod \"glance-db-sync-jq7rj\" (UID: \"bfbf2401-767f-4a5c-a4a3-c12ac0833631\") " pod="openstack/glance-db-sync-jq7rj" Oct 10 18:02:36 crc kubenswrapper[4799]: I1010 18:02:36.946717 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bfbf2401-767f-4a5c-a4a3-c12ac0833631-config-data\") pod \"glance-db-sync-jq7rj\" (UID: \"bfbf2401-767f-4a5c-a4a3-c12ac0833631\") " pod="openstack/glance-db-sync-jq7rj" Oct 10 18:02:36 crc kubenswrapper[4799]: I1010 18:02:36.946799 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdcnb\" (UniqueName: \"kubernetes.io/projected/bfbf2401-767f-4a5c-a4a3-c12ac0833631-kube-api-access-hdcnb\") pod \"glance-db-sync-jq7rj\" (UID: \"bfbf2401-767f-4a5c-a4a3-c12ac0833631\") " pod="openstack/glance-db-sync-jq7rj" Oct 10 18:02:36 crc kubenswrapper[4799]: I1010 18:02:36.946866 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfbf2401-767f-4a5c-a4a3-c12ac0833631-combined-ca-bundle\") pod \"glance-db-sync-jq7rj\" (UID: \"bfbf2401-767f-4a5c-a4a3-c12ac0833631\") " pod="openstack/glance-db-sync-jq7rj" Oct 10 18:02:36 crc kubenswrapper[4799]: I1010 18:02:36.946921 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bfbf2401-767f-4a5c-a4a3-c12ac0833631-db-sync-config-data\") pod 
\"glance-db-sync-jq7rj\" (UID: \"bfbf2401-767f-4a5c-a4a3-c12ac0833631\") " pod="openstack/glance-db-sync-jq7rj" Oct 10 18:02:36 crc kubenswrapper[4799]: I1010 18:02:36.952433 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bfbf2401-767f-4a5c-a4a3-c12ac0833631-db-sync-config-data\") pod \"glance-db-sync-jq7rj\" (UID: \"bfbf2401-767f-4a5c-a4a3-c12ac0833631\") " pod="openstack/glance-db-sync-jq7rj" Oct 10 18:02:36 crc kubenswrapper[4799]: I1010 18:02:36.952601 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bfbf2401-767f-4a5c-a4a3-c12ac0833631-config-data\") pod \"glance-db-sync-jq7rj\" (UID: \"bfbf2401-767f-4a5c-a4a3-c12ac0833631\") " pod="openstack/glance-db-sync-jq7rj" Oct 10 18:02:36 crc kubenswrapper[4799]: I1010 18:02:36.952727 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfbf2401-767f-4a5c-a4a3-c12ac0833631-combined-ca-bundle\") pod \"glance-db-sync-jq7rj\" (UID: \"bfbf2401-767f-4a5c-a4a3-c12ac0833631\") " pod="openstack/glance-db-sync-jq7rj" Oct 10 18:02:36 crc kubenswrapper[4799]: I1010 18:02:36.966537 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hdcnb\" (UniqueName: \"kubernetes.io/projected/bfbf2401-767f-4a5c-a4a3-c12ac0833631-kube-api-access-hdcnb\") pod \"glance-db-sync-jq7rj\" (UID: \"bfbf2401-767f-4a5c-a4a3-c12ac0833631\") " pod="openstack/glance-db-sync-jq7rj" Oct 10 18:02:37 crc kubenswrapper[4799]: I1010 18:02:37.067570 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-jq7rj" Oct 10 18:02:37 crc kubenswrapper[4799]: I1010 18:02:37.680973 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-jq7rj"] Oct 10 18:02:38 crc kubenswrapper[4799]: I1010 18:02:38.086400 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-jq7rj" event={"ID":"bfbf2401-767f-4a5c-a4a3-c12ac0833631","Type":"ContainerStarted","Data":"ff89bdd2719ed0ed8ea621832abf8e08aebfc44cda0027ae471ef17ab70e9f60"} Oct 10 18:02:39 crc kubenswrapper[4799]: I1010 18:02:39.095930 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-jq7rj" event={"ID":"bfbf2401-767f-4a5c-a4a3-c12ac0833631","Type":"ContainerStarted","Data":"78947021d40eb34394d54abd0f689020e16e60125938395ad6afee925ac82930"} Oct 10 18:02:39 crc kubenswrapper[4799]: I1010 18:02:39.120437 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-jq7rj" podStartSLOduration=3.120415906 podStartE2EDuration="3.120415906s" podCreationTimestamp="2025-10-10 18:02:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:02:39.116183822 +0000 UTC m=+5452.624507977" watchObservedRunningTime="2025-10-10 18:02:39.120415906 +0000 UTC m=+5452.628740031" Oct 10 18:02:42 crc kubenswrapper[4799]: I1010 18:02:42.130952 4799 generic.go:334] "Generic (PLEG): container finished" podID="bfbf2401-767f-4a5c-a4a3-c12ac0833631" containerID="78947021d40eb34394d54abd0f689020e16e60125938395ad6afee925ac82930" exitCode=0 Oct 10 18:02:42 crc kubenswrapper[4799]: I1010 18:02:42.131088 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-jq7rj" 
event={"ID":"bfbf2401-767f-4a5c-a4a3-c12ac0833631","Type":"ContainerDied","Data":"78947021d40eb34394d54abd0f689020e16e60125938395ad6afee925ac82930"} Oct 10 18:02:43 crc kubenswrapper[4799]: I1010 18:02:43.663291 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-jq7rj" Oct 10 18:02:43 crc kubenswrapper[4799]: I1010 18:02:43.776129 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfbf2401-767f-4a5c-a4a3-c12ac0833631-combined-ca-bundle\") pod \"bfbf2401-767f-4a5c-a4a3-c12ac0833631\" (UID: \"bfbf2401-767f-4a5c-a4a3-c12ac0833631\") " Oct 10 18:02:43 crc kubenswrapper[4799]: I1010 18:02:43.776202 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bfbf2401-767f-4a5c-a4a3-c12ac0833631-config-data\") pod \"bfbf2401-767f-4a5c-a4a3-c12ac0833631\" (UID: \"bfbf2401-767f-4a5c-a4a3-c12ac0833631\") " Oct 10 18:02:43 crc kubenswrapper[4799]: I1010 18:02:43.776340 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hdcnb\" (UniqueName: \"kubernetes.io/projected/bfbf2401-767f-4a5c-a4a3-c12ac0833631-kube-api-access-hdcnb\") pod \"bfbf2401-767f-4a5c-a4a3-c12ac0833631\" (UID: \"bfbf2401-767f-4a5c-a4a3-c12ac0833631\") " Oct 10 18:02:43 crc kubenswrapper[4799]: I1010 18:02:43.776396 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bfbf2401-767f-4a5c-a4a3-c12ac0833631-db-sync-config-data\") pod \"bfbf2401-767f-4a5c-a4a3-c12ac0833631\" (UID: \"bfbf2401-767f-4a5c-a4a3-c12ac0833631\") " Oct 10 18:02:43 crc kubenswrapper[4799]: I1010 18:02:43.783594 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bfbf2401-767f-4a5c-a4a3-c12ac0833631-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "bfbf2401-767f-4a5c-a4a3-c12ac0833631" (UID: "bfbf2401-767f-4a5c-a4a3-c12ac0833631"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:02:43 crc kubenswrapper[4799]: I1010 18:02:43.787997 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bfbf2401-767f-4a5c-a4a3-c12ac0833631-kube-api-access-hdcnb" (OuterVolumeSpecName: "kube-api-access-hdcnb") pod "bfbf2401-767f-4a5c-a4a3-c12ac0833631" (UID: "bfbf2401-767f-4a5c-a4a3-c12ac0833631"). InnerVolumeSpecName "kube-api-access-hdcnb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:02:43 crc kubenswrapper[4799]: I1010 18:02:43.806936 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bfbf2401-767f-4a5c-a4a3-c12ac0833631-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bfbf2401-767f-4a5c-a4a3-c12ac0833631" (UID: "bfbf2401-767f-4a5c-a4a3-c12ac0833631"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:02:43 crc kubenswrapper[4799]: I1010 18:02:43.856443 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bfbf2401-767f-4a5c-a4a3-c12ac0833631-config-data" (OuterVolumeSpecName: "config-data") pod "bfbf2401-767f-4a5c-a4a3-c12ac0833631" (UID: "bfbf2401-767f-4a5c-a4a3-c12ac0833631"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:02:43 crc kubenswrapper[4799]: I1010 18:02:43.879188 4799 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bfbf2401-767f-4a5c-a4a3-c12ac0833631-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 18:02:43 crc kubenswrapper[4799]: I1010 18:02:43.879251 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfbf2401-767f-4a5c-a4a3-c12ac0833631-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:02:43 crc kubenswrapper[4799]: I1010 18:02:43.879282 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bfbf2401-767f-4a5c-a4a3-c12ac0833631-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 18:02:43 crc kubenswrapper[4799]: I1010 18:02:43.879308 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hdcnb\" (UniqueName: \"kubernetes.io/projected/bfbf2401-767f-4a5c-a4a3-c12ac0833631-kube-api-access-hdcnb\") on node \"crc\" DevicePath \"\"" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.152921 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-jq7rj" event={"ID":"bfbf2401-767f-4a5c-a4a3-c12ac0833631","Type":"ContainerDied","Data":"ff89bdd2719ed0ed8ea621832abf8e08aebfc44cda0027ae471ef17ab70e9f60"} Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.152974 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ff89bdd2719ed0ed8ea621832abf8e08aebfc44cda0027ae471ef17ab70e9f60" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.153031 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-jq7rj" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.648683 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-769797d5d7-zjthw"] Oct 10 18:02:44 crc kubenswrapper[4799]: E1010 18:02:44.649237 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfbf2401-767f-4a5c-a4a3-c12ac0833631" containerName="glance-db-sync" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.649254 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfbf2401-767f-4a5c-a4a3-c12ac0833631" containerName="glance-db-sync" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.649424 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfbf2401-767f-4a5c-a4a3-c12ac0833631" containerName="glance-db-sync" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.650784 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-769797d5d7-zjthw" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.662011 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-769797d5d7-zjthw"] Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.693096 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.698578 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.699429 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c4fbb2c2-9422-4e1c-b7f2-d88141521268-ovsdbserver-nb\") pod \"dnsmasq-dns-769797d5d7-zjthw\" (UID: \"c4fbb2c2-9422-4e1c-b7f2-d88141521268\") " pod="openstack/dnsmasq-dns-769797d5d7-zjthw" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.699476 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c4fbb2c2-9422-4e1c-b7f2-d88141521268-ovsdbserver-sb\") pod \"dnsmasq-dns-769797d5d7-zjthw\" (UID: \"c4fbb2c2-9422-4e1c-b7f2-d88141521268\") " pod="openstack/dnsmasq-dns-769797d5d7-zjthw" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.699562 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4fbb2c2-9422-4e1c-b7f2-d88141521268-config\") pod \"dnsmasq-dns-769797d5d7-zjthw\" (UID: \"c4fbb2c2-9422-4e1c-b7f2-d88141521268\") " pod="openstack/dnsmasq-dns-769797d5d7-zjthw" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.699616 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qth6d\" (UniqueName: \"kubernetes.io/projected/c4fbb2c2-9422-4e1c-b7f2-d88141521268-kube-api-access-qth6d\") pod \"dnsmasq-dns-769797d5d7-zjthw\" (UID: \"c4fbb2c2-9422-4e1c-b7f2-d88141521268\") " pod="openstack/dnsmasq-dns-769797d5d7-zjthw" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.699639 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c4fbb2c2-9422-4e1c-b7f2-d88141521268-dns-svc\") pod \"dnsmasq-dns-769797d5d7-zjthw\" (UID: \"c4fbb2c2-9422-4e1c-b7f2-d88141521268\") " pod="openstack/dnsmasq-dns-769797d5d7-zjthw" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.701520 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-4l9wb" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.701563 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.701681 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.703567 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.713487 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.758671 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.760011 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.762281 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.772557 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.800867 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7fe1d290-8534-412a-9dd5-4e9bb43b14b0\") " pod="openstack/glance-default-external-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.800911 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c4fbb2c2-9422-4e1c-b7f2-d88141521268-ovsdbserver-nb\") pod \"dnsmasq-dns-769797d5d7-zjthw\" (UID: \"c4fbb2c2-9422-4e1c-b7f2-d88141521268\") " pod="openstack/dnsmasq-dns-769797d5d7-zjthw" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.800931 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9fcb813b-01de-46fa-baee-f18803c8808c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9fcb813b-01de-46fa-baee-f18803c8808c\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.800947 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c4fbb2c2-9422-4e1c-b7f2-d88141521268-ovsdbserver-sb\") pod \"dnsmasq-dns-769797d5d7-zjthw\" (UID: \"c4fbb2c2-9422-4e1c-b7f2-d88141521268\") " pod="openstack/dnsmasq-dns-769797d5d7-zjthw" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.800985 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-config-data\") pod \"glance-default-external-api-0\" (UID: \"7fe1d290-8534-412a-9dd5-4e9bb43b14b0\") " pod="openstack/glance-default-external-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.801029 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9fcb813b-01de-46fa-baee-f18803c8808c-logs\") pod \"glance-default-internal-api-0\" (UID: \"9fcb813b-01de-46fa-baee-f18803c8808c\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.801045 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-logs\") pod \"glance-default-external-api-0\" (UID: \"7fe1d290-8534-412a-9dd5-4e9bb43b14b0\") " pod="openstack/glance-default-external-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.801062 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vsgrj\" (UniqueName: \"kubernetes.io/projected/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-kube-api-access-vsgrj\") pod \"glance-default-external-api-0\" (UID: 
\"7fe1d290-8534-412a-9dd5-4e9bb43b14b0\") " pod="openstack/glance-default-external-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.801093 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vkv2\" (UniqueName: \"kubernetes.io/projected/9fcb813b-01de-46fa-baee-f18803c8808c-kube-api-access-4vkv2\") pod \"glance-default-internal-api-0\" (UID: \"9fcb813b-01de-46fa-baee-f18803c8808c\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.801108 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-ceph\") pod \"glance-default-external-api-0\" (UID: \"7fe1d290-8534-412a-9dd5-4e9bb43b14b0\") " pod="openstack/glance-default-external-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.801127 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4fbb2c2-9422-4e1c-b7f2-d88141521268-config\") pod \"dnsmasq-dns-769797d5d7-zjthw\" (UID: \"c4fbb2c2-9422-4e1c-b7f2-d88141521268\") " pod="openstack/dnsmasq-dns-769797d5d7-zjthw" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.801144 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7fe1d290-8534-412a-9dd5-4e9bb43b14b0\") " pod="openstack/glance-default-external-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.801160 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/9fcb813b-01de-46fa-baee-f18803c8808c-ceph\") pod \"glance-default-internal-api-0\" (UID: \"9fcb813b-01de-46fa-baee-f18803c8808c\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.801182 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9fcb813b-01de-46fa-baee-f18803c8808c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9fcb813b-01de-46fa-baee-f18803c8808c\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.801210 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9fcb813b-01de-46fa-baee-f18803c8808c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9fcb813b-01de-46fa-baee-f18803c8808c\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.801228 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9fcb813b-01de-46fa-baee-f18803c8808c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9fcb813b-01de-46fa-baee-f18803c8808c\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.801252 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qth6d\" (UniqueName: \"kubernetes.io/projected/c4fbb2c2-9422-4e1c-b7f2-d88141521268-kube-api-access-qth6d\") 
pod \"dnsmasq-dns-769797d5d7-zjthw\" (UID: \"c4fbb2c2-9422-4e1c-b7f2-d88141521268\") " pod="openstack/dnsmasq-dns-769797d5d7-zjthw" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.801267 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-scripts\") pod \"glance-default-external-api-0\" (UID: \"7fe1d290-8534-412a-9dd5-4e9bb43b14b0\") " pod="openstack/glance-default-external-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.801287 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c4fbb2c2-9422-4e1c-b7f2-d88141521268-dns-svc\") pod \"dnsmasq-dns-769797d5d7-zjthw\" (UID: \"c4fbb2c2-9422-4e1c-b7f2-d88141521268\") " pod="openstack/dnsmasq-dns-769797d5d7-zjthw" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.802095 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c4fbb2c2-9422-4e1c-b7f2-d88141521268-dns-svc\") pod \"dnsmasq-dns-769797d5d7-zjthw\" (UID: \"c4fbb2c2-9422-4e1c-b7f2-d88141521268\") " pod="openstack/dnsmasq-dns-769797d5d7-zjthw" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.802581 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c4fbb2c2-9422-4e1c-b7f2-d88141521268-ovsdbserver-nb\") pod \"dnsmasq-dns-769797d5d7-zjthw\" (UID: \"c4fbb2c2-9422-4e1c-b7f2-d88141521268\") " pod="openstack/dnsmasq-dns-769797d5d7-zjthw" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.803095 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c4fbb2c2-9422-4e1c-b7f2-d88141521268-ovsdbserver-sb\") pod \"dnsmasq-dns-769797d5d7-zjthw\" (UID: \"c4fbb2c2-9422-4e1c-b7f2-d88141521268\") " pod="openstack/dnsmasq-dns-769797d5d7-zjthw" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.803583 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4fbb2c2-9422-4e1c-b7f2-d88141521268-config\") pod \"dnsmasq-dns-769797d5d7-zjthw\" (UID: \"c4fbb2c2-9422-4e1c-b7f2-d88141521268\") " pod="openstack/dnsmasq-dns-769797d5d7-zjthw" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.823156 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qth6d\" (UniqueName: \"kubernetes.io/projected/c4fbb2c2-9422-4e1c-b7f2-d88141521268-kube-api-access-qth6d\") pod \"dnsmasq-dns-769797d5d7-zjthw\" (UID: \"c4fbb2c2-9422-4e1c-b7f2-d88141521268\") " pod="openstack/dnsmasq-dns-769797d5d7-zjthw" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.903308 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7fe1d290-8534-412a-9dd5-4e9bb43b14b0\") " pod="openstack/glance-default-external-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.903353 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9fcb813b-01de-46fa-baee-f18803c8808c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9fcb813b-01de-46fa-baee-f18803c8808c\") " 
pod="openstack/glance-default-internal-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.903391 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-config-data\") pod \"glance-default-external-api-0\" (UID: \"7fe1d290-8534-412a-9dd5-4e9bb43b14b0\") " pod="openstack/glance-default-external-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.903434 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9fcb813b-01de-46fa-baee-f18803c8808c-logs\") pod \"glance-default-internal-api-0\" (UID: \"9fcb813b-01de-46fa-baee-f18803c8808c\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.903454 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-logs\") pod \"glance-default-external-api-0\" (UID: \"7fe1d290-8534-412a-9dd5-4e9bb43b14b0\") " pod="openstack/glance-default-external-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.903474 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vsgrj\" (UniqueName: \"kubernetes.io/projected/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-kube-api-access-vsgrj\") pod \"glance-default-external-api-0\" (UID: \"7fe1d290-8534-412a-9dd5-4e9bb43b14b0\") " pod="openstack/glance-default-external-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.903500 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vkv2\" (UniqueName: \"kubernetes.io/projected/9fcb813b-01de-46fa-baee-f18803c8808c-kube-api-access-4vkv2\") pod \"glance-default-internal-api-0\" (UID: \"9fcb813b-01de-46fa-baee-f18803c8808c\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.903517 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-ceph\") pod \"glance-default-external-api-0\" (UID: \"7fe1d290-8534-412a-9dd5-4e9bb43b14b0\") " pod="openstack/glance-default-external-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.903537 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7fe1d290-8534-412a-9dd5-4e9bb43b14b0\") " pod="openstack/glance-default-external-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.903557 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/9fcb813b-01de-46fa-baee-f18803c8808c-ceph\") pod \"glance-default-internal-api-0\" (UID: \"9fcb813b-01de-46fa-baee-f18803c8808c\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.903576 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9fcb813b-01de-46fa-baee-f18803c8808c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9fcb813b-01de-46fa-baee-f18803c8808c\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.903605 4799 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9fcb813b-01de-46fa-baee-f18803c8808c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9fcb813b-01de-46fa-baee-f18803c8808c\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.903626 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9fcb813b-01de-46fa-baee-f18803c8808c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9fcb813b-01de-46fa-baee-f18803c8808c\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.903649 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-scripts\") pod \"glance-default-external-api-0\" (UID: \"7fe1d290-8534-412a-9dd5-4e9bb43b14b0\") " pod="openstack/glance-default-external-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.904408 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-logs\") pod \"glance-default-external-api-0\" (UID: \"7fe1d290-8534-412a-9dd5-4e9bb43b14b0\") " pod="openstack/glance-default-external-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.904635 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7fe1d290-8534-412a-9dd5-4e9bb43b14b0\") " pod="openstack/glance-default-external-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.904750 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9fcb813b-01de-46fa-baee-f18803c8808c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9fcb813b-01de-46fa-baee-f18803c8808c\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.904904 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9fcb813b-01de-46fa-baee-f18803c8808c-logs\") pod \"glance-default-internal-api-0\" (UID: \"9fcb813b-01de-46fa-baee-f18803c8808c\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.908543 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-config-data\") pod \"glance-default-external-api-0\" (UID: \"7fe1d290-8534-412a-9dd5-4e9bb43b14b0\") " pod="openstack/glance-default-external-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.909847 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9fcb813b-01de-46fa-baee-f18803c8808c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9fcb813b-01de-46fa-baee-f18803c8808c\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.911355 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/9fcb813b-01de-46fa-baee-f18803c8808c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9fcb813b-01de-46fa-baee-f18803c8808c\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.911570 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/9fcb813b-01de-46fa-baee-f18803c8808c-ceph\") pod \"glance-default-internal-api-0\" (UID: \"9fcb813b-01de-46fa-baee-f18803c8808c\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.911715 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-ceph\") pod \"glance-default-external-api-0\" (UID: \"7fe1d290-8534-412a-9dd5-4e9bb43b14b0\") " pod="openstack/glance-default-external-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.911862 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7fe1d290-8534-412a-9dd5-4e9bb43b14b0\") " pod="openstack/glance-default-external-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.913226 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9fcb813b-01de-46fa-baee-f18803c8808c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9fcb813b-01de-46fa-baee-f18803c8808c\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.919630 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vkv2\" (UniqueName: \"kubernetes.io/projected/9fcb813b-01de-46fa-baee-f18803c8808c-kube-api-access-4vkv2\") pod \"glance-default-internal-api-0\" (UID: \"9fcb813b-01de-46fa-baee-f18803c8808c\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.920866 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-scripts\") pod \"glance-default-external-api-0\" (UID: \"7fe1d290-8534-412a-9dd5-4e9bb43b14b0\") " pod="openstack/glance-default-external-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.927005 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vsgrj\" (UniqueName: \"kubernetes.io/projected/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-kube-api-access-vsgrj\") pod \"glance-default-external-api-0\" (UID: \"7fe1d290-8534-412a-9dd5-4e9bb43b14b0\") " pod="openstack/glance-default-external-api-0" Oct 10 18:02:44 crc kubenswrapper[4799]: I1010 18:02:44.970416 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-769797d5d7-zjthw" Oct 10 18:02:45 crc kubenswrapper[4799]: I1010 18:02:45.025394 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 10 18:02:45 crc kubenswrapper[4799]: I1010 18:02:45.074679 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 10 18:02:45 crc kubenswrapper[4799]: I1010 18:02:45.248800 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 18:02:45 crc kubenswrapper[4799]: I1010 18:02:45.248850 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 18:02:45 crc kubenswrapper[4799]: I1010 18:02:45.248887 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 18:02:45 crc kubenswrapper[4799]: I1010 18:02:45.249712 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7d93aa50210daf6f851b294fa12ece2a292bc34c273170b02b29c3a462fa4bf5"} pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 10 18:02:45 crc kubenswrapper[4799]: I1010 18:02:45.249778 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" containerID="cri-o://7d93aa50210daf6f851b294fa12ece2a292bc34c273170b02b29c3a462fa4bf5" gracePeriod=600 Oct 10 18:02:45 crc kubenswrapper[4799]: I1010 18:02:45.447479 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-769797d5d7-zjthw"] Oct 10 18:02:45 crc kubenswrapper[4799]: I1010 18:02:45.622998 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 10 18:02:45 crc kubenswrapper[4799]: W1010 18:02:45.628810 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7fe1d290_8534_412a_9dd5_4e9bb43b14b0.slice/crio-8c4e9a70100d5b24c666d8bf714875f1c8bf69101e22caa07d7130bf86b8fe29 WatchSource:0}: Error finding container 8c4e9a70100d5b24c666d8bf714875f1c8bf69101e22caa07d7130bf86b8fe29: Status 404 returned error can't find the container with id 8c4e9a70100d5b24c666d8bf714875f1c8bf69101e22caa07d7130bf86b8fe29 Oct 10 18:02:45 crc kubenswrapper[4799]: I1010 18:02:45.726659 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 10 18:02:45 crc kubenswrapper[4799]: I1010 18:02:45.914864 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 10 18:02:46 crc kubenswrapper[4799]: I1010 18:02:46.191928 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7fe1d290-8534-412a-9dd5-4e9bb43b14b0","Type":"ContainerStarted","Data":"8c4e9a70100d5b24c666d8bf714875f1c8bf69101e22caa07d7130bf86b8fe29"} Oct 10 18:02:46 crc kubenswrapper[4799]: I1010 18:02:46.194557 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"9fcb813b-01de-46fa-baee-f18803c8808c","Type":"ContainerStarted","Data":"2a967844efa6fc7ce3fed9e20574ecaf324c3cc839cc049aaf56a591bf8b59dd"} Oct 10 18:02:46 crc kubenswrapper[4799]: I1010 18:02:46.207324 4799 generic.go:334] "Generic (PLEG): container finished" podID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerID="7d93aa50210daf6f851b294fa12ece2a292bc34c273170b02b29c3a462fa4bf5" exitCode=0 Oct 10 18:02:46 crc kubenswrapper[4799]: I1010 18:02:46.207378 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerDied","Data":"7d93aa50210daf6f851b294fa12ece2a292bc34c273170b02b29c3a462fa4bf5"} Oct 10 18:02:46 crc kubenswrapper[4799]: I1010 18:02:46.207405 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"6ae067b7971fd6480cb0c3ccf44d4e22f837ba4674373b4b5903247a9af39cf1"} Oct 10 18:02:46 crc kubenswrapper[4799]: I1010 18:02:46.207423 4799 scope.go:117] "RemoveContainer" containerID="5e973f476cb0655a6e33e886e2a59fc6754febf3bf5a4718abcef307858985dd" Oct 10 18:02:46 crc kubenswrapper[4799]: I1010 18:02:46.210219 4799 generic.go:334] "Generic (PLEG): container finished" podID="c4fbb2c2-9422-4e1c-b7f2-d88141521268" containerID="150281defd8d9e8ef5efbec1515b4b93df6dc1d4e2a14187b78561759045a1f9" exitCode=0 Oct 10 18:02:46 crc kubenswrapper[4799]: I1010 18:02:46.210264 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-769797d5d7-zjthw" event={"ID":"c4fbb2c2-9422-4e1c-b7f2-d88141521268","Type":"ContainerDied","Data":"150281defd8d9e8ef5efbec1515b4b93df6dc1d4e2a14187b78561759045a1f9"} Oct 10 18:02:46 crc kubenswrapper[4799]: I1010 18:02:46.210312 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-769797d5d7-zjthw" event={"ID":"c4fbb2c2-9422-4e1c-b7f2-d88141521268","Type":"ContainerStarted","Data":"f3b0f1650516a69f468baa63b58465d4daad9bc7d95a3ed5de09fd2ba856bea2"} Oct 10 18:02:47 crc kubenswrapper[4799]: I1010 18:02:47.220411 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7fe1d290-8534-412a-9dd5-4e9bb43b14b0","Type":"ContainerStarted","Data":"c075396860dcb75f5f42f451541414b54480ceea2c9f6f9a0378feab3cf14d59"} Oct 10 18:02:47 crc kubenswrapper[4799]: I1010 18:02:47.220901 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7fe1d290-8534-412a-9dd5-4e9bb43b14b0","Type":"ContainerStarted","Data":"33c3774126290179407fb963124120828b90ff1514a16347afca3969d344178e"} Oct 10 18:02:47 crc kubenswrapper[4799]: I1010 18:02:47.220597 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="7fe1d290-8534-412a-9dd5-4e9bb43b14b0" containerName="glance-httpd" containerID="cri-o://33c3774126290179407fb963124120828b90ff1514a16347afca3969d344178e" gracePeriod=30 Oct 10 18:02:47 crc kubenswrapper[4799]: I1010 18:02:47.220532 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="7fe1d290-8534-412a-9dd5-4e9bb43b14b0" containerName="glance-log" containerID="cri-o://c075396860dcb75f5f42f451541414b54480ceea2c9f6f9a0378feab3cf14d59" gracePeriod=30 Oct 10 18:02:47 crc kubenswrapper[4799]: I1010 
18:02:47.222950 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9fcb813b-01de-46fa-baee-f18803c8808c","Type":"ContainerStarted","Data":"88508ee1b8a4e90158469e4940dc634fb11f306572ea18ee45254535f0799c83"} Oct 10 18:02:47 crc kubenswrapper[4799]: I1010 18:02:47.222995 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9fcb813b-01de-46fa-baee-f18803c8808c","Type":"ContainerStarted","Data":"6844d82d560123bfb46715c1b190ed1e20825a9fcae5782981d5396cd75db098"} Oct 10 18:02:47 crc kubenswrapper[4799]: I1010 18:02:47.233921 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-769797d5d7-zjthw" event={"ID":"c4fbb2c2-9422-4e1c-b7f2-d88141521268","Type":"ContainerStarted","Data":"37961f471cf3b8844a085d0d50743c6963fac5c11832c3e36f77c653f31569fc"} Oct 10 18:02:47 crc kubenswrapper[4799]: I1010 18:02:47.234205 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-769797d5d7-zjthw" Oct 10 18:02:47 crc kubenswrapper[4799]: I1010 18:02:47.250153 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.250135157 podStartE2EDuration="3.250135157s" podCreationTimestamp="2025-10-10 18:02:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:02:47.243459884 +0000 UTC m=+5460.751783999" watchObservedRunningTime="2025-10-10 18:02:47.250135157 +0000 UTC m=+5460.758459272" Oct 10 18:02:47 crc kubenswrapper[4799]: I1010 18:02:47.260679 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-769797d5d7-zjthw" podStartSLOduration=3.260661223 podStartE2EDuration="3.260661223s" podCreationTimestamp="2025-10-10 18:02:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:02:47.259110995 +0000 UTC m=+5460.767435110" watchObservedRunningTime="2025-10-10 18:02:47.260661223 +0000 UTC m=+5460.768985328" Oct 10 18:02:47 crc kubenswrapper[4799]: I1010 18:02:47.279938 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.279916073 podStartE2EDuration="3.279916073s" podCreationTimestamp="2025-10-10 18:02:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:02:47.273899066 +0000 UTC m=+5460.782223181" watchObservedRunningTime="2025-10-10 18:02:47.279916073 +0000 UTC m=+5460.788240188" Oct 10 18:02:47 crc kubenswrapper[4799]: I1010 18:02:47.964775 4799 util.go:48] "No ready sandbox for pod can be found. 
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.059410 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-ceph\") pod \"7fe1d290-8534-412a-9dd5-4e9bb43b14b0\" (UID: \"7fe1d290-8534-412a-9dd5-4e9bb43b14b0\") "
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.059533 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-scripts\") pod \"7fe1d290-8534-412a-9dd5-4e9bb43b14b0\" (UID: \"7fe1d290-8534-412a-9dd5-4e9bb43b14b0\") "
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.059585 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-config-data\") pod \"7fe1d290-8534-412a-9dd5-4e9bb43b14b0\" (UID: \"7fe1d290-8534-412a-9dd5-4e9bb43b14b0\") "
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.059825 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vsgrj\" (UniqueName: \"kubernetes.io/projected/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-kube-api-access-vsgrj\") pod \"7fe1d290-8534-412a-9dd5-4e9bb43b14b0\" (UID: \"7fe1d290-8534-412a-9dd5-4e9bb43b14b0\") "
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.059904 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-combined-ca-bundle\") pod \"7fe1d290-8534-412a-9dd5-4e9bb43b14b0\" (UID: \"7fe1d290-8534-412a-9dd5-4e9bb43b14b0\") "
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.059933 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-httpd-run\") pod \"7fe1d290-8534-412a-9dd5-4e9bb43b14b0\" (UID: \"7fe1d290-8534-412a-9dd5-4e9bb43b14b0\") "
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.059953 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-logs\") pod \"7fe1d290-8534-412a-9dd5-4e9bb43b14b0\" (UID: \"7fe1d290-8534-412a-9dd5-4e9bb43b14b0\") "
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.060695 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-logs" (OuterVolumeSpecName: "logs") pod "7fe1d290-8534-412a-9dd5-4e9bb43b14b0" (UID: "7fe1d290-8534-412a-9dd5-4e9bb43b14b0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.061546 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "7fe1d290-8534-412a-9dd5-4e9bb43b14b0" (UID: "7fe1d290-8534-412a-9dd5-4e9bb43b14b0"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.065026 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-scripts" (OuterVolumeSpecName: "scripts") pod "7fe1d290-8534-412a-9dd5-4e9bb43b14b0" (UID: "7fe1d290-8534-412a-9dd5-4e9bb43b14b0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.065450 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-ceph" (OuterVolumeSpecName: "ceph") pod "7fe1d290-8534-412a-9dd5-4e9bb43b14b0" (UID: "7fe1d290-8534-412a-9dd5-4e9bb43b14b0"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.072539 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-kube-api-access-vsgrj" (OuterVolumeSpecName: "kube-api-access-vsgrj") pod "7fe1d290-8534-412a-9dd5-4e9bb43b14b0" (UID: "7fe1d290-8534-412a-9dd5-4e9bb43b14b0"). InnerVolumeSpecName "kube-api-access-vsgrj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.091172 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7fe1d290-8534-412a-9dd5-4e9bb43b14b0" (UID: "7fe1d290-8534-412a-9dd5-4e9bb43b14b0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.120395 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-config-data" (OuterVolumeSpecName: "config-data") pod "7fe1d290-8534-412a-9dd5-4e9bb43b14b0" (UID: "7fe1d290-8534-412a-9dd5-4e9bb43b14b0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.161952 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vsgrj\" (UniqueName: \"kubernetes.io/projected/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-kube-api-access-vsgrj\") on node \"crc\" DevicePath \"\""
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.161996 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.162009 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-logs\") on node \"crc\" DevicePath \"\""
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.162021 4799 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-httpd-run\") on node \"crc\" DevicePath \"\""
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.162035 4799 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-ceph\") on node \"crc\" DevicePath \"\""
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.162045 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-scripts\") on node \"crc\" DevicePath \"\""
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.162055 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7fe1d290-8534-412a-9dd5-4e9bb43b14b0-config-data\") on node \"crc\" DevicePath \"\""
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.244985 4799 generic.go:334] "Generic (PLEG): container finished" podID="7fe1d290-8534-412a-9dd5-4e9bb43b14b0" containerID="33c3774126290179407fb963124120828b90ff1514a16347afca3969d344178e" exitCode=0
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.246157 4799 generic.go:334] "Generic (PLEG): container finished" podID="7fe1d290-8534-412a-9dd5-4e9bb43b14b0" containerID="c075396860dcb75f5f42f451541414b54480ceea2c9f6f9a0378feab3cf14d59" exitCode=143
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.245040 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7fe1d290-8534-412a-9dd5-4e9bb43b14b0","Type":"ContainerDied","Data":"33c3774126290179407fb963124120828b90ff1514a16347afca3969d344178e"}
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.246622 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7fe1d290-8534-412a-9dd5-4e9bb43b14b0","Type":"ContainerDied","Data":"c075396860dcb75f5f42f451541414b54480ceea2c9f6f9a0378feab3cf14d59"}
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.245025 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.246676 4799 scope.go:117] "RemoveContainer" containerID="33c3774126290179407fb963124120828b90ff1514a16347afca3969d344178e"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.246662 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7fe1d290-8534-412a-9dd5-4e9bb43b14b0","Type":"ContainerDied","Data":"8c4e9a70100d5b24c666d8bf714875f1c8bf69101e22caa07d7130bf86b8fe29"}
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.267470 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.276671 4799 scope.go:117] "RemoveContainer" containerID="c075396860dcb75f5f42f451541414b54480ceea2c9f6f9a0378feab3cf14d59"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.284720 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.292456 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.305372 4799 scope.go:117] "RemoveContainer" containerID="33c3774126290179407fb963124120828b90ff1514a16347afca3969d344178e"
Oct 10 18:02:48 crc kubenswrapper[4799]: E1010 18:02:48.305892 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33c3774126290179407fb963124120828b90ff1514a16347afca3969d344178e\": container with ID starting with 33c3774126290179407fb963124120828b90ff1514a16347afca3969d344178e not found: ID does not exist" containerID="33c3774126290179407fb963124120828b90ff1514a16347afca3969d344178e"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.305925 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33c3774126290179407fb963124120828b90ff1514a16347afca3969d344178e"} err="failed to get container status \"33c3774126290179407fb963124120828b90ff1514a16347afca3969d344178e\": rpc error: code = NotFound desc = could not find container \"33c3774126290179407fb963124120828b90ff1514a16347afca3969d344178e\": container with ID starting with 33c3774126290179407fb963124120828b90ff1514a16347afca3969d344178e not found: ID does not exist"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.305948 4799 scope.go:117] "RemoveContainer" containerID="c075396860dcb75f5f42f451541414b54480ceea2c9f6f9a0378feab3cf14d59"
Oct 10 18:02:48 crc kubenswrapper[4799]: E1010 18:02:48.306252 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c075396860dcb75f5f42f451541414b54480ceea2c9f6f9a0378feab3cf14d59\": container with ID starting with c075396860dcb75f5f42f451541414b54480ceea2c9f6f9a0378feab3cf14d59 not found: ID does not exist" containerID="c075396860dcb75f5f42f451541414b54480ceea2c9f6f9a0378feab3cf14d59"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.306273 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c075396860dcb75f5f42f451541414b54480ceea2c9f6f9a0378feab3cf14d59"} err="failed to get container status \"c075396860dcb75f5f42f451541414b54480ceea2c9f6f9a0378feab3cf14d59\": rpc error: code = NotFound desc = could not find container \"c075396860dcb75f5f42f451541414b54480ceea2c9f6f9a0378feab3cf14d59\": container with ID starting with c075396860dcb75f5f42f451541414b54480ceea2c9f6f9a0378feab3cf14d59 not found: ID does not exist"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.306287 4799 scope.go:117] "RemoveContainer" containerID="33c3774126290179407fb963124120828b90ff1514a16347afca3969d344178e"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.306548 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33c3774126290179407fb963124120828b90ff1514a16347afca3969d344178e"} err="failed to get container status \"33c3774126290179407fb963124120828b90ff1514a16347afca3969d344178e\": rpc error: code = NotFound desc = could not find container \"33c3774126290179407fb963124120828b90ff1514a16347afca3969d344178e\": container with ID starting with 33c3774126290179407fb963124120828b90ff1514a16347afca3969d344178e not found: ID does not exist"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.306573 4799 scope.go:117] "RemoveContainer" containerID="c075396860dcb75f5f42f451541414b54480ceea2c9f6f9a0378feab3cf14d59"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.306810 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c075396860dcb75f5f42f451541414b54480ceea2c9f6f9a0378feab3cf14d59"} err="failed to get container status \"c075396860dcb75f5f42f451541414b54480ceea2c9f6f9a0378feab3cf14d59\": rpc error: code = NotFound desc = could not find container \"c075396860dcb75f5f42f451541414b54480ceea2c9f6f9a0378feab3cf14d59\": container with ID starting with c075396860dcb75f5f42f451541414b54480ceea2c9f6f9a0378feab3cf14d59 not found: ID does not exist"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.313879 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 10 18:02:48 crc kubenswrapper[4799]: E1010 18:02:48.314316 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fe1d290-8534-412a-9dd5-4e9bb43b14b0" containerName="glance-httpd"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.314331 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fe1d290-8534-412a-9dd5-4e9bb43b14b0" containerName="glance-httpd"
Oct 10 18:02:48 crc kubenswrapper[4799]: E1010 18:02:48.314365 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fe1d290-8534-412a-9dd5-4e9bb43b14b0" containerName="glance-log"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.314370 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fe1d290-8534-412a-9dd5-4e9bb43b14b0" containerName="glance-log"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.314508 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="7fe1d290-8534-412a-9dd5-4e9bb43b14b0" containerName="glance-httpd"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.314531 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="7fe1d290-8534-412a-9dd5-4e9bb43b14b0" containerName="glance-log"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.315429 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.318411 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.324133 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.466217 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4d6ca553-e264-4abd-a853-bf86bf3b22bd-ceph\") pod \"glance-default-external-api-0\" (UID: \"4d6ca553-e264-4abd-a853-bf86bf3b22bd\") " pod="openstack/glance-default-external-api-0"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.466270 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d6ca553-e264-4abd-a853-bf86bf3b22bd-config-data\") pod \"glance-default-external-api-0\" (UID: \"4d6ca553-e264-4abd-a853-bf86bf3b22bd\") " pod="openstack/glance-default-external-api-0"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.466334 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d6ca553-e264-4abd-a853-bf86bf3b22bd-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4d6ca553-e264-4abd-a853-bf86bf3b22bd\") " pod="openstack/glance-default-external-api-0"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.466387 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4cdkw\" (UniqueName: \"kubernetes.io/projected/4d6ca553-e264-4abd-a853-bf86bf3b22bd-kube-api-access-4cdkw\") pod \"glance-default-external-api-0\" (UID: \"4d6ca553-e264-4abd-a853-bf86bf3b22bd\") " pod="openstack/glance-default-external-api-0"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.466472 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4d6ca553-e264-4abd-a853-bf86bf3b22bd-logs\") pod \"glance-default-external-api-0\" (UID: \"4d6ca553-e264-4abd-a853-bf86bf3b22bd\") " pod="openstack/glance-default-external-api-0"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.466517 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4d6ca553-e264-4abd-a853-bf86bf3b22bd-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4d6ca553-e264-4abd-a853-bf86bf3b22bd\") " pod="openstack/glance-default-external-api-0"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.466555 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4d6ca553-e264-4abd-a853-bf86bf3b22bd-scripts\") pod \"glance-default-external-api-0\" (UID: \"4d6ca553-e264-4abd-a853-bf86bf3b22bd\") " pod="openstack/glance-default-external-api-0"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.568139 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4d6ca553-e264-4abd-a853-bf86bf3b22bd-logs\") pod \"glance-default-external-api-0\" (UID: \"4d6ca553-e264-4abd-a853-bf86bf3b22bd\") " pod="openstack/glance-default-external-api-0"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.568194 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4d6ca553-e264-4abd-a853-bf86bf3b22bd-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4d6ca553-e264-4abd-a853-bf86bf3b22bd\") " pod="openstack/glance-default-external-api-0"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.568226 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4d6ca553-e264-4abd-a853-bf86bf3b22bd-scripts\") pod \"glance-default-external-api-0\" (UID: \"4d6ca553-e264-4abd-a853-bf86bf3b22bd\") " pod="openstack/glance-default-external-api-0"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.568271 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4d6ca553-e264-4abd-a853-bf86bf3b22bd-ceph\") pod \"glance-default-external-api-0\" (UID: \"4d6ca553-e264-4abd-a853-bf86bf3b22bd\") " pod="openstack/glance-default-external-api-0"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.568295 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d6ca553-e264-4abd-a853-bf86bf3b22bd-config-data\") pod \"glance-default-external-api-0\" (UID: \"4d6ca553-e264-4abd-a853-bf86bf3b22bd\") " pod="openstack/glance-default-external-api-0"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.568312 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d6ca553-e264-4abd-a853-bf86bf3b22bd-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4d6ca553-e264-4abd-a853-bf86bf3b22bd\") " pod="openstack/glance-default-external-api-0"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.568353 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4cdkw\" (UniqueName: \"kubernetes.io/projected/4d6ca553-e264-4abd-a853-bf86bf3b22bd-kube-api-access-4cdkw\") pod \"glance-default-external-api-0\" (UID: \"4d6ca553-e264-4abd-a853-bf86bf3b22bd\") " pod="openstack/glance-default-external-api-0"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.569532 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4d6ca553-e264-4abd-a853-bf86bf3b22bd-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4d6ca553-e264-4abd-a853-bf86bf3b22bd\") " pod="openstack/glance-default-external-api-0"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.569676 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4d6ca553-e264-4abd-a853-bf86bf3b22bd-logs\") pod \"glance-default-external-api-0\" (UID: \"4d6ca553-e264-4abd-a853-bf86bf3b22bd\") " pod="openstack/glance-default-external-api-0"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.572886 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d6ca553-e264-4abd-a853-bf86bf3b22bd-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4d6ca553-e264-4abd-a853-bf86bf3b22bd\") " pod="openstack/glance-default-external-api-0"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.572968 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4d6ca553-e264-4abd-a853-bf86bf3b22bd-ceph\") pod \"glance-default-external-api-0\" (UID: \"4d6ca553-e264-4abd-a853-bf86bf3b22bd\") " pod="openstack/glance-default-external-api-0"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.573172 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d6ca553-e264-4abd-a853-bf86bf3b22bd-config-data\") pod \"glance-default-external-api-0\" (UID: \"4d6ca553-e264-4abd-a853-bf86bf3b22bd\") " pod="openstack/glance-default-external-api-0"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.573613 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4d6ca553-e264-4abd-a853-bf86bf3b22bd-scripts\") pod \"glance-default-external-api-0\" (UID: \"4d6ca553-e264-4abd-a853-bf86bf3b22bd\") " pod="openstack/glance-default-external-api-0"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.596523 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4cdkw\" (UniqueName: \"kubernetes.io/projected/4d6ca553-e264-4abd-a853-bf86bf3b22bd-kube-api-access-4cdkw\") pod \"glance-default-external-api-0\" (UID: \"4d6ca553-e264-4abd-a853-bf86bf3b22bd\") " pod="openstack/glance-default-external-api-0"
Oct 10 18:02:48 crc kubenswrapper[4799]: I1010 18:02:48.629229 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 10 18:02:49 crc kubenswrapper[4799]: I1010 18:02:49.198111 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 10 18:02:49 crc kubenswrapper[4799]: I1010 18:02:49.258407 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4d6ca553-e264-4abd-a853-bf86bf3b22bd","Type":"ContainerStarted","Data":"5c97c7875787b1cec111a567c9df395a0598152fa580cfaf04c0062dc7a850d6"}
Oct 10 18:02:49 crc kubenswrapper[4799]: I1010 18:02:49.259859 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="9fcb813b-01de-46fa-baee-f18803c8808c" containerName="glance-log" containerID="cri-o://6844d82d560123bfb46715c1b190ed1e20825a9fcae5782981d5396cd75db098" gracePeriod=30
Oct 10 18:02:49 crc kubenswrapper[4799]: I1010 18:02:49.259908 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="9fcb813b-01de-46fa-baee-f18803c8808c" containerName="glance-httpd" containerID="cri-o://88508ee1b8a4e90158469e4940dc634fb11f306572ea18ee45254535f0799c83" gracePeriod=30
Oct 10 18:02:49 crc kubenswrapper[4799]: I1010 18:02:49.415288 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7fe1d290-8534-412a-9dd5-4e9bb43b14b0" path="/var/lib/kubelet/pods/7fe1d290-8534-412a-9dd5-4e9bb43b14b0/volumes"
Oct 10 18:02:49 crc kubenswrapper[4799]: I1010 18:02:49.792054 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Oct 10 18:02:49 crc kubenswrapper[4799]: I1010 18:02:49.907701 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4vkv2\" (UniqueName: \"kubernetes.io/projected/9fcb813b-01de-46fa-baee-f18803c8808c-kube-api-access-4vkv2\") pod \"9fcb813b-01de-46fa-baee-f18803c8808c\" (UID: \"9fcb813b-01de-46fa-baee-f18803c8808c\") "
Oct 10 18:02:49 crc kubenswrapper[4799]: I1010 18:02:49.907744 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9fcb813b-01de-46fa-baee-f18803c8808c-logs\") pod \"9fcb813b-01de-46fa-baee-f18803c8808c\" (UID: \"9fcb813b-01de-46fa-baee-f18803c8808c\") "
Oct 10 18:02:49 crc kubenswrapper[4799]: I1010 18:02:49.907819 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9fcb813b-01de-46fa-baee-f18803c8808c-httpd-run\") pod \"9fcb813b-01de-46fa-baee-f18803c8808c\" (UID: \"9fcb813b-01de-46fa-baee-f18803c8808c\") "
Oct 10 18:02:49 crc kubenswrapper[4799]: I1010 18:02:49.907962 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9fcb813b-01de-46fa-baee-f18803c8808c-scripts\") pod \"9fcb813b-01de-46fa-baee-f18803c8808c\" (UID: \"9fcb813b-01de-46fa-baee-f18803c8808c\") "
Oct 10 18:02:49 crc kubenswrapper[4799]: I1010 18:02:49.908073 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9fcb813b-01de-46fa-baee-f18803c8808c-combined-ca-bundle\") pod \"9fcb813b-01de-46fa-baee-f18803c8808c\" (UID: \"9fcb813b-01de-46fa-baee-f18803c8808c\") "
Oct 10 18:02:49 crc kubenswrapper[4799]: I1010 18:02:49.908109 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9fcb813b-01de-46fa-baee-f18803c8808c-config-data\") pod \"9fcb813b-01de-46fa-baee-f18803c8808c\" (UID: \"9fcb813b-01de-46fa-baee-f18803c8808c\") "
Oct 10 18:02:49 crc kubenswrapper[4799]: I1010 18:02:49.908152 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/9fcb813b-01de-46fa-baee-f18803c8808c-ceph\") pod \"9fcb813b-01de-46fa-baee-f18803c8808c\" (UID: \"9fcb813b-01de-46fa-baee-f18803c8808c\") "
Oct 10 18:02:49 crc kubenswrapper[4799]: I1010 18:02:49.908703 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9fcb813b-01de-46fa-baee-f18803c8808c-logs" (OuterVolumeSpecName: "logs") pod "9fcb813b-01de-46fa-baee-f18803c8808c" (UID: "9fcb813b-01de-46fa-baee-f18803c8808c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 18:02:49 crc kubenswrapper[4799]: I1010 18:02:49.909480 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9fcb813b-01de-46fa-baee-f18803c8808c-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "9fcb813b-01de-46fa-baee-f18803c8808c" (UID: "9fcb813b-01de-46fa-baee-f18803c8808c"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 18:02:49 crc kubenswrapper[4799]: I1010 18:02:49.918806 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9fcb813b-01de-46fa-baee-f18803c8808c-scripts" (OuterVolumeSpecName: "scripts") pod "9fcb813b-01de-46fa-baee-f18803c8808c" (UID: "9fcb813b-01de-46fa-baee-f18803c8808c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:02:49 crc kubenswrapper[4799]: I1010 18:02:49.919160 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9fcb813b-01de-46fa-baee-f18803c8808c-kube-api-access-4vkv2" (OuterVolumeSpecName: "kube-api-access-4vkv2") pod "9fcb813b-01de-46fa-baee-f18803c8808c" (UID: "9fcb813b-01de-46fa-baee-f18803c8808c"). InnerVolumeSpecName "kube-api-access-4vkv2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 18:02:49 crc kubenswrapper[4799]: I1010 18:02:49.918859 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9fcb813b-01de-46fa-baee-f18803c8808c-ceph" (OuterVolumeSpecName: "ceph") pod "9fcb813b-01de-46fa-baee-f18803c8808c" (UID: "9fcb813b-01de-46fa-baee-f18803c8808c"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 18:02:49 crc kubenswrapper[4799]: I1010 18:02:49.946422 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9fcb813b-01de-46fa-baee-f18803c8808c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9fcb813b-01de-46fa-baee-f18803c8808c" (UID: "9fcb813b-01de-46fa-baee-f18803c8808c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:02:49 crc kubenswrapper[4799]: I1010 18:02:49.969941 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9fcb813b-01de-46fa-baee-f18803c8808c-config-data" (OuterVolumeSpecName: "config-data") pod "9fcb813b-01de-46fa-baee-f18803c8808c" (UID: "9fcb813b-01de-46fa-baee-f18803c8808c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.010733 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9fcb813b-01de-46fa-baee-f18803c8808c-scripts\") on node \"crc\" DevicePath \"\""
Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.010805 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9fcb813b-01de-46fa-baee-f18803c8808c-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.010827 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9fcb813b-01de-46fa-baee-f18803c8808c-config-data\") on node \"crc\" DevicePath \"\""
Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.010844 4799 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/9fcb813b-01de-46fa-baee-f18803c8808c-ceph\") on node \"crc\" DevicePath \"\""
Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.010862 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4vkv2\" (UniqueName: \"kubernetes.io/projected/9fcb813b-01de-46fa-baee-f18803c8808c-kube-api-access-4vkv2\") on node \"crc\" DevicePath \"\""
Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.010880 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9fcb813b-01de-46fa-baee-f18803c8808c-logs\") on node \"crc\" DevicePath \"\""
Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.010918 4799 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9fcb813b-01de-46fa-baee-f18803c8808c-httpd-run\") on node \"crc\" DevicePath \"\""
Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.275614 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4d6ca553-e264-4abd-a853-bf86bf3b22bd","Type":"ContainerStarted","Data":"6ef17f87538e7d672932f8e4b33ace3b8c80f4997198bcb997f948e0f6a49ef3"}
Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.278187 4799 generic.go:334] "Generic (PLEG): container finished" podID="9fcb813b-01de-46fa-baee-f18803c8808c" containerID="88508ee1b8a4e90158469e4940dc634fb11f306572ea18ee45254535f0799c83" exitCode=0
Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.278213 4799 generic.go:334] "Generic (PLEG): container finished" podID="9fcb813b-01de-46fa-baee-f18803c8808c" containerID="6844d82d560123bfb46715c1b190ed1e20825a9fcae5782981d5396cd75db098" exitCode=143
Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.278234 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9fcb813b-01de-46fa-baee-f18803c8808c","Type":"ContainerDied","Data":"88508ee1b8a4e90158469e4940dc634fb11f306572ea18ee45254535f0799c83"}
Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.278255 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9fcb813b-01de-46fa-baee-f18803c8808c","Type":"ContainerDied","Data":"6844d82d560123bfb46715c1b190ed1e20825a9fcae5782981d5396cd75db098"}
Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.278269 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9fcb813b-01de-46fa-baee-f18803c8808c","Type":"ContainerDied","Data":"2a967844efa6fc7ce3fed9e20574ecaf324c3cc839cc049aaf56a591bf8b59dd"}
event={"ID":"9fcb813b-01de-46fa-baee-f18803c8808c","Type":"ContainerDied","Data":"2a967844efa6fc7ce3fed9e20574ecaf324c3cc839cc049aaf56a591bf8b59dd"} Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.278290 4799 scope.go:117] "RemoveContainer" containerID="88508ee1b8a4e90158469e4940dc634fb11f306572ea18ee45254535f0799c83" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.278312 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.306115 4799 scope.go:117] "RemoveContainer" containerID="6844d82d560123bfb46715c1b190ed1e20825a9fcae5782981d5396cd75db098" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.343571 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.351412 4799 scope.go:117] "RemoveContainer" containerID="88508ee1b8a4e90158469e4940dc634fb11f306572ea18ee45254535f0799c83" Oct 10 18:02:50 crc kubenswrapper[4799]: E1010 18:02:50.352006 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88508ee1b8a4e90158469e4940dc634fb11f306572ea18ee45254535f0799c83\": container with ID starting with 88508ee1b8a4e90158469e4940dc634fb11f306572ea18ee45254535f0799c83 not found: ID does not exist" containerID="88508ee1b8a4e90158469e4940dc634fb11f306572ea18ee45254535f0799c83" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.352059 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88508ee1b8a4e90158469e4940dc634fb11f306572ea18ee45254535f0799c83"} err="failed to get container status \"88508ee1b8a4e90158469e4940dc634fb11f306572ea18ee45254535f0799c83\": rpc error: code = NotFound desc = could not find container \"88508ee1b8a4e90158469e4940dc634fb11f306572ea18ee45254535f0799c83\": container with ID starting with 88508ee1b8a4e90158469e4940dc634fb11f306572ea18ee45254535f0799c83 not found: ID does not exist" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.352098 4799 scope.go:117] "RemoveContainer" containerID="6844d82d560123bfb46715c1b190ed1e20825a9fcae5782981d5396cd75db098" Oct 10 18:02:50 crc kubenswrapper[4799]: E1010 18:02:50.352571 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6844d82d560123bfb46715c1b190ed1e20825a9fcae5782981d5396cd75db098\": container with ID starting with 6844d82d560123bfb46715c1b190ed1e20825a9fcae5782981d5396cd75db098 not found: ID does not exist" containerID="6844d82d560123bfb46715c1b190ed1e20825a9fcae5782981d5396cd75db098" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.352607 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6844d82d560123bfb46715c1b190ed1e20825a9fcae5782981d5396cd75db098"} err="failed to get container status \"6844d82d560123bfb46715c1b190ed1e20825a9fcae5782981d5396cd75db098\": rpc error: code = NotFound desc = could not find container \"6844d82d560123bfb46715c1b190ed1e20825a9fcae5782981d5396cd75db098\": container with ID starting with 6844d82d560123bfb46715c1b190ed1e20825a9fcae5782981d5396cd75db098 not found: ID does not exist" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.352631 4799 scope.go:117] "RemoveContainer" containerID="88508ee1b8a4e90158469e4940dc634fb11f306572ea18ee45254535f0799c83" Oct 10 18:02:50 crc 
kubenswrapper[4799]: I1010 18:02:50.352946 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88508ee1b8a4e90158469e4940dc634fb11f306572ea18ee45254535f0799c83"} err="failed to get container status \"88508ee1b8a4e90158469e4940dc634fb11f306572ea18ee45254535f0799c83\": rpc error: code = NotFound desc = could not find container \"88508ee1b8a4e90158469e4940dc634fb11f306572ea18ee45254535f0799c83\": container with ID starting with 88508ee1b8a4e90158469e4940dc634fb11f306572ea18ee45254535f0799c83 not found: ID does not exist" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.352974 4799 scope.go:117] "RemoveContainer" containerID="6844d82d560123bfb46715c1b190ed1e20825a9fcae5782981d5396cd75db098" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.353306 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6844d82d560123bfb46715c1b190ed1e20825a9fcae5782981d5396cd75db098"} err="failed to get container status \"6844d82d560123bfb46715c1b190ed1e20825a9fcae5782981d5396cd75db098\": rpc error: code = NotFound desc = could not find container \"6844d82d560123bfb46715c1b190ed1e20825a9fcae5782981d5396cd75db098\": container with ID starting with 6844d82d560123bfb46715c1b190ed1e20825a9fcae5782981d5396cd75db098 not found: ID does not exist" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.360937 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.372870 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 10 18:02:50 crc kubenswrapper[4799]: E1010 18:02:50.373528 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fcb813b-01de-46fa-baee-f18803c8808c" containerName="glance-httpd" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.373558 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fcb813b-01de-46fa-baee-f18803c8808c" containerName="glance-httpd" Oct 10 18:02:50 crc kubenswrapper[4799]: E1010 18:02:50.373586 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fcb813b-01de-46fa-baee-f18803c8808c" containerName="glance-log" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.373601 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fcb813b-01de-46fa-baee-f18803c8808c" containerName="glance-log" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.373932 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="9fcb813b-01de-46fa-baee-f18803c8808c" containerName="glance-log" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.373976 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="9fcb813b-01de-46fa-baee-f18803c8808c" containerName="glance-httpd" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.375553 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.379540 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.394259 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.521689 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fq5sj\" (UniqueName: \"kubernetes.io/projected/7619fca7-982b-4e59-aa11-127f345ffbc0-kube-api-access-fq5sj\") pod \"glance-default-internal-api-0\" (UID: \"7619fca7-982b-4e59-aa11-127f345ffbc0\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.521794 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7619fca7-982b-4e59-aa11-127f345ffbc0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"7619fca7-982b-4e59-aa11-127f345ffbc0\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.521871 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7619fca7-982b-4e59-aa11-127f345ffbc0-ceph\") pod \"glance-default-internal-api-0\" (UID: \"7619fca7-982b-4e59-aa11-127f345ffbc0\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.521888 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7619fca7-982b-4e59-aa11-127f345ffbc0-logs\") pod \"glance-default-internal-api-0\" (UID: \"7619fca7-982b-4e59-aa11-127f345ffbc0\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.521977 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7619fca7-982b-4e59-aa11-127f345ffbc0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"7619fca7-982b-4e59-aa11-127f345ffbc0\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.521996 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7619fca7-982b-4e59-aa11-127f345ffbc0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"7619fca7-982b-4e59-aa11-127f345ffbc0\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.522047 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7619fca7-982b-4e59-aa11-127f345ffbc0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"7619fca7-982b-4e59-aa11-127f345ffbc0\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.624442 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7619fca7-982b-4e59-aa11-127f345ffbc0-ceph\") pod \"glance-default-internal-api-0\" (UID: \"7619fca7-982b-4e59-aa11-127f345ffbc0\") " 
pod="openstack/glance-default-internal-api-0" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.624506 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7619fca7-982b-4e59-aa11-127f345ffbc0-logs\") pod \"glance-default-internal-api-0\" (UID: \"7619fca7-982b-4e59-aa11-127f345ffbc0\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.624600 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7619fca7-982b-4e59-aa11-127f345ffbc0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"7619fca7-982b-4e59-aa11-127f345ffbc0\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.624645 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7619fca7-982b-4e59-aa11-127f345ffbc0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"7619fca7-982b-4e59-aa11-127f345ffbc0\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.624687 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7619fca7-982b-4e59-aa11-127f345ffbc0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"7619fca7-982b-4e59-aa11-127f345ffbc0\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.624803 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fq5sj\" (UniqueName: \"kubernetes.io/projected/7619fca7-982b-4e59-aa11-127f345ffbc0-kube-api-access-fq5sj\") pod \"glance-default-internal-api-0\" (UID: \"7619fca7-982b-4e59-aa11-127f345ffbc0\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.624871 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7619fca7-982b-4e59-aa11-127f345ffbc0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"7619fca7-982b-4e59-aa11-127f345ffbc0\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.625979 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7619fca7-982b-4e59-aa11-127f345ffbc0-logs\") pod \"glance-default-internal-api-0\" (UID: \"7619fca7-982b-4e59-aa11-127f345ffbc0\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.626595 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7619fca7-982b-4e59-aa11-127f345ffbc0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"7619fca7-982b-4e59-aa11-127f345ffbc0\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.635110 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7619fca7-982b-4e59-aa11-127f345ffbc0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"7619fca7-982b-4e59-aa11-127f345ffbc0\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.641687 4799 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7619fca7-982b-4e59-aa11-127f345ffbc0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"7619fca7-982b-4e59-aa11-127f345ffbc0\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.643654 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7619fca7-982b-4e59-aa11-127f345ffbc0-ceph\") pod \"glance-default-internal-api-0\" (UID: \"7619fca7-982b-4e59-aa11-127f345ffbc0\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.644523 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7619fca7-982b-4e59-aa11-127f345ffbc0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"7619fca7-982b-4e59-aa11-127f345ffbc0\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.645380 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fq5sj\" (UniqueName: \"kubernetes.io/projected/7619fca7-982b-4e59-aa11-127f345ffbc0-kube-api-access-fq5sj\") pod \"glance-default-internal-api-0\" (UID: \"7619fca7-982b-4e59-aa11-127f345ffbc0\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:02:50 crc kubenswrapper[4799]: I1010 18:02:50.710923 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 10 18:02:51 crc kubenswrapper[4799]: I1010 18:02:51.080618 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 10 18:02:51 crc kubenswrapper[4799]: I1010 18:02:51.292392 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"7619fca7-982b-4e59-aa11-127f345ffbc0","Type":"ContainerStarted","Data":"b93bbd4c9b121f2ad5c5d57db563219222ea92e2980b07de1cab4a9f9d80171d"} Oct 10 18:02:51 crc kubenswrapper[4799]: I1010 18:02:51.302617 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4d6ca553-e264-4abd-a853-bf86bf3b22bd","Type":"ContainerStarted","Data":"ecd2dd18bc296c70097d8ef6fc6df5fb035af072ab286ceadfd05c8b44dbf0ff"} Oct 10 18:02:51 crc kubenswrapper[4799]: I1010 18:02:51.413202 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9fcb813b-01de-46fa-baee-f18803c8808c" path="/var/lib/kubelet/pods/9fcb813b-01de-46fa-baee-f18803c8808c/volumes" Oct 10 18:02:52 crc kubenswrapper[4799]: I1010 18:02:52.331520 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"7619fca7-982b-4e59-aa11-127f345ffbc0","Type":"ContainerStarted","Data":"f16021709cb91c7869706e26dfc2ee021e3931ad08061ea2ce72ea1a23302c37"} Oct 10 18:02:52 crc kubenswrapper[4799]: I1010 18:02:52.331581 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"7619fca7-982b-4e59-aa11-127f345ffbc0","Type":"ContainerStarted","Data":"0f2fa7258bd37688952d053abceccaf84fc26464448073ec999397d92f6bb5ac"} Oct 10 18:02:52 crc kubenswrapper[4799]: I1010 18:02:52.368122 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.368039309 
podStartE2EDuration="4.368039309s" podCreationTimestamp="2025-10-10 18:02:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:02:51.338094695 +0000 UTC m=+5464.846418810" watchObservedRunningTime="2025-10-10 18:02:52.368039309 +0000 UTC m=+5465.876363454"
Oct 10 18:02:52 crc kubenswrapper[4799]: I1010 18:02:52.377095 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=2.377071879 podStartE2EDuration="2.377071879s" podCreationTimestamp="2025-10-10 18:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:02:52.367214799 +0000 UTC m=+5465.875538924" watchObservedRunningTime="2025-10-10 18:02:52.377071879 +0000 UTC m=+5465.885395994"
Oct 10 18:02:54 crc kubenswrapper[4799]: I1010 18:02:54.972066 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-769797d5d7-zjthw"
Oct 10 18:02:55 crc kubenswrapper[4799]: I1010 18:02:55.051906 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7b68bbc6b9-qpjxb"]
Oct 10 18:02:55 crc kubenswrapper[4799]: I1010 18:02:55.052451 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7b68bbc6b9-qpjxb" podUID="7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d" containerName="dnsmasq-dns" containerID="cri-o://bc46fd7d53c0a6a08ee38b93473d5d11e5b90a7596cd1f37676a77be9561e7f9" gracePeriod=10
Oct 10 18:02:55 crc kubenswrapper[4799]: I1010 18:02:55.366668 4799 generic.go:334] "Generic (PLEG): container finished" podID="7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d" containerID="bc46fd7d53c0a6a08ee38b93473d5d11e5b90a7596cd1f37676a77be9561e7f9" exitCode=0
Oct 10 18:02:55 crc kubenswrapper[4799]: I1010 18:02:55.366795 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b68bbc6b9-qpjxb" event={"ID":"7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d","Type":"ContainerDied","Data":"bc46fd7d53c0a6a08ee38b93473d5d11e5b90a7596cd1f37676a77be9561e7f9"}
Oct 10 18:02:55 crc kubenswrapper[4799]: I1010 18:02:55.539940 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7b68bbc6b9-qpjxb"
Oct 10 18:02:55 crc kubenswrapper[4799]: I1010 18:02:55.617234 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d-config\") pod \"7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d\" (UID: \"7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d\") "
Oct 10 18:02:55 crc kubenswrapper[4799]: I1010 18:02:55.617308 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9x2tv\" (UniqueName: \"kubernetes.io/projected/7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d-kube-api-access-9x2tv\") pod \"7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d\" (UID: \"7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d\") "
Oct 10 18:02:55 crc kubenswrapper[4799]: I1010 18:02:55.617388 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d-ovsdbserver-nb\") pod \"7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d\" (UID: \"7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d\") "
Oct 10 18:02:55 crc kubenswrapper[4799]: I1010 18:02:55.617475 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d-dns-svc\") pod \"7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d\" (UID: \"7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d\") "
Oct 10 18:02:55 crc kubenswrapper[4799]: I1010 18:02:55.617574 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d-ovsdbserver-sb\") pod \"7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d\" (UID: \"7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d\") "
Oct 10 18:02:55 crc kubenswrapper[4799]: I1010 18:02:55.622348 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d-kube-api-access-9x2tv" (OuterVolumeSpecName: "kube-api-access-9x2tv") pod "7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d" (UID: "7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d"). InnerVolumeSpecName "kube-api-access-9x2tv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 18:02:55 crc kubenswrapper[4799]: I1010 18:02:55.661499 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d" (UID: "7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 10 18:02:55 crc kubenswrapper[4799]: I1010 18:02:55.662011 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d" (UID: "7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 10 18:02:55 crc kubenswrapper[4799]: I1010 18:02:55.668606 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d-config" (OuterVolumeSpecName: "config") pod "7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d" (UID: "7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 10 18:02:55 crc kubenswrapper[4799]: I1010 18:02:55.672382 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d" (UID: "7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 10 18:02:55 crc kubenswrapper[4799]: I1010 18:02:55.719404 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 10 18:02:55 crc kubenswrapper[4799]: I1010 18:02:55.719434 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Oct 10 18:02:55 crc kubenswrapper[4799]: I1010 18:02:55.719446 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d-config\") on node \"crc\" DevicePath \"\""
Oct 10 18:02:55 crc kubenswrapper[4799]: I1010 18:02:55.719456 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9x2tv\" (UniqueName: \"kubernetes.io/projected/7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d-kube-api-access-9x2tv\") on node \"crc\" DevicePath \"\""
Oct 10 18:02:55 crc kubenswrapper[4799]: I1010 18:02:55.719465 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Oct 10 18:02:56 crc kubenswrapper[4799]: I1010 18:02:56.383717 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b68bbc6b9-qpjxb" event={"ID":"7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d","Type":"ContainerDied","Data":"f9a43b6e5a392e7c9f52cf2f718892f969fd34ca23cfc9213b4f98e504887ca5"}
Oct 10 18:02:56 crc kubenswrapper[4799]: I1010 18:02:56.383790 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7b68bbc6b9-qpjxb"
Oct 10 18:02:56 crc kubenswrapper[4799]: I1010 18:02:56.384242 4799 scope.go:117] "RemoveContainer" containerID="bc46fd7d53c0a6a08ee38b93473d5d11e5b90a7596cd1f37676a77be9561e7f9"
Oct 10 18:02:56 crc kubenswrapper[4799]: I1010 18:02:56.433075 4799 scope.go:117] "RemoveContainer" containerID="ed3cc9eb0d83ceb7e79cd0ebe9f4e944a766d679b34554a501940899e8219444"
Oct 10 18:02:56 crc kubenswrapper[4799]: I1010 18:02:56.443986 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7b68bbc6b9-qpjxb"]
Oct 10 18:02:56 crc kubenswrapper[4799]: I1010 18:02:56.470137 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7b68bbc6b9-qpjxb"]
Oct 10 18:02:57 crc kubenswrapper[4799]: I1010 18:02:57.434534 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d" path="/var/lib/kubelet/pods/7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d/volumes"
Oct 10 18:02:58 crc kubenswrapper[4799]: I1010 18:02:58.629842 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Oct 10 18:02:58 crc kubenswrapper[4799]: I1010 18:02:58.629917 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Oct 10 18:02:58 crc kubenswrapper[4799]: I1010 18:02:58.681135 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Oct 10 18:02:58 crc kubenswrapper[4799]: I1010 18:02:58.706294 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Oct 10 18:02:59 crc kubenswrapper[4799]: I1010 18:02:59.444496 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Oct 10 18:02:59 crc kubenswrapper[4799]: I1010 18:02:59.444583 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Oct 10 18:03:00 crc kubenswrapper[4799]: I1010 18:03:00.711486 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Oct 10 18:03:00 crc kubenswrapper[4799]: I1010 18:03:00.711539 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Oct 10 18:03:00 crc kubenswrapper[4799]: I1010 18:03:00.754022 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Oct 10 18:03:00 crc kubenswrapper[4799]: I1010 18:03:00.762930 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Oct 10 18:03:01 crc kubenswrapper[4799]: I1010 18:03:01.264888 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Oct 10 18:03:01 crc kubenswrapper[4799]: I1010 18:03:01.368582 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Oct 10 18:03:01 crc kubenswrapper[4799]: I1010 18:03:01.479123 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Oct 10 18:03:01 crc kubenswrapper[4799]: I1010 18:03:01.479193 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Oct 10 18:03:03 crc kubenswrapper[4799]: I1010 18:03:03.264540 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Oct 10 18:03:03 crc kubenswrapper[4799]: I1010 18:03:03.271249 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Oct 10 18:03:07 crc kubenswrapper[4799]: I1010 18:03:07.501677 4799 scope.go:117] "RemoveContainer" containerID="e544fbe3bed8ac93a11df7acc87701ee36765a1579159a1d93725d8806f6c31b"
Oct 10 18:03:10 crc kubenswrapper[4799]: I1010 18:03:10.196904 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-g5czp"]
Oct 10 18:03:10 crc kubenswrapper[4799]: E1010 18:03:10.198033 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d" containerName="dnsmasq-dns"
Oct 10 18:03:10 crc kubenswrapper[4799]: I1010 18:03:10.198054 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d" containerName="dnsmasq-dns"
Oct 10 18:03:10 crc kubenswrapper[4799]: E1010 18:03:10.198081 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d" containerName="init"
Oct 10 18:03:10 crc kubenswrapper[4799]: I1010 18:03:10.198093 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d" containerName="init"
Oct 10 18:03:10 crc kubenswrapper[4799]: I1010 18:03:10.198406 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a0e164f-a14d-43a3-8f1c-e00e7b58fb5d" containerName="dnsmasq-dns"
Oct 10 18:03:10 crc kubenswrapper[4799]: I1010 18:03:10.199352 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-g5czp"
Oct 10 18:03:10 crc kubenswrapper[4799]: I1010 18:03:10.207418 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-g5czp"]
Oct 10 18:03:10 crc kubenswrapper[4799]: I1010 18:03:10.306235 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mpr6\" (UniqueName: \"kubernetes.io/projected/80bb2405-1710-46ed-9414-e92c883f1e49-kube-api-access-9mpr6\") pod \"placement-db-create-g5czp\" (UID: \"80bb2405-1710-46ed-9414-e92c883f1e49\") " pod="openstack/placement-db-create-g5czp"
Oct 10 18:03:10 crc kubenswrapper[4799]: I1010 18:03:10.407660 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mpr6\" (UniqueName: \"kubernetes.io/projected/80bb2405-1710-46ed-9414-e92c883f1e49-kube-api-access-9mpr6\") pod \"placement-db-create-g5czp\" (UID: \"80bb2405-1710-46ed-9414-e92c883f1e49\") " pod="openstack/placement-db-create-g5czp"
Oct 10 18:03:10 crc kubenswrapper[4799]: I1010 18:03:10.431537 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9mpr6\" (UniqueName: \"kubernetes.io/projected/80bb2405-1710-46ed-9414-e92c883f1e49-kube-api-access-9mpr6\") pod \"placement-db-create-g5czp\" (UID: \"80bb2405-1710-46ed-9414-e92c883f1e49\") " pod="openstack/placement-db-create-g5czp"
Oct 10 18:03:10 crc kubenswrapper[4799]: I1010 18:03:10.580420 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-g5czp"
Oct 10 18:03:11 crc kubenswrapper[4799]: I1010 18:03:11.050133 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-g5czp"]
Oct 10 18:03:11 crc kubenswrapper[4799]: W1010 18:03:11.058583 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod80bb2405_1710_46ed_9414_e92c883f1e49.slice/crio-570888a50077931e4afc64c8d761aebed6c2caf7e5111a61580e71e278193c66 WatchSource:0}: Error finding container 570888a50077931e4afc64c8d761aebed6c2caf7e5111a61580e71e278193c66: Status 404 returned error can't find the container with id 570888a50077931e4afc64c8d761aebed6c2caf7e5111a61580e71e278193c66
Oct 10 18:03:11 crc kubenswrapper[4799]: I1010 18:03:11.593346 4799 generic.go:334] "Generic (PLEG): container finished" podID="80bb2405-1710-46ed-9414-e92c883f1e49" containerID="8adbc00f08ae1e0fe2cc01d0bd8926a2ebfd5bddc1932ef6256585089c184ae7" exitCode=0
Oct 10 18:03:11 crc kubenswrapper[4799]: I1010 18:03:11.593440 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-g5czp" event={"ID":"80bb2405-1710-46ed-9414-e92c883f1e49","Type":"ContainerDied","Data":"8adbc00f08ae1e0fe2cc01d0bd8926a2ebfd5bddc1932ef6256585089c184ae7"}
Oct 10 18:03:11 crc kubenswrapper[4799]: I1010 18:03:11.593788 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-g5czp" event={"ID":"80bb2405-1710-46ed-9414-e92c883f1e49","Type":"ContainerStarted","Data":"570888a50077931e4afc64c8d761aebed6c2caf7e5111a61580e71e278193c66"}
Oct 10 18:03:13 crc kubenswrapper[4799]: I1010 18:03:13.045198 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-g5czp"
Oct 10 18:03:13 crc kubenswrapper[4799]: I1010 18:03:13.162428 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9mpr6\" (UniqueName: \"kubernetes.io/projected/80bb2405-1710-46ed-9414-e92c883f1e49-kube-api-access-9mpr6\") pod \"80bb2405-1710-46ed-9414-e92c883f1e49\" (UID: \"80bb2405-1710-46ed-9414-e92c883f1e49\") "
Oct 10 18:03:13 crc kubenswrapper[4799]: I1010 18:03:13.170931 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80bb2405-1710-46ed-9414-e92c883f1e49-kube-api-access-9mpr6" (OuterVolumeSpecName: "kube-api-access-9mpr6") pod "80bb2405-1710-46ed-9414-e92c883f1e49" (UID: "80bb2405-1710-46ed-9414-e92c883f1e49"). InnerVolumeSpecName "kube-api-access-9mpr6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 18:03:13 crc kubenswrapper[4799]: I1010 18:03:13.264904 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9mpr6\" (UniqueName: \"kubernetes.io/projected/80bb2405-1710-46ed-9414-e92c883f1e49-kube-api-access-9mpr6\") on node \"crc\" DevicePath \"\""
Oct 10 18:03:13 crc kubenswrapper[4799]: I1010 18:03:13.620618 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-g5czp" event={"ID":"80bb2405-1710-46ed-9414-e92c883f1e49","Type":"ContainerDied","Data":"570888a50077931e4afc64c8d761aebed6c2caf7e5111a61580e71e278193c66"}
Oct 10 18:03:13 crc kubenswrapper[4799]: I1010 18:03:13.620670 4799 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/placement-db-create-g5czp" Oct 10 18:03:13 crc kubenswrapper[4799]: I1010 18:03:13.620695 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="570888a50077931e4afc64c8d761aebed6c2caf7e5111a61580e71e278193c66" Oct 10 18:03:20 crc kubenswrapper[4799]: I1010 18:03:20.358159 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-8aed-account-create-bj7jg"] Oct 10 18:03:20 crc kubenswrapper[4799]: E1010 18:03:20.359012 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80bb2405-1710-46ed-9414-e92c883f1e49" containerName="mariadb-database-create" Oct 10 18:03:20 crc kubenswrapper[4799]: I1010 18:03:20.359026 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="80bb2405-1710-46ed-9414-e92c883f1e49" containerName="mariadb-database-create" Oct 10 18:03:20 crc kubenswrapper[4799]: I1010 18:03:20.359193 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="80bb2405-1710-46ed-9414-e92c883f1e49" containerName="mariadb-database-create" Oct 10 18:03:20 crc kubenswrapper[4799]: I1010 18:03:20.359734 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-8aed-account-create-bj7jg" Oct 10 18:03:20 crc kubenswrapper[4799]: I1010 18:03:20.361894 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Oct 10 18:03:20 crc kubenswrapper[4799]: I1010 18:03:20.373802 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-8aed-account-create-bj7jg"] Oct 10 18:03:20 crc kubenswrapper[4799]: I1010 18:03:20.517143 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkccp\" (UniqueName: \"kubernetes.io/projected/06167591-831b-4d1c-950f-60158682fc9b-kube-api-access-qkccp\") pod \"placement-8aed-account-create-bj7jg\" (UID: \"06167591-831b-4d1c-950f-60158682fc9b\") " pod="openstack/placement-8aed-account-create-bj7jg" Oct 10 18:03:20 crc kubenswrapper[4799]: I1010 18:03:20.619739 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkccp\" (UniqueName: \"kubernetes.io/projected/06167591-831b-4d1c-950f-60158682fc9b-kube-api-access-qkccp\") pod \"placement-8aed-account-create-bj7jg\" (UID: \"06167591-831b-4d1c-950f-60158682fc9b\") " pod="openstack/placement-8aed-account-create-bj7jg" Oct 10 18:03:20 crc kubenswrapper[4799]: I1010 18:03:20.639552 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkccp\" (UniqueName: \"kubernetes.io/projected/06167591-831b-4d1c-950f-60158682fc9b-kube-api-access-qkccp\") pod \"placement-8aed-account-create-bj7jg\" (UID: \"06167591-831b-4d1c-950f-60158682fc9b\") " pod="openstack/placement-8aed-account-create-bj7jg" Oct 10 18:03:20 crc kubenswrapper[4799]: I1010 18:03:20.704455 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-8aed-account-create-bj7jg" Oct 10 18:03:21 crc kubenswrapper[4799]: I1010 18:03:21.178946 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-8aed-account-create-bj7jg"] Oct 10 18:03:21 crc kubenswrapper[4799]: I1010 18:03:21.717557 4799 generic.go:334] "Generic (PLEG): container finished" podID="06167591-831b-4d1c-950f-60158682fc9b" containerID="dcd21a975134541c976755f676b7dfd64e2570805cf5722972d467e07a190b0c" exitCode=0 Oct 10 18:03:21 crc kubenswrapper[4799]: I1010 18:03:21.717627 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8aed-account-create-bj7jg" event={"ID":"06167591-831b-4d1c-950f-60158682fc9b","Type":"ContainerDied","Data":"dcd21a975134541c976755f676b7dfd64e2570805cf5722972d467e07a190b0c"} Oct 10 18:03:21 crc kubenswrapper[4799]: I1010 18:03:21.717889 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8aed-account-create-bj7jg" event={"ID":"06167591-831b-4d1c-950f-60158682fc9b","Type":"ContainerStarted","Data":"478c02552672a221e1c7e464154e2c45144c8a15a2f4261792325aa298d2846c"} Oct 10 18:03:23 crc kubenswrapper[4799]: I1010 18:03:23.172974 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-8aed-account-create-bj7jg" Oct 10 18:03:23 crc kubenswrapper[4799]: I1010 18:03:23.370599 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qkccp\" (UniqueName: \"kubernetes.io/projected/06167591-831b-4d1c-950f-60158682fc9b-kube-api-access-qkccp\") pod \"06167591-831b-4d1c-950f-60158682fc9b\" (UID: \"06167591-831b-4d1c-950f-60158682fc9b\") " Oct 10 18:03:23 crc kubenswrapper[4799]: I1010 18:03:23.384095 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06167591-831b-4d1c-950f-60158682fc9b-kube-api-access-qkccp" (OuterVolumeSpecName: "kube-api-access-qkccp") pod "06167591-831b-4d1c-950f-60158682fc9b" (UID: "06167591-831b-4d1c-950f-60158682fc9b"). InnerVolumeSpecName "kube-api-access-qkccp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:03:23 crc kubenswrapper[4799]: I1010 18:03:23.473443 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qkccp\" (UniqueName: \"kubernetes.io/projected/06167591-831b-4d1c-950f-60158682fc9b-kube-api-access-qkccp\") on node \"crc\" DevicePath \"\"" Oct 10 18:03:23 crc kubenswrapper[4799]: I1010 18:03:23.750223 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8aed-account-create-bj7jg" event={"ID":"06167591-831b-4d1c-950f-60158682fc9b","Type":"ContainerDied","Data":"478c02552672a221e1c7e464154e2c45144c8a15a2f4261792325aa298d2846c"} Oct 10 18:03:23 crc kubenswrapper[4799]: I1010 18:03:23.750286 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="478c02552672a221e1c7e464154e2c45144c8a15a2f4261792325aa298d2846c" Oct 10 18:03:23 crc kubenswrapper[4799]: I1010 18:03:23.750310 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-8aed-account-create-bj7jg" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.574239 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-fdf47c55f-nkhwh"] Oct 10 18:03:25 crc kubenswrapper[4799]: E1010 18:03:25.574826 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06167591-831b-4d1c-950f-60158682fc9b" containerName="mariadb-account-create" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.574839 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="06167591-831b-4d1c-950f-60158682fc9b" containerName="mariadb-account-create" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.574988 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="06167591-831b-4d1c-950f-60158682fc9b" containerName="mariadb-account-create" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.575846 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fdf47c55f-nkhwh" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.597963 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-fdf47c55f-nkhwh"] Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.642498 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-j8njx"] Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.643682 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-j8njx" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.646983 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.647325 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.647555 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-tgt9v" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.652959 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-j8njx"] Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.715428 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a45728b4-650f-4ed5-bb39-118e04708f2b-dns-svc\") pod \"dnsmasq-dns-fdf47c55f-nkhwh\" (UID: \"a45728b4-650f-4ed5-bb39-118e04708f2b\") " pod="openstack/dnsmasq-dns-fdf47c55f-nkhwh" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.715486 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a45728b4-650f-4ed5-bb39-118e04708f2b-ovsdbserver-sb\") pod \"dnsmasq-dns-fdf47c55f-nkhwh\" (UID: \"a45728b4-650f-4ed5-bb39-118e04708f2b\") " pod="openstack/dnsmasq-dns-fdf47c55f-nkhwh" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.715609 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a45728b4-650f-4ed5-bb39-118e04708f2b-config\") pod \"dnsmasq-dns-fdf47c55f-nkhwh\" (UID: \"a45728b4-650f-4ed5-bb39-118e04708f2b\") " pod="openstack/dnsmasq-dns-fdf47c55f-nkhwh" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.715689 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-92x6q\" (UniqueName: \"kubernetes.io/projected/a45728b4-650f-4ed5-bb39-118e04708f2b-kube-api-access-92x6q\") pod \"dnsmasq-dns-fdf47c55f-nkhwh\" (UID: \"a45728b4-650f-4ed5-bb39-118e04708f2b\") " pod="openstack/dnsmasq-dns-fdf47c55f-nkhwh" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.715808 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a45728b4-650f-4ed5-bb39-118e04708f2b-ovsdbserver-nb\") pod \"dnsmasq-dns-fdf47c55f-nkhwh\" (UID: \"a45728b4-650f-4ed5-bb39-118e04708f2b\") " pod="openstack/dnsmasq-dns-fdf47c55f-nkhwh" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.817620 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a45728b4-650f-4ed5-bb39-118e04708f2b-config\") pod \"dnsmasq-dns-fdf47c55f-nkhwh\" (UID: \"a45728b4-650f-4ed5-bb39-118e04708f2b\") " pod="openstack/dnsmasq-dns-fdf47c55f-nkhwh" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.817986 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4638bb90-28e7-4aec-a5fe-7bdb4195d49d-scripts\") pod \"placement-db-sync-j8njx\" (UID: \"4638bb90-28e7-4aec-a5fe-7bdb4195d49d\") " pod="openstack/placement-db-sync-j8njx" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.818011 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92x6q\" (UniqueName: \"kubernetes.io/projected/a45728b4-650f-4ed5-bb39-118e04708f2b-kube-api-access-92x6q\") pod \"dnsmasq-dns-fdf47c55f-nkhwh\" (UID: \"a45728b4-650f-4ed5-bb39-118e04708f2b\") " pod="openstack/dnsmasq-dns-fdf47c55f-nkhwh" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.818026 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4638bb90-28e7-4aec-a5fe-7bdb4195d49d-config-data\") pod \"placement-db-sync-j8njx\" (UID: \"4638bb90-28e7-4aec-a5fe-7bdb4195d49d\") " pod="openstack/placement-db-sync-j8njx" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.818066 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4638bb90-28e7-4aec-a5fe-7bdb4195d49d-combined-ca-bundle\") pod \"placement-db-sync-j8njx\" (UID: \"4638bb90-28e7-4aec-a5fe-7bdb4195d49d\") " pod="openstack/placement-db-sync-j8njx" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.818090 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a45728b4-650f-4ed5-bb39-118e04708f2b-ovsdbserver-nb\") pod \"dnsmasq-dns-fdf47c55f-nkhwh\" (UID: \"a45728b4-650f-4ed5-bb39-118e04708f2b\") " pod="openstack/dnsmasq-dns-fdf47c55f-nkhwh" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.818207 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qldkl\" (UniqueName: \"kubernetes.io/projected/4638bb90-28e7-4aec-a5fe-7bdb4195d49d-kube-api-access-qldkl\") pod \"placement-db-sync-j8njx\" (UID: \"4638bb90-28e7-4aec-a5fe-7bdb4195d49d\") " pod="openstack/placement-db-sync-j8njx" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.818383 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a45728b4-650f-4ed5-bb39-118e04708f2b-dns-svc\") pod \"dnsmasq-dns-fdf47c55f-nkhwh\" (UID: \"a45728b4-650f-4ed5-bb39-118e04708f2b\") " pod="openstack/dnsmasq-dns-fdf47c55f-nkhwh" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.818437 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4638bb90-28e7-4aec-a5fe-7bdb4195d49d-logs\") pod \"placement-db-sync-j8njx\" (UID: \"4638bb90-28e7-4aec-a5fe-7bdb4195d49d\") " pod="openstack/placement-db-sync-j8njx" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.818471 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a45728b4-650f-4ed5-bb39-118e04708f2b-ovsdbserver-sb\") pod \"dnsmasq-dns-fdf47c55f-nkhwh\" (UID: \"a45728b4-650f-4ed5-bb39-118e04708f2b\") " pod="openstack/dnsmasq-dns-fdf47c55f-nkhwh" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.818931 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a45728b4-650f-4ed5-bb39-118e04708f2b-config\") pod \"dnsmasq-dns-fdf47c55f-nkhwh\" (UID: \"a45728b4-650f-4ed5-bb39-118e04708f2b\") " pod="openstack/dnsmasq-dns-fdf47c55f-nkhwh" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.819008 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a45728b4-650f-4ed5-bb39-118e04708f2b-ovsdbserver-nb\") pod \"dnsmasq-dns-fdf47c55f-nkhwh\" (UID: \"a45728b4-650f-4ed5-bb39-118e04708f2b\") " pod="openstack/dnsmasq-dns-fdf47c55f-nkhwh" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.819095 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a45728b4-650f-4ed5-bb39-118e04708f2b-dns-svc\") pod \"dnsmasq-dns-fdf47c55f-nkhwh\" (UID: \"a45728b4-650f-4ed5-bb39-118e04708f2b\") " pod="openstack/dnsmasq-dns-fdf47c55f-nkhwh" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.819280 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a45728b4-650f-4ed5-bb39-118e04708f2b-ovsdbserver-sb\") pod \"dnsmasq-dns-fdf47c55f-nkhwh\" (UID: \"a45728b4-650f-4ed5-bb39-118e04708f2b\") " pod="openstack/dnsmasq-dns-fdf47c55f-nkhwh" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.837556 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92x6q\" (UniqueName: \"kubernetes.io/projected/a45728b4-650f-4ed5-bb39-118e04708f2b-kube-api-access-92x6q\") pod \"dnsmasq-dns-fdf47c55f-nkhwh\" (UID: \"a45728b4-650f-4ed5-bb39-118e04708f2b\") " pod="openstack/dnsmasq-dns-fdf47c55f-nkhwh" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.895707 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-fdf47c55f-nkhwh" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.919878 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4638bb90-28e7-4aec-a5fe-7bdb4195d49d-combined-ca-bundle\") pod \"placement-db-sync-j8njx\" (UID: \"4638bb90-28e7-4aec-a5fe-7bdb4195d49d\") " pod="openstack/placement-db-sync-j8njx" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.919979 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qldkl\" (UniqueName: \"kubernetes.io/projected/4638bb90-28e7-4aec-a5fe-7bdb4195d49d-kube-api-access-qldkl\") pod \"placement-db-sync-j8njx\" (UID: \"4638bb90-28e7-4aec-a5fe-7bdb4195d49d\") " pod="openstack/placement-db-sync-j8njx" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.920053 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4638bb90-28e7-4aec-a5fe-7bdb4195d49d-logs\") pod \"placement-db-sync-j8njx\" (UID: \"4638bb90-28e7-4aec-a5fe-7bdb4195d49d\") " pod="openstack/placement-db-sync-j8njx" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.920122 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4638bb90-28e7-4aec-a5fe-7bdb4195d49d-scripts\") pod \"placement-db-sync-j8njx\" (UID: \"4638bb90-28e7-4aec-a5fe-7bdb4195d49d\") " pod="openstack/placement-db-sync-j8njx" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.920170 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4638bb90-28e7-4aec-a5fe-7bdb4195d49d-config-data\") pod \"placement-db-sync-j8njx\" (UID: \"4638bb90-28e7-4aec-a5fe-7bdb4195d49d\") " pod="openstack/placement-db-sync-j8njx" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.921120 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4638bb90-28e7-4aec-a5fe-7bdb4195d49d-logs\") pod \"placement-db-sync-j8njx\" (UID: \"4638bb90-28e7-4aec-a5fe-7bdb4195d49d\") " pod="openstack/placement-db-sync-j8njx" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.925986 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4638bb90-28e7-4aec-a5fe-7bdb4195d49d-config-data\") pod \"placement-db-sync-j8njx\" (UID: \"4638bb90-28e7-4aec-a5fe-7bdb4195d49d\") " pod="openstack/placement-db-sync-j8njx" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.926405 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4638bb90-28e7-4aec-a5fe-7bdb4195d49d-combined-ca-bundle\") pod \"placement-db-sync-j8njx\" (UID: \"4638bb90-28e7-4aec-a5fe-7bdb4195d49d\") " pod="openstack/placement-db-sync-j8njx" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.926440 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4638bb90-28e7-4aec-a5fe-7bdb4195d49d-scripts\") pod \"placement-db-sync-j8njx\" (UID: \"4638bb90-28e7-4aec-a5fe-7bdb4195d49d\") " pod="openstack/placement-db-sync-j8njx" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.950207 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qldkl\" (UniqueName: 
\"kubernetes.io/projected/4638bb90-28e7-4aec-a5fe-7bdb4195d49d-kube-api-access-qldkl\") pod \"placement-db-sync-j8njx\" (UID: \"4638bb90-28e7-4aec-a5fe-7bdb4195d49d\") " pod="openstack/placement-db-sync-j8njx" Oct 10 18:03:25 crc kubenswrapper[4799]: I1010 18:03:25.956836 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-j8njx" Oct 10 18:03:26 crc kubenswrapper[4799]: I1010 18:03:26.389324 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-fdf47c55f-nkhwh"] Oct 10 18:03:26 crc kubenswrapper[4799]: I1010 18:03:26.485127 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-j8njx"] Oct 10 18:03:26 crc kubenswrapper[4799]: W1010 18:03:26.485248 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4638bb90_28e7_4aec_a5fe_7bdb4195d49d.slice/crio-e51588389ee7b9828cef849f6b95f10f6b078a0f3fa6312b104076d2b4e07ee3 WatchSource:0}: Error finding container e51588389ee7b9828cef849f6b95f10f6b078a0f3fa6312b104076d2b4e07ee3: Status 404 returned error can't find the container with id e51588389ee7b9828cef849f6b95f10f6b078a0f3fa6312b104076d2b4e07ee3 Oct 10 18:03:26 crc kubenswrapper[4799]: I1010 18:03:26.782109 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-j8njx" event={"ID":"4638bb90-28e7-4aec-a5fe-7bdb4195d49d","Type":"ContainerStarted","Data":"680516a7ccd4b2d45746a58f833cb75b023766d7ade5e0e37ae374462bc329c7"} Oct 10 18:03:26 crc kubenswrapper[4799]: I1010 18:03:26.782507 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-j8njx" event={"ID":"4638bb90-28e7-4aec-a5fe-7bdb4195d49d","Type":"ContainerStarted","Data":"e51588389ee7b9828cef849f6b95f10f6b078a0f3fa6312b104076d2b4e07ee3"} Oct 10 18:03:26 crc kubenswrapper[4799]: I1010 18:03:26.783634 4799 generic.go:334] "Generic (PLEG): container finished" podID="a45728b4-650f-4ed5-bb39-118e04708f2b" containerID="6c38fb25af6f322d183cc6e140efbf88e0923f4c5ac469ccb33ebe6d1a3408ed" exitCode=0 Oct 10 18:03:26 crc kubenswrapper[4799]: I1010 18:03:26.783687 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fdf47c55f-nkhwh" event={"ID":"a45728b4-650f-4ed5-bb39-118e04708f2b","Type":"ContainerDied","Data":"6c38fb25af6f322d183cc6e140efbf88e0923f4c5ac469ccb33ebe6d1a3408ed"} Oct 10 18:03:26 crc kubenswrapper[4799]: I1010 18:03:26.783726 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fdf47c55f-nkhwh" event={"ID":"a45728b4-650f-4ed5-bb39-118e04708f2b","Type":"ContainerStarted","Data":"cd125e760601632b83474e9299c09ebbf97fbb09498ea2cef6fbb458e0ebfc04"} Oct 10 18:03:26 crc kubenswrapper[4799]: I1010 18:03:26.802650 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-j8njx" podStartSLOduration=1.802622076 podStartE2EDuration="1.802622076s" podCreationTimestamp="2025-10-10 18:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:03:26.800482654 +0000 UTC m=+5500.308806799" watchObservedRunningTime="2025-10-10 18:03:26.802622076 +0000 UTC m=+5500.310946231" Oct 10 18:03:27 crc kubenswrapper[4799]: I1010 18:03:27.797292 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fdf47c55f-nkhwh" 
event={"ID":"a45728b4-650f-4ed5-bb39-118e04708f2b","Type":"ContainerStarted","Data":"1c45ebbecf3f756ac75772ae3d242481cb7f4650a4ab85c9b14f261da87c52c1"} Oct 10 18:03:28 crc kubenswrapper[4799]: I1010 18:03:28.811593 4799 generic.go:334] "Generic (PLEG): container finished" podID="4638bb90-28e7-4aec-a5fe-7bdb4195d49d" containerID="680516a7ccd4b2d45746a58f833cb75b023766d7ade5e0e37ae374462bc329c7" exitCode=0 Oct 10 18:03:28 crc kubenswrapper[4799]: I1010 18:03:28.811717 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-j8njx" event={"ID":"4638bb90-28e7-4aec-a5fe-7bdb4195d49d","Type":"ContainerDied","Data":"680516a7ccd4b2d45746a58f833cb75b023766d7ade5e0e37ae374462bc329c7"} Oct 10 18:03:28 crc kubenswrapper[4799]: I1010 18:03:28.812056 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-fdf47c55f-nkhwh" Oct 10 18:03:28 crc kubenswrapper[4799]: I1010 18:03:28.835897 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-fdf47c55f-nkhwh" podStartSLOduration=3.835865664 podStartE2EDuration="3.835865664s" podCreationTimestamp="2025-10-10 18:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:03:27.826448901 +0000 UTC m=+5501.334773016" watchObservedRunningTime="2025-10-10 18:03:28.835865664 +0000 UTC m=+5502.344189819" Oct 10 18:03:30 crc kubenswrapper[4799]: I1010 18:03:30.177274 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-j8njx" Oct 10 18:03:30 crc kubenswrapper[4799]: I1010 18:03:30.312257 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qldkl\" (UniqueName: \"kubernetes.io/projected/4638bb90-28e7-4aec-a5fe-7bdb4195d49d-kube-api-access-qldkl\") pod \"4638bb90-28e7-4aec-a5fe-7bdb4195d49d\" (UID: \"4638bb90-28e7-4aec-a5fe-7bdb4195d49d\") " Oct 10 18:03:30 crc kubenswrapper[4799]: I1010 18:03:30.312436 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4638bb90-28e7-4aec-a5fe-7bdb4195d49d-combined-ca-bundle\") pod \"4638bb90-28e7-4aec-a5fe-7bdb4195d49d\" (UID: \"4638bb90-28e7-4aec-a5fe-7bdb4195d49d\") " Oct 10 18:03:30 crc kubenswrapper[4799]: I1010 18:03:30.312485 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4638bb90-28e7-4aec-a5fe-7bdb4195d49d-logs\") pod \"4638bb90-28e7-4aec-a5fe-7bdb4195d49d\" (UID: \"4638bb90-28e7-4aec-a5fe-7bdb4195d49d\") " Oct 10 18:03:30 crc kubenswrapper[4799]: I1010 18:03:30.313168 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4638bb90-28e7-4aec-a5fe-7bdb4195d49d-logs" (OuterVolumeSpecName: "logs") pod "4638bb90-28e7-4aec-a5fe-7bdb4195d49d" (UID: "4638bb90-28e7-4aec-a5fe-7bdb4195d49d"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:03:30 crc kubenswrapper[4799]: I1010 18:03:30.313241 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4638bb90-28e7-4aec-a5fe-7bdb4195d49d-config-data\") pod \"4638bb90-28e7-4aec-a5fe-7bdb4195d49d\" (UID: \"4638bb90-28e7-4aec-a5fe-7bdb4195d49d\") " Oct 10 18:03:30 crc kubenswrapper[4799]: I1010 18:03:30.313624 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4638bb90-28e7-4aec-a5fe-7bdb4195d49d-scripts\") pod \"4638bb90-28e7-4aec-a5fe-7bdb4195d49d\" (UID: \"4638bb90-28e7-4aec-a5fe-7bdb4195d49d\") " Oct 10 18:03:30 crc kubenswrapper[4799]: I1010 18:03:30.314212 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4638bb90-28e7-4aec-a5fe-7bdb4195d49d-logs\") on node \"crc\" DevicePath \"\"" Oct 10 18:03:30 crc kubenswrapper[4799]: I1010 18:03:30.321537 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4638bb90-28e7-4aec-a5fe-7bdb4195d49d-scripts" (OuterVolumeSpecName: "scripts") pod "4638bb90-28e7-4aec-a5fe-7bdb4195d49d" (UID: "4638bb90-28e7-4aec-a5fe-7bdb4195d49d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:03:30 crc kubenswrapper[4799]: I1010 18:03:30.322081 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4638bb90-28e7-4aec-a5fe-7bdb4195d49d-kube-api-access-qldkl" (OuterVolumeSpecName: "kube-api-access-qldkl") pod "4638bb90-28e7-4aec-a5fe-7bdb4195d49d" (UID: "4638bb90-28e7-4aec-a5fe-7bdb4195d49d"). InnerVolumeSpecName "kube-api-access-qldkl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:03:30 crc kubenswrapper[4799]: I1010 18:03:30.360165 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4638bb90-28e7-4aec-a5fe-7bdb4195d49d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4638bb90-28e7-4aec-a5fe-7bdb4195d49d" (UID: "4638bb90-28e7-4aec-a5fe-7bdb4195d49d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:03:30 crc kubenswrapper[4799]: I1010 18:03:30.362872 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4638bb90-28e7-4aec-a5fe-7bdb4195d49d-config-data" (OuterVolumeSpecName: "config-data") pod "4638bb90-28e7-4aec-a5fe-7bdb4195d49d" (UID: "4638bb90-28e7-4aec-a5fe-7bdb4195d49d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:03:30 crc kubenswrapper[4799]: I1010 18:03:30.416658 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4638bb90-28e7-4aec-a5fe-7bdb4195d49d-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 18:03:30 crc kubenswrapper[4799]: I1010 18:03:30.416715 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4638bb90-28e7-4aec-a5fe-7bdb4195d49d-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 18:03:30 crc kubenswrapper[4799]: I1010 18:03:30.416735 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qldkl\" (UniqueName: \"kubernetes.io/projected/4638bb90-28e7-4aec-a5fe-7bdb4195d49d-kube-api-access-qldkl\") on node \"crc\" DevicePath \"\"" Oct 10 18:03:30 crc kubenswrapper[4799]: I1010 18:03:30.416763 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4638bb90-28e7-4aec-a5fe-7bdb4195d49d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:03:30 crc kubenswrapper[4799]: I1010 18:03:30.834595 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-j8njx" event={"ID":"4638bb90-28e7-4aec-a5fe-7bdb4195d49d","Type":"ContainerDied","Data":"e51588389ee7b9828cef849f6b95f10f6b078a0f3fa6312b104076d2b4e07ee3"} Oct 10 18:03:30 crc kubenswrapper[4799]: I1010 18:03:30.834639 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e51588389ee7b9828cef849f6b95f10f6b078a0f3fa6312b104076d2b4e07ee3" Oct 10 18:03:30 crc kubenswrapper[4799]: I1010 18:03:30.834675 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-j8njx" Oct 10 18:03:30 crc kubenswrapper[4799]: I1010 18:03:30.939087 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-6b78c5594-k6jlg"] Oct 10 18:03:30 crc kubenswrapper[4799]: E1010 18:03:30.939505 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4638bb90-28e7-4aec-a5fe-7bdb4195d49d" containerName="placement-db-sync" Oct 10 18:03:30 crc kubenswrapper[4799]: I1010 18:03:30.939530 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="4638bb90-28e7-4aec-a5fe-7bdb4195d49d" containerName="placement-db-sync" Oct 10 18:03:30 crc kubenswrapper[4799]: I1010 18:03:30.939751 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="4638bb90-28e7-4aec-a5fe-7bdb4195d49d" containerName="placement-db-sync" Oct 10 18:03:30 crc kubenswrapper[4799]: I1010 18:03:30.941036 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-6b78c5594-k6jlg" Oct 10 18:03:30 crc kubenswrapper[4799]: I1010 18:03:30.945460 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-tgt9v" Oct 10 18:03:30 crc kubenswrapper[4799]: I1010 18:03:30.945475 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 10 18:03:30 crc kubenswrapper[4799]: I1010 18:03:30.945722 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 10 18:03:30 crc kubenswrapper[4799]: I1010 18:03:30.955608 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-6b78c5594-k6jlg"] Oct 10 18:03:31 crc kubenswrapper[4799]: I1010 18:03:31.128743 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e35e375b-b334-49ca-865d-cc4852481337-config-data\") pod \"placement-6b78c5594-k6jlg\" (UID: \"e35e375b-b334-49ca-865d-cc4852481337\") " pod="openstack/placement-6b78c5594-k6jlg" Oct 10 18:03:31 crc kubenswrapper[4799]: I1010 18:03:31.128839 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnz5l\" (UniqueName: \"kubernetes.io/projected/e35e375b-b334-49ca-865d-cc4852481337-kube-api-access-xnz5l\") pod \"placement-6b78c5594-k6jlg\" (UID: \"e35e375b-b334-49ca-865d-cc4852481337\") " pod="openstack/placement-6b78c5594-k6jlg" Oct 10 18:03:31 crc kubenswrapper[4799]: I1010 18:03:31.128911 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e35e375b-b334-49ca-865d-cc4852481337-logs\") pod \"placement-6b78c5594-k6jlg\" (UID: \"e35e375b-b334-49ca-865d-cc4852481337\") " pod="openstack/placement-6b78c5594-k6jlg" Oct 10 18:03:31 crc kubenswrapper[4799]: I1010 18:03:31.128934 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e35e375b-b334-49ca-865d-cc4852481337-combined-ca-bundle\") pod \"placement-6b78c5594-k6jlg\" (UID: \"e35e375b-b334-49ca-865d-cc4852481337\") " pod="openstack/placement-6b78c5594-k6jlg" Oct 10 18:03:31 crc kubenswrapper[4799]: I1010 18:03:31.128977 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e35e375b-b334-49ca-865d-cc4852481337-scripts\") pod \"placement-6b78c5594-k6jlg\" (UID: \"e35e375b-b334-49ca-865d-cc4852481337\") " pod="openstack/placement-6b78c5594-k6jlg" Oct 10 18:03:31 crc kubenswrapper[4799]: I1010 18:03:31.230792 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e35e375b-b334-49ca-865d-cc4852481337-scripts\") pod \"placement-6b78c5594-k6jlg\" (UID: \"e35e375b-b334-49ca-865d-cc4852481337\") " pod="openstack/placement-6b78c5594-k6jlg" Oct 10 18:03:31 crc kubenswrapper[4799]: I1010 18:03:31.230889 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e35e375b-b334-49ca-865d-cc4852481337-config-data\") pod \"placement-6b78c5594-k6jlg\" (UID: \"e35e375b-b334-49ca-865d-cc4852481337\") " pod="openstack/placement-6b78c5594-k6jlg" Oct 10 18:03:31 crc kubenswrapper[4799]: I1010 18:03:31.230950 4799 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-xnz5l\" (UniqueName: \"kubernetes.io/projected/e35e375b-b334-49ca-865d-cc4852481337-kube-api-access-xnz5l\") pod \"placement-6b78c5594-k6jlg\" (UID: \"e35e375b-b334-49ca-865d-cc4852481337\") " pod="openstack/placement-6b78c5594-k6jlg" Oct 10 18:03:31 crc kubenswrapper[4799]: I1010 18:03:31.230998 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e35e375b-b334-49ca-865d-cc4852481337-logs\") pod \"placement-6b78c5594-k6jlg\" (UID: \"e35e375b-b334-49ca-865d-cc4852481337\") " pod="openstack/placement-6b78c5594-k6jlg" Oct 10 18:03:31 crc kubenswrapper[4799]: I1010 18:03:31.231016 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e35e375b-b334-49ca-865d-cc4852481337-combined-ca-bundle\") pod \"placement-6b78c5594-k6jlg\" (UID: \"e35e375b-b334-49ca-865d-cc4852481337\") " pod="openstack/placement-6b78c5594-k6jlg" Oct 10 18:03:31 crc kubenswrapper[4799]: I1010 18:03:31.231743 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e35e375b-b334-49ca-865d-cc4852481337-logs\") pod \"placement-6b78c5594-k6jlg\" (UID: \"e35e375b-b334-49ca-865d-cc4852481337\") " pod="openstack/placement-6b78c5594-k6jlg" Oct 10 18:03:31 crc kubenswrapper[4799]: I1010 18:03:31.235239 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e35e375b-b334-49ca-865d-cc4852481337-scripts\") pod \"placement-6b78c5594-k6jlg\" (UID: \"e35e375b-b334-49ca-865d-cc4852481337\") " pod="openstack/placement-6b78c5594-k6jlg" Oct 10 18:03:31 crc kubenswrapper[4799]: I1010 18:03:31.236451 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e35e375b-b334-49ca-865d-cc4852481337-config-data\") pod \"placement-6b78c5594-k6jlg\" (UID: \"e35e375b-b334-49ca-865d-cc4852481337\") " pod="openstack/placement-6b78c5594-k6jlg" Oct 10 18:03:31 crc kubenswrapper[4799]: I1010 18:03:31.236991 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e35e375b-b334-49ca-865d-cc4852481337-combined-ca-bundle\") pod \"placement-6b78c5594-k6jlg\" (UID: \"e35e375b-b334-49ca-865d-cc4852481337\") " pod="openstack/placement-6b78c5594-k6jlg" Oct 10 18:03:31 crc kubenswrapper[4799]: I1010 18:03:31.251364 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnz5l\" (UniqueName: \"kubernetes.io/projected/e35e375b-b334-49ca-865d-cc4852481337-kube-api-access-xnz5l\") pod \"placement-6b78c5594-k6jlg\" (UID: \"e35e375b-b334-49ca-865d-cc4852481337\") " pod="openstack/placement-6b78c5594-k6jlg" Oct 10 18:03:31 crc kubenswrapper[4799]: I1010 18:03:31.273805 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-6b78c5594-k6jlg" Oct 10 18:03:31 crc kubenswrapper[4799]: I1010 18:03:31.753201 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-6b78c5594-k6jlg"] Oct 10 18:03:31 crc kubenswrapper[4799]: W1010 18:03:31.758016 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode35e375b_b334_49ca_865d_cc4852481337.slice/crio-c1dc09ecadd0ac049860746eabbdc0dc9d5ae0dc514eb5254701122254cc0c85 WatchSource:0}: Error finding container c1dc09ecadd0ac049860746eabbdc0dc9d5ae0dc514eb5254701122254cc0c85: Status 404 returned error can't find the container with id c1dc09ecadd0ac049860746eabbdc0dc9d5ae0dc514eb5254701122254cc0c85 Oct 10 18:03:31 crc kubenswrapper[4799]: I1010 18:03:31.865093 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6b78c5594-k6jlg" event={"ID":"e35e375b-b334-49ca-865d-cc4852481337","Type":"ContainerStarted","Data":"c1dc09ecadd0ac049860746eabbdc0dc9d5ae0dc514eb5254701122254cc0c85"} Oct 10 18:03:32 crc kubenswrapper[4799]: I1010 18:03:32.881443 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6b78c5594-k6jlg" event={"ID":"e35e375b-b334-49ca-865d-cc4852481337","Type":"ContainerStarted","Data":"24e80b346e780af7be0d35c8a21fabaa9428ba8c5e81ddd6772571efe42d73d5"} Oct 10 18:03:32 crc kubenswrapper[4799]: I1010 18:03:32.882033 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6b78c5594-k6jlg" event={"ID":"e35e375b-b334-49ca-865d-cc4852481337","Type":"ContainerStarted","Data":"17ebd7d62b4c010b08e4e4c630952382357a677ae26f87f21a064671c95cfbcf"} Oct 10 18:03:32 crc kubenswrapper[4799]: I1010 18:03:32.882072 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-6b78c5594-k6jlg" Oct 10 18:03:32 crc kubenswrapper[4799]: I1010 18:03:32.882093 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-6b78c5594-k6jlg" Oct 10 18:03:32 crc kubenswrapper[4799]: I1010 18:03:32.913899 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-6b78c5594-k6jlg" podStartSLOduration=2.913858499 podStartE2EDuration="2.913858499s" podCreationTimestamp="2025-10-10 18:03:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:03:32.903879556 +0000 UTC m=+5506.412203681" watchObservedRunningTime="2025-10-10 18:03:32.913858499 +0000 UTC m=+5506.422182694" Oct 10 18:03:35 crc kubenswrapper[4799]: I1010 18:03:35.898140 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-fdf47c55f-nkhwh" Oct 10 18:03:36 crc kubenswrapper[4799]: I1010 18:03:36.032157 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-769797d5d7-zjthw"] Oct 10 18:03:36 crc kubenswrapper[4799]: I1010 18:03:36.032456 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-769797d5d7-zjthw" podUID="c4fbb2c2-9422-4e1c-b7f2-d88141521268" containerName="dnsmasq-dns" containerID="cri-o://37961f471cf3b8844a085d0d50743c6963fac5c11832c3e36f77c653f31569fc" gracePeriod=10 Oct 10 18:03:36 crc kubenswrapper[4799]: I1010 18:03:36.482130 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-769797d5d7-zjthw" Oct 10 18:03:36 crc kubenswrapper[4799]: I1010 18:03:36.573784 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4fbb2c2-9422-4e1c-b7f2-d88141521268-config\") pod \"c4fbb2c2-9422-4e1c-b7f2-d88141521268\" (UID: \"c4fbb2c2-9422-4e1c-b7f2-d88141521268\") " Oct 10 18:03:36 crc kubenswrapper[4799]: I1010 18:03:36.573851 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qth6d\" (UniqueName: \"kubernetes.io/projected/c4fbb2c2-9422-4e1c-b7f2-d88141521268-kube-api-access-qth6d\") pod \"c4fbb2c2-9422-4e1c-b7f2-d88141521268\" (UID: \"c4fbb2c2-9422-4e1c-b7f2-d88141521268\") " Oct 10 18:03:36 crc kubenswrapper[4799]: I1010 18:03:36.573901 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c4fbb2c2-9422-4e1c-b7f2-d88141521268-dns-svc\") pod \"c4fbb2c2-9422-4e1c-b7f2-d88141521268\" (UID: \"c4fbb2c2-9422-4e1c-b7f2-d88141521268\") " Oct 10 18:03:36 crc kubenswrapper[4799]: I1010 18:03:36.573937 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c4fbb2c2-9422-4e1c-b7f2-d88141521268-ovsdbserver-nb\") pod \"c4fbb2c2-9422-4e1c-b7f2-d88141521268\" (UID: \"c4fbb2c2-9422-4e1c-b7f2-d88141521268\") " Oct 10 18:03:36 crc kubenswrapper[4799]: I1010 18:03:36.573958 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c4fbb2c2-9422-4e1c-b7f2-d88141521268-ovsdbserver-sb\") pod \"c4fbb2c2-9422-4e1c-b7f2-d88141521268\" (UID: \"c4fbb2c2-9422-4e1c-b7f2-d88141521268\") " Oct 10 18:03:36 crc kubenswrapper[4799]: I1010 18:03:36.590961 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4fbb2c2-9422-4e1c-b7f2-d88141521268-kube-api-access-qth6d" (OuterVolumeSpecName: "kube-api-access-qth6d") pod "c4fbb2c2-9422-4e1c-b7f2-d88141521268" (UID: "c4fbb2c2-9422-4e1c-b7f2-d88141521268"). InnerVolumeSpecName "kube-api-access-qth6d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:03:36 crc kubenswrapper[4799]: I1010 18:03:36.614566 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4fbb2c2-9422-4e1c-b7f2-d88141521268-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c4fbb2c2-9422-4e1c-b7f2-d88141521268" (UID: "c4fbb2c2-9422-4e1c-b7f2-d88141521268"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:03:36 crc kubenswrapper[4799]: I1010 18:03:36.617175 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4fbb2c2-9422-4e1c-b7f2-d88141521268-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c4fbb2c2-9422-4e1c-b7f2-d88141521268" (UID: "c4fbb2c2-9422-4e1c-b7f2-d88141521268"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:03:36 crc kubenswrapper[4799]: I1010 18:03:36.622630 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4fbb2c2-9422-4e1c-b7f2-d88141521268-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c4fbb2c2-9422-4e1c-b7f2-d88141521268" (UID: "c4fbb2c2-9422-4e1c-b7f2-d88141521268"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:03:36 crc kubenswrapper[4799]: I1010 18:03:36.625063 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4fbb2c2-9422-4e1c-b7f2-d88141521268-config" (OuterVolumeSpecName: "config") pod "c4fbb2c2-9422-4e1c-b7f2-d88141521268" (UID: "c4fbb2c2-9422-4e1c-b7f2-d88141521268"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:03:36 crc kubenswrapper[4799]: I1010 18:03:36.676044 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c4fbb2c2-9422-4e1c-b7f2-d88141521268-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 10 18:03:36 crc kubenswrapper[4799]: I1010 18:03:36.676084 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c4fbb2c2-9422-4e1c-b7f2-d88141521268-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 10 18:03:36 crc kubenswrapper[4799]: I1010 18:03:36.676097 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c4fbb2c2-9422-4e1c-b7f2-d88141521268-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 10 18:03:36 crc kubenswrapper[4799]: I1010 18:03:36.676108 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4fbb2c2-9422-4e1c-b7f2-d88141521268-config\") on node \"crc\" DevicePath \"\"" Oct 10 18:03:36 crc kubenswrapper[4799]: I1010 18:03:36.676120 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qth6d\" (UniqueName: \"kubernetes.io/projected/c4fbb2c2-9422-4e1c-b7f2-d88141521268-kube-api-access-qth6d\") on node \"crc\" DevicePath \"\"" Oct 10 18:03:36 crc kubenswrapper[4799]: I1010 18:03:36.932061 4799 generic.go:334] "Generic (PLEG): container finished" podID="c4fbb2c2-9422-4e1c-b7f2-d88141521268" containerID="37961f471cf3b8844a085d0d50743c6963fac5c11832c3e36f77c653f31569fc" exitCode=0 Oct 10 18:03:36 crc kubenswrapper[4799]: I1010 18:03:36.932137 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-769797d5d7-zjthw" event={"ID":"c4fbb2c2-9422-4e1c-b7f2-d88141521268","Type":"ContainerDied","Data":"37961f471cf3b8844a085d0d50743c6963fac5c11832c3e36f77c653f31569fc"} Oct 10 18:03:36 crc kubenswrapper[4799]: I1010 18:03:36.932194 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-769797d5d7-zjthw" Oct 10 18:03:36 crc kubenswrapper[4799]: I1010 18:03:36.932229 4799 scope.go:117] "RemoveContainer" containerID="37961f471cf3b8844a085d0d50743c6963fac5c11832c3e36f77c653f31569fc" Oct 10 18:03:36 crc kubenswrapper[4799]: I1010 18:03:36.932198 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-769797d5d7-zjthw" event={"ID":"c4fbb2c2-9422-4e1c-b7f2-d88141521268","Type":"ContainerDied","Data":"f3b0f1650516a69f468baa63b58465d4daad9bc7d95a3ed5de09fd2ba856bea2"} Oct 10 18:03:36 crc kubenswrapper[4799]: I1010 18:03:36.965113 4799 scope.go:117] "RemoveContainer" containerID="150281defd8d9e8ef5efbec1515b4b93df6dc1d4e2a14187b78561759045a1f9" Oct 10 18:03:36 crc kubenswrapper[4799]: I1010 18:03:36.990703 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-769797d5d7-zjthw"] Oct 10 18:03:36 crc kubenswrapper[4799]: I1010 18:03:36.997335 4799 scope.go:117] "RemoveContainer" containerID="37961f471cf3b8844a085d0d50743c6963fac5c11832c3e36f77c653f31569fc" Oct 10 18:03:36 crc kubenswrapper[4799]: I1010 18:03:36.998066 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-769797d5d7-zjthw"] Oct 10 18:03:36 crc kubenswrapper[4799]: E1010 18:03:36.998106 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37961f471cf3b8844a085d0d50743c6963fac5c11832c3e36f77c653f31569fc\": container with ID starting with 37961f471cf3b8844a085d0d50743c6963fac5c11832c3e36f77c653f31569fc not found: ID does not exist" containerID="37961f471cf3b8844a085d0d50743c6963fac5c11832c3e36f77c653f31569fc" Oct 10 18:03:36 crc kubenswrapper[4799]: I1010 18:03:36.998169 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37961f471cf3b8844a085d0d50743c6963fac5c11832c3e36f77c653f31569fc"} err="failed to get container status \"37961f471cf3b8844a085d0d50743c6963fac5c11832c3e36f77c653f31569fc\": rpc error: code = NotFound desc = could not find container \"37961f471cf3b8844a085d0d50743c6963fac5c11832c3e36f77c653f31569fc\": container with ID starting with 37961f471cf3b8844a085d0d50743c6963fac5c11832c3e36f77c653f31569fc not found: ID does not exist" Oct 10 18:03:36 crc kubenswrapper[4799]: I1010 18:03:36.998216 4799 scope.go:117] "RemoveContainer" containerID="150281defd8d9e8ef5efbec1515b4b93df6dc1d4e2a14187b78561759045a1f9" Oct 10 18:03:36 crc kubenswrapper[4799]: E1010 18:03:36.998794 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"150281defd8d9e8ef5efbec1515b4b93df6dc1d4e2a14187b78561759045a1f9\": container with ID starting with 150281defd8d9e8ef5efbec1515b4b93df6dc1d4e2a14187b78561759045a1f9 not found: ID does not exist" containerID="150281defd8d9e8ef5efbec1515b4b93df6dc1d4e2a14187b78561759045a1f9" Oct 10 18:03:36 crc kubenswrapper[4799]: I1010 18:03:36.998834 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"150281defd8d9e8ef5efbec1515b4b93df6dc1d4e2a14187b78561759045a1f9"} err="failed to get container status \"150281defd8d9e8ef5efbec1515b4b93df6dc1d4e2a14187b78561759045a1f9\": rpc error: code = NotFound desc = could not find container \"150281defd8d9e8ef5efbec1515b4b93df6dc1d4e2a14187b78561759045a1f9\": container with ID starting with 150281defd8d9e8ef5efbec1515b4b93df6dc1d4e2a14187b78561759045a1f9 not found: ID does not exist" Oct 10 
18:03:37 crc kubenswrapper[4799]: I1010 18:03:37.422458 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4fbb2c2-9422-4e1c-b7f2-d88141521268" path="/var/lib/kubelet/pods/c4fbb2c2-9422-4e1c-b7f2-d88141521268/volumes" Oct 10 18:04:02 crc kubenswrapper[4799]: I1010 18:04:02.260333 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-6b78c5594-k6jlg" Oct 10 18:04:03 crc kubenswrapper[4799]: I1010 18:04:03.315481 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-6b78c5594-k6jlg" Oct 10 18:04:07 crc kubenswrapper[4799]: I1010 18:04:07.655965 4799 scope.go:117] "RemoveContainer" containerID="4508f7112a9b38f537a7e70bbe8de8d2cbb851fcb269dc35bb42f56b5e1a511e" Oct 10 18:04:26 crc kubenswrapper[4799]: I1010 18:04:26.311137 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-xfnk5"] Oct 10 18:04:26 crc kubenswrapper[4799]: E1010 18:04:26.311962 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4fbb2c2-9422-4e1c-b7f2-d88141521268" containerName="init" Oct 10 18:04:26 crc kubenswrapper[4799]: I1010 18:04:26.311974 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4fbb2c2-9422-4e1c-b7f2-d88141521268" containerName="init" Oct 10 18:04:26 crc kubenswrapper[4799]: E1010 18:04:26.311991 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4fbb2c2-9422-4e1c-b7f2-d88141521268" containerName="dnsmasq-dns" Oct 10 18:04:26 crc kubenswrapper[4799]: I1010 18:04:26.311997 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4fbb2c2-9422-4e1c-b7f2-d88141521268" containerName="dnsmasq-dns" Oct 10 18:04:26 crc kubenswrapper[4799]: I1010 18:04:26.312146 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4fbb2c2-9422-4e1c-b7f2-d88141521268" containerName="dnsmasq-dns" Oct 10 18:04:26 crc kubenswrapper[4799]: I1010 18:04:26.312834 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-xfnk5" Oct 10 18:04:26 crc kubenswrapper[4799]: I1010 18:04:26.319693 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-xfnk5"] Oct 10 18:04:26 crc kubenswrapper[4799]: I1010 18:04:26.380345 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxkz7\" (UniqueName: \"kubernetes.io/projected/55013235-ff21-4a02-bb37-8eef30a18d79-kube-api-access-jxkz7\") pod \"nova-api-db-create-xfnk5\" (UID: \"55013235-ff21-4a02-bb37-8eef30a18d79\") " pod="openstack/nova-api-db-create-xfnk5" Oct 10 18:04:26 crc kubenswrapper[4799]: I1010 18:04:26.390586 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-z2qgp"] Oct 10 18:04:26 crc kubenswrapper[4799]: I1010 18:04:26.392056 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-z2qgp" Oct 10 18:04:26 crc kubenswrapper[4799]: I1010 18:04:26.401307 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-z2qgp"] Oct 10 18:04:26 crc kubenswrapper[4799]: I1010 18:04:26.482727 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxkz7\" (UniqueName: \"kubernetes.io/projected/55013235-ff21-4a02-bb37-8eef30a18d79-kube-api-access-jxkz7\") pod \"nova-api-db-create-xfnk5\" (UID: \"55013235-ff21-4a02-bb37-8eef30a18d79\") " pod="openstack/nova-api-db-create-xfnk5" Oct 10 18:04:26 crc kubenswrapper[4799]: I1010 18:04:26.482822 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9kkfc\" (UniqueName: \"kubernetes.io/projected/dd05328d-e9e7-4afb-ae9e-2729453b99db-kube-api-access-9kkfc\") pod \"nova-cell0-db-create-z2qgp\" (UID: \"dd05328d-e9e7-4afb-ae9e-2729453b99db\") " pod="openstack/nova-cell0-db-create-z2qgp" Oct 10 18:04:26 crc kubenswrapper[4799]: I1010 18:04:26.502650 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-n52tc"] Oct 10 18:04:26 crc kubenswrapper[4799]: I1010 18:04:26.504105 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-n52tc" Oct 10 18:04:26 crc kubenswrapper[4799]: I1010 18:04:26.509201 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-n52tc"] Oct 10 18:04:26 crc kubenswrapper[4799]: I1010 18:04:26.517902 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxkz7\" (UniqueName: \"kubernetes.io/projected/55013235-ff21-4a02-bb37-8eef30a18d79-kube-api-access-jxkz7\") pod \"nova-api-db-create-xfnk5\" (UID: \"55013235-ff21-4a02-bb37-8eef30a18d79\") " pod="openstack/nova-api-db-create-xfnk5" Oct 10 18:04:26 crc kubenswrapper[4799]: I1010 18:04:26.586884 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkjj9\" (UniqueName: \"kubernetes.io/projected/e57db8dc-b992-4e8f-9d44-e23579585d4c-kube-api-access-kkjj9\") pod \"nova-cell1-db-create-n52tc\" (UID: \"e57db8dc-b992-4e8f-9d44-e23579585d4c\") " pod="openstack/nova-cell1-db-create-n52tc" Oct 10 18:04:26 crc kubenswrapper[4799]: I1010 18:04:26.586969 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9kkfc\" (UniqueName: \"kubernetes.io/projected/dd05328d-e9e7-4afb-ae9e-2729453b99db-kube-api-access-9kkfc\") pod \"nova-cell0-db-create-z2qgp\" (UID: \"dd05328d-e9e7-4afb-ae9e-2729453b99db\") " pod="openstack/nova-cell0-db-create-z2qgp" Oct 10 18:04:26 crc kubenswrapper[4799]: I1010 18:04:26.605560 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9kkfc\" (UniqueName: \"kubernetes.io/projected/dd05328d-e9e7-4afb-ae9e-2729453b99db-kube-api-access-9kkfc\") pod \"nova-cell0-db-create-z2qgp\" (UID: \"dd05328d-e9e7-4afb-ae9e-2729453b99db\") " pod="openstack/nova-cell0-db-create-z2qgp" Oct 10 18:04:26 crc kubenswrapper[4799]: I1010 18:04:26.632360 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-xfnk5" Oct 10 18:04:26 crc kubenswrapper[4799]: I1010 18:04:26.688816 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkjj9\" (UniqueName: \"kubernetes.io/projected/e57db8dc-b992-4e8f-9d44-e23579585d4c-kube-api-access-kkjj9\") pod \"nova-cell1-db-create-n52tc\" (UID: \"e57db8dc-b992-4e8f-9d44-e23579585d4c\") " pod="openstack/nova-cell1-db-create-n52tc" Oct 10 18:04:26 crc kubenswrapper[4799]: I1010 18:04:26.707856 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkjj9\" (UniqueName: \"kubernetes.io/projected/e57db8dc-b992-4e8f-9d44-e23579585d4c-kube-api-access-kkjj9\") pod \"nova-cell1-db-create-n52tc\" (UID: \"e57db8dc-b992-4e8f-9d44-e23579585d4c\") " pod="openstack/nova-cell1-db-create-n52tc" Oct 10 18:04:26 crc kubenswrapper[4799]: I1010 18:04:26.710706 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-z2qgp" Oct 10 18:04:26 crc kubenswrapper[4799]: I1010 18:04:26.862489 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-n52tc" Oct 10 18:04:27 crc kubenswrapper[4799]: I1010 18:04:27.150381 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-xfnk5"] Oct 10 18:04:27 crc kubenswrapper[4799]: I1010 18:04:27.208469 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-z2qgp"] Oct 10 18:04:27 crc kubenswrapper[4799]: W1010 18:04:27.215252 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddd05328d_e9e7_4afb_ae9e_2729453b99db.slice/crio-f141a4004b295e951ecbeb2eec7ab8402719b247daff6d064c1fb6d75cffe341 WatchSource:0}: Error finding container f141a4004b295e951ecbeb2eec7ab8402719b247daff6d064c1fb6d75cffe341: Status 404 returned error can't find the container with id f141a4004b295e951ecbeb2eec7ab8402719b247daff6d064c1fb6d75cffe341 Oct 10 18:04:27 crc kubenswrapper[4799]: W1010 18:04:27.310689 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode57db8dc_b992_4e8f_9d44_e23579585d4c.slice/crio-8f7097fbce58b4092c6adccffbf5b9bbdbe9111bf7704003a2b81ea20905a8ad WatchSource:0}: Error finding container 8f7097fbce58b4092c6adccffbf5b9bbdbe9111bf7704003a2b81ea20905a8ad: Status 404 returned error can't find the container with id 8f7097fbce58b4092c6adccffbf5b9bbdbe9111bf7704003a2b81ea20905a8ad Oct 10 18:04:27 crc kubenswrapper[4799]: I1010 18:04:27.313079 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-n52tc"] Oct 10 18:04:27 crc kubenswrapper[4799]: I1010 18:04:27.497595 4799 generic.go:334] "Generic (PLEG): container finished" podID="55013235-ff21-4a02-bb37-8eef30a18d79" containerID="628a3fb55758d762250afd6974f8b3b5570c41eedb259c89f7f3f7ddc394d78b" exitCode=0 Oct 10 18:04:27 crc kubenswrapper[4799]: I1010 18:04:27.498368 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-xfnk5" event={"ID":"55013235-ff21-4a02-bb37-8eef30a18d79","Type":"ContainerDied","Data":"628a3fb55758d762250afd6974f8b3b5570c41eedb259c89f7f3f7ddc394d78b"} Oct 10 18:04:27 crc kubenswrapper[4799]: I1010 18:04:27.498403 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-xfnk5" 
event={"ID":"55013235-ff21-4a02-bb37-8eef30a18d79","Type":"ContainerStarted","Data":"fda7d534abaa6791673a0543e19fe77d692d65b285894f8b6696bbf81fbb1bf4"} Oct 10 18:04:27 crc kubenswrapper[4799]: I1010 18:04:27.537426 4799 generic.go:334] "Generic (PLEG): container finished" podID="dd05328d-e9e7-4afb-ae9e-2729453b99db" containerID="b8a9d483839a0f7141408ac98e54d3805bb710ab4ece61f1a7c874fc0cdbca2d" exitCode=0 Oct 10 18:04:27 crc kubenswrapper[4799]: I1010 18:04:27.537537 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-z2qgp" event={"ID":"dd05328d-e9e7-4afb-ae9e-2729453b99db","Type":"ContainerDied","Data":"b8a9d483839a0f7141408ac98e54d3805bb710ab4ece61f1a7c874fc0cdbca2d"} Oct 10 18:04:27 crc kubenswrapper[4799]: I1010 18:04:27.537571 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-z2qgp" event={"ID":"dd05328d-e9e7-4afb-ae9e-2729453b99db","Type":"ContainerStarted","Data":"f141a4004b295e951ecbeb2eec7ab8402719b247daff6d064c1fb6d75cffe341"} Oct 10 18:04:27 crc kubenswrapper[4799]: I1010 18:04:27.551615 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-n52tc" event={"ID":"e57db8dc-b992-4e8f-9d44-e23579585d4c","Type":"ContainerStarted","Data":"8f7097fbce58b4092c6adccffbf5b9bbdbe9111bf7704003a2b81ea20905a8ad"} Oct 10 18:04:28 crc kubenswrapper[4799]: I1010 18:04:28.568087 4799 generic.go:334] "Generic (PLEG): container finished" podID="e57db8dc-b992-4e8f-9d44-e23579585d4c" containerID="afd029883384655ab6781db94c6a93fbe761608618564bb089d9cf86a9fc29d6" exitCode=0 Oct 10 18:04:28 crc kubenswrapper[4799]: I1010 18:04:28.568169 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-n52tc" event={"ID":"e57db8dc-b992-4e8f-9d44-e23579585d4c","Type":"ContainerDied","Data":"afd029883384655ab6781db94c6a93fbe761608618564bb089d9cf86a9fc29d6"} Oct 10 18:04:29 crc kubenswrapper[4799]: I1010 18:04:29.064634 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-z2qgp" Oct 10 18:04:29 crc kubenswrapper[4799]: I1010 18:04:29.145056 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9kkfc\" (UniqueName: \"kubernetes.io/projected/dd05328d-e9e7-4afb-ae9e-2729453b99db-kube-api-access-9kkfc\") pod \"dd05328d-e9e7-4afb-ae9e-2729453b99db\" (UID: \"dd05328d-e9e7-4afb-ae9e-2729453b99db\") " Oct 10 18:04:29 crc kubenswrapper[4799]: I1010 18:04:29.156221 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd05328d-e9e7-4afb-ae9e-2729453b99db-kube-api-access-9kkfc" (OuterVolumeSpecName: "kube-api-access-9kkfc") pod "dd05328d-e9e7-4afb-ae9e-2729453b99db" (UID: "dd05328d-e9e7-4afb-ae9e-2729453b99db"). InnerVolumeSpecName "kube-api-access-9kkfc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:04:29 crc kubenswrapper[4799]: I1010 18:04:29.207316 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-xfnk5" Oct 10 18:04:29 crc kubenswrapper[4799]: I1010 18:04:29.247451 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9kkfc\" (UniqueName: \"kubernetes.io/projected/dd05328d-e9e7-4afb-ae9e-2729453b99db-kube-api-access-9kkfc\") on node \"crc\" DevicePath \"\"" Oct 10 18:04:29 crc kubenswrapper[4799]: I1010 18:04:29.348667 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jxkz7\" (UniqueName: \"kubernetes.io/projected/55013235-ff21-4a02-bb37-8eef30a18d79-kube-api-access-jxkz7\") pod \"55013235-ff21-4a02-bb37-8eef30a18d79\" (UID: \"55013235-ff21-4a02-bb37-8eef30a18d79\") " Oct 10 18:04:29 crc kubenswrapper[4799]: I1010 18:04:29.351721 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55013235-ff21-4a02-bb37-8eef30a18d79-kube-api-access-jxkz7" (OuterVolumeSpecName: "kube-api-access-jxkz7") pod "55013235-ff21-4a02-bb37-8eef30a18d79" (UID: "55013235-ff21-4a02-bb37-8eef30a18d79"). InnerVolumeSpecName "kube-api-access-jxkz7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:04:29 crc kubenswrapper[4799]: I1010 18:04:29.451889 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jxkz7\" (UniqueName: \"kubernetes.io/projected/55013235-ff21-4a02-bb37-8eef30a18d79-kube-api-access-jxkz7\") on node \"crc\" DevicePath \"\"" Oct 10 18:04:29 crc kubenswrapper[4799]: I1010 18:04:29.585546 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-z2qgp" event={"ID":"dd05328d-e9e7-4afb-ae9e-2729453b99db","Type":"ContainerDied","Data":"f141a4004b295e951ecbeb2eec7ab8402719b247daff6d064c1fb6d75cffe341"} Oct 10 18:04:29 crc kubenswrapper[4799]: I1010 18:04:29.587055 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f141a4004b295e951ecbeb2eec7ab8402719b247daff6d064c1fb6d75cffe341" Oct 10 18:04:29 crc kubenswrapper[4799]: I1010 18:04:29.585650 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-z2qgp" Oct 10 18:04:29 crc kubenswrapper[4799]: I1010 18:04:29.589018 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-xfnk5" Oct 10 18:04:29 crc kubenswrapper[4799]: I1010 18:04:29.589028 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-xfnk5" event={"ID":"55013235-ff21-4a02-bb37-8eef30a18d79","Type":"ContainerDied","Data":"fda7d534abaa6791673a0543e19fe77d692d65b285894f8b6696bbf81fbb1bf4"} Oct 10 18:04:29 crc kubenswrapper[4799]: I1010 18:04:29.589112 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fda7d534abaa6791673a0543e19fe77d692d65b285894f8b6696bbf81fbb1bf4" Oct 10 18:04:29 crc kubenswrapper[4799]: I1010 18:04:29.918110 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-n52tc" Oct 10 18:04:30 crc kubenswrapper[4799]: I1010 18:04:30.061471 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kkjj9\" (UniqueName: \"kubernetes.io/projected/e57db8dc-b992-4e8f-9d44-e23579585d4c-kube-api-access-kkjj9\") pod \"e57db8dc-b992-4e8f-9d44-e23579585d4c\" (UID: \"e57db8dc-b992-4e8f-9d44-e23579585d4c\") " Oct 10 18:04:30 crc kubenswrapper[4799]: I1010 18:04:30.066859 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e57db8dc-b992-4e8f-9d44-e23579585d4c-kube-api-access-kkjj9" (OuterVolumeSpecName: "kube-api-access-kkjj9") pod "e57db8dc-b992-4e8f-9d44-e23579585d4c" (UID: "e57db8dc-b992-4e8f-9d44-e23579585d4c"). InnerVolumeSpecName "kube-api-access-kkjj9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:04:30 crc kubenswrapper[4799]: I1010 18:04:30.163636 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kkjj9\" (UniqueName: \"kubernetes.io/projected/e57db8dc-b992-4e8f-9d44-e23579585d4c-kube-api-access-kkjj9\") on node \"crc\" DevicePath \"\"" Oct 10 18:04:30 crc kubenswrapper[4799]: I1010 18:04:30.602680 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-n52tc" event={"ID":"e57db8dc-b992-4e8f-9d44-e23579585d4c","Type":"ContainerDied","Data":"8f7097fbce58b4092c6adccffbf5b9bbdbe9111bf7704003a2b81ea20905a8ad"} Oct 10 18:04:30 crc kubenswrapper[4799]: I1010 18:04:30.603906 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f7097fbce58b4092c6adccffbf5b9bbdbe9111bf7704003a2b81ea20905a8ad" Oct 10 18:04:30 crc kubenswrapper[4799]: I1010 18:04:30.602787 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-n52tc" Oct 10 18:04:36 crc kubenswrapper[4799]: I1010 18:04:36.578136 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-600c-account-create-jl2pr"] Oct 10 18:04:36 crc kubenswrapper[4799]: E1010 18:04:36.579708 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd05328d-e9e7-4afb-ae9e-2729453b99db" containerName="mariadb-database-create" Oct 10 18:04:36 crc kubenswrapper[4799]: I1010 18:04:36.579744 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd05328d-e9e7-4afb-ae9e-2729453b99db" containerName="mariadb-database-create" Oct 10 18:04:36 crc kubenswrapper[4799]: E1010 18:04:36.579824 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55013235-ff21-4a02-bb37-8eef30a18d79" containerName="mariadb-database-create" Oct 10 18:04:36 crc kubenswrapper[4799]: I1010 18:04:36.579845 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="55013235-ff21-4a02-bb37-8eef30a18d79" containerName="mariadb-database-create" Oct 10 18:04:36 crc kubenswrapper[4799]: E1010 18:04:36.579875 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e57db8dc-b992-4e8f-9d44-e23579585d4c" containerName="mariadb-database-create" Oct 10 18:04:36 crc kubenswrapper[4799]: I1010 18:04:36.579895 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="e57db8dc-b992-4e8f-9d44-e23579585d4c" containerName="mariadb-database-create" Oct 10 18:04:36 crc kubenswrapper[4799]: I1010 18:04:36.580388 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="e57db8dc-b992-4e8f-9d44-e23579585d4c" containerName="mariadb-database-create" Oct 10 18:04:36 crc kubenswrapper[4799]: I1010 18:04:36.580425 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="55013235-ff21-4a02-bb37-8eef30a18d79" containerName="mariadb-database-create" Oct 10 18:04:36 crc kubenswrapper[4799]: I1010 18:04:36.580471 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd05328d-e9e7-4afb-ae9e-2729453b99db" containerName="mariadb-database-create" Oct 10 18:04:36 crc kubenswrapper[4799]: I1010 18:04:36.581569 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-600c-account-create-jl2pr" Oct 10 18:04:36 crc kubenswrapper[4799]: I1010 18:04:36.586325 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Oct 10 18:04:36 crc kubenswrapper[4799]: I1010 18:04:36.592742 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-600c-account-create-jl2pr"] Oct 10 18:04:36 crc kubenswrapper[4799]: I1010 18:04:36.593393 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ft5d\" (UniqueName: \"kubernetes.io/projected/a2435e3e-477f-40b8-820f-1d9e2c80db7e-kube-api-access-6ft5d\") pod \"nova-api-600c-account-create-jl2pr\" (UID: \"a2435e3e-477f-40b8-820f-1d9e2c80db7e\") " pod="openstack/nova-api-600c-account-create-jl2pr" Oct 10 18:04:36 crc kubenswrapper[4799]: I1010 18:04:36.695868 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6ft5d\" (UniqueName: \"kubernetes.io/projected/a2435e3e-477f-40b8-820f-1d9e2c80db7e-kube-api-access-6ft5d\") pod \"nova-api-600c-account-create-jl2pr\" (UID: \"a2435e3e-477f-40b8-820f-1d9e2c80db7e\") " pod="openstack/nova-api-600c-account-create-jl2pr" Oct 10 18:04:36 crc kubenswrapper[4799]: I1010 18:04:36.722746 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6ft5d\" (UniqueName: \"kubernetes.io/projected/a2435e3e-477f-40b8-820f-1d9e2c80db7e-kube-api-access-6ft5d\") pod \"nova-api-600c-account-create-jl2pr\" (UID: \"a2435e3e-477f-40b8-820f-1d9e2c80db7e\") " pod="openstack/nova-api-600c-account-create-jl2pr" Oct 10 18:04:36 crc kubenswrapper[4799]: I1010 18:04:36.751052 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-6e42-account-create-2nqmr"] Oct 10 18:04:36 crc kubenswrapper[4799]: I1010 18:04:36.752741 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-6e42-account-create-2nqmr" Oct 10 18:04:36 crc kubenswrapper[4799]: I1010 18:04:36.754822 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Oct 10 18:04:36 crc kubenswrapper[4799]: I1010 18:04:36.761062 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-6e42-account-create-2nqmr"] Oct 10 18:04:36 crc kubenswrapper[4799]: I1010 18:04:36.797194 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crjsd\" (UniqueName: \"kubernetes.io/projected/39ac58cf-e2da-416f-9456-2a9eaa8060a9-kube-api-access-crjsd\") pod \"nova-cell0-6e42-account-create-2nqmr\" (UID: \"39ac58cf-e2da-416f-9456-2a9eaa8060a9\") " pod="openstack/nova-cell0-6e42-account-create-2nqmr" Oct 10 18:04:36 crc kubenswrapper[4799]: I1010 18:04:36.899773 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crjsd\" (UniqueName: \"kubernetes.io/projected/39ac58cf-e2da-416f-9456-2a9eaa8060a9-kube-api-access-crjsd\") pod \"nova-cell0-6e42-account-create-2nqmr\" (UID: \"39ac58cf-e2da-416f-9456-2a9eaa8060a9\") " pod="openstack/nova-cell0-6e42-account-create-2nqmr" Oct 10 18:04:36 crc kubenswrapper[4799]: I1010 18:04:36.906713 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-600c-account-create-jl2pr" Oct 10 18:04:36 crc kubenswrapper[4799]: I1010 18:04:36.950035 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-6a8f-account-create-2q2ts"] Oct 10 18:04:36 crc kubenswrapper[4799]: I1010 18:04:36.953567 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-6a8f-account-create-2q2ts" Oct 10 18:04:36 crc kubenswrapper[4799]: I1010 18:04:36.959312 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Oct 10 18:04:36 crc kubenswrapper[4799]: I1010 18:04:36.968102 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crjsd\" (UniqueName: \"kubernetes.io/projected/39ac58cf-e2da-416f-9456-2a9eaa8060a9-kube-api-access-crjsd\") pod \"nova-cell0-6e42-account-create-2nqmr\" (UID: \"39ac58cf-e2da-416f-9456-2a9eaa8060a9\") " pod="openstack/nova-cell0-6e42-account-create-2nqmr" Oct 10 18:04:36 crc kubenswrapper[4799]: I1010 18:04:36.976642 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-6a8f-account-create-2q2ts"] Oct 10 18:04:37 crc kubenswrapper[4799]: I1010 18:04:37.001129 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8bln\" (UniqueName: \"kubernetes.io/projected/7ff134f4-2451-4260-bd13-814729886f38-kube-api-access-f8bln\") pod \"nova-cell1-6a8f-account-create-2q2ts\" (UID: \"7ff134f4-2451-4260-bd13-814729886f38\") " pod="openstack/nova-cell1-6a8f-account-create-2q2ts" Oct 10 18:04:37 crc kubenswrapper[4799]: I1010 18:04:37.102766 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8bln\" (UniqueName: \"kubernetes.io/projected/7ff134f4-2451-4260-bd13-814729886f38-kube-api-access-f8bln\") pod \"nova-cell1-6a8f-account-create-2q2ts\" (UID: \"7ff134f4-2451-4260-bd13-814729886f38\") " pod="openstack/nova-cell1-6a8f-account-create-2q2ts" Oct 10 18:04:37 crc kubenswrapper[4799]: I1010 18:04:37.102845 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-6e42-account-create-2nqmr" Oct 10 18:04:37 crc kubenswrapper[4799]: I1010 18:04:37.122966 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8bln\" (UniqueName: \"kubernetes.io/projected/7ff134f4-2451-4260-bd13-814729886f38-kube-api-access-f8bln\") pod \"nova-cell1-6a8f-account-create-2q2ts\" (UID: \"7ff134f4-2451-4260-bd13-814729886f38\") " pod="openstack/nova-cell1-6a8f-account-create-2q2ts" Oct 10 18:04:37 crc kubenswrapper[4799]: I1010 18:04:37.334473 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-6a8f-account-create-2q2ts" Oct 10 18:04:37 crc kubenswrapper[4799]: I1010 18:04:37.430909 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-600c-account-create-jl2pr"] Oct 10 18:04:37 crc kubenswrapper[4799]: W1010 18:04:37.437419 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda2435e3e_477f_40b8_820f_1d9e2c80db7e.slice/crio-b30487da7a61a2bfa35656f3f962c18d964bb8e53172b085795394a5bc6d5c20 WatchSource:0}: Error finding container b30487da7a61a2bfa35656f3f962c18d964bb8e53172b085795394a5bc6d5c20: Status 404 returned error can't find the container with id b30487da7a61a2bfa35656f3f962c18d964bb8e53172b085795394a5bc6d5c20 Oct 10 18:04:37 crc kubenswrapper[4799]: I1010 18:04:37.536408 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-6e42-account-create-2nqmr"] Oct 10 18:04:37 crc kubenswrapper[4799]: I1010 18:04:37.682036 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-600c-account-create-jl2pr" event={"ID":"a2435e3e-477f-40b8-820f-1d9e2c80db7e","Type":"ContainerStarted","Data":"9f5e33abf26de7058cb51879143496bd6fdee40346a1e25c8f031d6ec4f33ea6"} Oct 10 18:04:37 crc kubenswrapper[4799]: I1010 18:04:37.682249 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-600c-account-create-jl2pr" event={"ID":"a2435e3e-477f-40b8-820f-1d9e2c80db7e","Type":"ContainerStarted","Data":"b30487da7a61a2bfa35656f3f962c18d964bb8e53172b085795394a5bc6d5c20"} Oct 10 18:04:37 crc kubenswrapper[4799]: I1010 18:04:37.684372 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-6e42-account-create-2nqmr" event={"ID":"39ac58cf-e2da-416f-9456-2a9eaa8060a9","Type":"ContainerStarted","Data":"ffa8bdaf6309eb0612578e1a39aaf79979fe83f28e03c50d652196615fec1a8d"} Oct 10 18:04:37 crc kubenswrapper[4799]: I1010 18:04:37.707205 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-600c-account-create-jl2pr" podStartSLOduration=1.7071800700000002 podStartE2EDuration="1.70718007s" podCreationTimestamp="2025-10-10 18:04:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:04:37.700489227 +0000 UTC m=+5571.208813342" watchObservedRunningTime="2025-10-10 18:04:37.70718007 +0000 UTC m=+5571.215504195" Oct 10 18:04:37 crc kubenswrapper[4799]: I1010 18:04:37.812037 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-6a8f-account-create-2q2ts"] Oct 10 18:04:37 crc kubenswrapper[4799]: W1010 18:04:37.877016 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ff134f4_2451_4260_bd13_814729886f38.slice/crio-9eb8d7e561e27daccfd8a7d725836f1d32f2bfe512b5473be1c5d9668aef6eab WatchSource:0}: Error finding container 9eb8d7e561e27daccfd8a7d725836f1d32f2bfe512b5473be1c5d9668aef6eab: Status 404 returned error can't find the container with id 9eb8d7e561e27daccfd8a7d725836f1d32f2bfe512b5473be1c5d9668aef6eab Oct 10 18:04:38 crc kubenswrapper[4799]: I1010 18:04:38.698644 4799 generic.go:334] "Generic (PLEG): container finished" podID="39ac58cf-e2da-416f-9456-2a9eaa8060a9" containerID="f64befd8f0cf9c5f121099deea53861218bc7a1b177edc4df84e51dc3f4c537e" exitCode=0 Oct 10 18:04:38 crc kubenswrapper[4799]: I1010 18:04:38.698786 4799 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-6e42-account-create-2nqmr" event={"ID":"39ac58cf-e2da-416f-9456-2a9eaa8060a9","Type":"ContainerDied","Data":"f64befd8f0cf9c5f121099deea53861218bc7a1b177edc4df84e51dc3f4c537e"} Oct 10 18:04:38 crc kubenswrapper[4799]: I1010 18:04:38.706229 4799 generic.go:334] "Generic (PLEG): container finished" podID="a2435e3e-477f-40b8-820f-1d9e2c80db7e" containerID="9f5e33abf26de7058cb51879143496bd6fdee40346a1e25c8f031d6ec4f33ea6" exitCode=0 Oct 10 18:04:38 crc kubenswrapper[4799]: I1010 18:04:38.706347 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-600c-account-create-jl2pr" event={"ID":"a2435e3e-477f-40b8-820f-1d9e2c80db7e","Type":"ContainerDied","Data":"9f5e33abf26de7058cb51879143496bd6fdee40346a1e25c8f031d6ec4f33ea6"} Oct 10 18:04:38 crc kubenswrapper[4799]: I1010 18:04:38.711608 4799 generic.go:334] "Generic (PLEG): container finished" podID="7ff134f4-2451-4260-bd13-814729886f38" containerID="64ebecf8d5b5ba4ffdd96eaf54fda535d9fa414f4f239dd9bbfde375c51c0dc5" exitCode=0 Oct 10 18:04:38 crc kubenswrapper[4799]: I1010 18:04:38.711731 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-6a8f-account-create-2q2ts" event={"ID":"7ff134f4-2451-4260-bd13-814729886f38","Type":"ContainerDied","Data":"64ebecf8d5b5ba4ffdd96eaf54fda535d9fa414f4f239dd9bbfde375c51c0dc5"} Oct 10 18:04:38 crc kubenswrapper[4799]: I1010 18:04:38.712016 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-6a8f-account-create-2q2ts" event={"ID":"7ff134f4-2451-4260-bd13-814729886f38","Type":"ContainerStarted","Data":"9eb8d7e561e27daccfd8a7d725836f1d32f2bfe512b5473be1c5d9668aef6eab"} Oct 10 18:04:40 crc kubenswrapper[4799]: I1010 18:04:40.236097 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-600c-account-create-jl2pr" Oct 10 18:04:40 crc kubenswrapper[4799]: I1010 18:04:40.243276 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-6a8f-account-create-2q2ts" Oct 10 18:04:40 crc kubenswrapper[4799]: I1010 18:04:40.254580 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-6e42-account-create-2nqmr" Oct 10 18:04:40 crc kubenswrapper[4799]: I1010 18:04:40.305672 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-crjsd\" (UniqueName: \"kubernetes.io/projected/39ac58cf-e2da-416f-9456-2a9eaa8060a9-kube-api-access-crjsd\") pod \"39ac58cf-e2da-416f-9456-2a9eaa8060a9\" (UID: \"39ac58cf-e2da-416f-9456-2a9eaa8060a9\") " Oct 10 18:04:40 crc kubenswrapper[4799]: I1010 18:04:40.306128 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ft5d\" (UniqueName: \"kubernetes.io/projected/a2435e3e-477f-40b8-820f-1d9e2c80db7e-kube-api-access-6ft5d\") pod \"a2435e3e-477f-40b8-820f-1d9e2c80db7e\" (UID: \"a2435e3e-477f-40b8-820f-1d9e2c80db7e\") " Oct 10 18:04:40 crc kubenswrapper[4799]: I1010 18:04:40.306183 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f8bln\" (UniqueName: \"kubernetes.io/projected/7ff134f4-2451-4260-bd13-814729886f38-kube-api-access-f8bln\") pod \"7ff134f4-2451-4260-bd13-814729886f38\" (UID: \"7ff134f4-2451-4260-bd13-814729886f38\") " Oct 10 18:04:40 crc kubenswrapper[4799]: I1010 18:04:40.311057 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39ac58cf-e2da-416f-9456-2a9eaa8060a9-kube-api-access-crjsd" (OuterVolumeSpecName: "kube-api-access-crjsd") pod "39ac58cf-e2da-416f-9456-2a9eaa8060a9" (UID: "39ac58cf-e2da-416f-9456-2a9eaa8060a9"). InnerVolumeSpecName "kube-api-access-crjsd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:04:40 crc kubenswrapper[4799]: I1010 18:04:40.311984 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2435e3e-477f-40b8-820f-1d9e2c80db7e-kube-api-access-6ft5d" (OuterVolumeSpecName: "kube-api-access-6ft5d") pod "a2435e3e-477f-40b8-820f-1d9e2c80db7e" (UID: "a2435e3e-477f-40b8-820f-1d9e2c80db7e"). InnerVolumeSpecName "kube-api-access-6ft5d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:04:40 crc kubenswrapper[4799]: I1010 18:04:40.312816 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ff134f4-2451-4260-bd13-814729886f38-kube-api-access-f8bln" (OuterVolumeSpecName: "kube-api-access-f8bln") pod "7ff134f4-2451-4260-bd13-814729886f38" (UID: "7ff134f4-2451-4260-bd13-814729886f38"). InnerVolumeSpecName "kube-api-access-f8bln". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:04:40 crc kubenswrapper[4799]: I1010 18:04:40.408466 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-crjsd\" (UniqueName: \"kubernetes.io/projected/39ac58cf-e2da-416f-9456-2a9eaa8060a9-kube-api-access-crjsd\") on node \"crc\" DevicePath \"\"" Oct 10 18:04:40 crc kubenswrapper[4799]: I1010 18:04:40.408508 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ft5d\" (UniqueName: \"kubernetes.io/projected/a2435e3e-477f-40b8-820f-1d9e2c80db7e-kube-api-access-6ft5d\") on node \"crc\" DevicePath \"\"" Oct 10 18:04:40 crc kubenswrapper[4799]: I1010 18:04:40.408522 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f8bln\" (UniqueName: \"kubernetes.io/projected/7ff134f4-2451-4260-bd13-814729886f38-kube-api-access-f8bln\") on node \"crc\" DevicePath \"\"" Oct 10 18:04:40 crc kubenswrapper[4799]: I1010 18:04:40.741257 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-6a8f-account-create-2q2ts" event={"ID":"7ff134f4-2451-4260-bd13-814729886f38","Type":"ContainerDied","Data":"9eb8d7e561e27daccfd8a7d725836f1d32f2bfe512b5473be1c5d9668aef6eab"} Oct 10 18:04:40 crc kubenswrapper[4799]: I1010 18:04:40.741449 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9eb8d7e561e27daccfd8a7d725836f1d32f2bfe512b5473be1c5d9668aef6eab" Oct 10 18:04:40 crc kubenswrapper[4799]: I1010 18:04:40.741291 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-6a8f-account-create-2q2ts" Oct 10 18:04:40 crc kubenswrapper[4799]: I1010 18:04:40.743845 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-6e42-account-create-2nqmr" event={"ID":"39ac58cf-e2da-416f-9456-2a9eaa8060a9","Type":"ContainerDied","Data":"ffa8bdaf6309eb0612578e1a39aaf79979fe83f28e03c50d652196615fec1a8d"} Oct 10 18:04:40 crc kubenswrapper[4799]: I1010 18:04:40.743906 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ffa8bdaf6309eb0612578e1a39aaf79979fe83f28e03c50d652196615fec1a8d" Oct 10 18:04:40 crc kubenswrapper[4799]: I1010 18:04:40.743910 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-6e42-account-create-2nqmr" Oct 10 18:04:40 crc kubenswrapper[4799]: I1010 18:04:40.745592 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-600c-account-create-jl2pr" event={"ID":"a2435e3e-477f-40b8-820f-1d9e2c80db7e","Type":"ContainerDied","Data":"b30487da7a61a2bfa35656f3f962c18d964bb8e53172b085795394a5bc6d5c20"} Oct 10 18:04:40 crc kubenswrapper[4799]: I1010 18:04:40.745629 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b30487da7a61a2bfa35656f3f962c18d964bb8e53172b085795394a5bc6d5c20" Oct 10 18:04:40 crc kubenswrapper[4799]: I1010 18:04:40.745693 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-600c-account-create-jl2pr" Oct 10 18:04:42 crc kubenswrapper[4799]: I1010 18:04:42.023382 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-dcsnk"] Oct 10 18:04:42 crc kubenswrapper[4799]: E1010 18:04:42.025019 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ff134f4-2451-4260-bd13-814729886f38" containerName="mariadb-account-create" Oct 10 18:04:42 crc kubenswrapper[4799]: I1010 18:04:42.025138 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ff134f4-2451-4260-bd13-814729886f38" containerName="mariadb-account-create" Oct 10 18:04:42 crc kubenswrapper[4799]: E1010 18:04:42.025270 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39ac58cf-e2da-416f-9456-2a9eaa8060a9" containerName="mariadb-account-create" Oct 10 18:04:42 crc kubenswrapper[4799]: I1010 18:04:42.025368 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="39ac58cf-e2da-416f-9456-2a9eaa8060a9" containerName="mariadb-account-create" Oct 10 18:04:42 crc kubenswrapper[4799]: E1010 18:04:42.025491 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2435e3e-477f-40b8-820f-1d9e2c80db7e" containerName="mariadb-account-create" Oct 10 18:04:42 crc kubenswrapper[4799]: I1010 18:04:42.025581 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2435e3e-477f-40b8-820f-1d9e2c80db7e" containerName="mariadb-account-create" Oct 10 18:04:42 crc kubenswrapper[4799]: I1010 18:04:42.025974 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ff134f4-2451-4260-bd13-814729886f38" containerName="mariadb-account-create" Oct 10 18:04:42 crc kubenswrapper[4799]: I1010 18:04:42.026102 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2435e3e-477f-40b8-820f-1d9e2c80db7e" containerName="mariadb-account-create" Oct 10 18:04:42 crc kubenswrapper[4799]: I1010 18:04:42.026200 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="39ac58cf-e2da-416f-9456-2a9eaa8060a9" containerName="mariadb-account-create" Oct 10 18:04:42 crc kubenswrapper[4799]: I1010 18:04:42.027281 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-dcsnk" Oct 10 18:04:42 crc kubenswrapper[4799]: I1010 18:04:42.029942 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-knm85" Oct 10 18:04:42 crc kubenswrapper[4799]: I1010 18:04:42.030184 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Oct 10 18:04:42 crc kubenswrapper[4799]: I1010 18:04:42.030894 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 10 18:04:42 crc kubenswrapper[4799]: I1010 18:04:42.045337 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-dcsnk"] Oct 10 18:04:42 crc kubenswrapper[4799]: I1010 18:04:42.142487 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43af56e2-6fae-4aec-bf32-201d0d17faa7-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-dcsnk\" (UID: \"43af56e2-6fae-4aec-bf32-201d0d17faa7\") " pod="openstack/nova-cell0-conductor-db-sync-dcsnk" Oct 10 18:04:42 crc kubenswrapper[4799]: I1010 18:04:42.142538 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7pwm\" (UniqueName: \"kubernetes.io/projected/43af56e2-6fae-4aec-bf32-201d0d17faa7-kube-api-access-q7pwm\") pod \"nova-cell0-conductor-db-sync-dcsnk\" (UID: \"43af56e2-6fae-4aec-bf32-201d0d17faa7\") " pod="openstack/nova-cell0-conductor-db-sync-dcsnk" Oct 10 18:04:42 crc kubenswrapper[4799]: I1010 18:04:42.142612 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43af56e2-6fae-4aec-bf32-201d0d17faa7-scripts\") pod \"nova-cell0-conductor-db-sync-dcsnk\" (UID: \"43af56e2-6fae-4aec-bf32-201d0d17faa7\") " pod="openstack/nova-cell0-conductor-db-sync-dcsnk" Oct 10 18:04:42 crc kubenswrapper[4799]: I1010 18:04:42.142640 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43af56e2-6fae-4aec-bf32-201d0d17faa7-config-data\") pod \"nova-cell0-conductor-db-sync-dcsnk\" (UID: \"43af56e2-6fae-4aec-bf32-201d0d17faa7\") " pod="openstack/nova-cell0-conductor-db-sync-dcsnk" Oct 10 18:04:42 crc kubenswrapper[4799]: I1010 18:04:42.245078 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43af56e2-6fae-4aec-bf32-201d0d17faa7-config-data\") pod \"nova-cell0-conductor-db-sync-dcsnk\" (UID: \"43af56e2-6fae-4aec-bf32-201d0d17faa7\") " pod="openstack/nova-cell0-conductor-db-sync-dcsnk" Oct 10 18:04:42 crc kubenswrapper[4799]: I1010 18:04:42.245260 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43af56e2-6fae-4aec-bf32-201d0d17faa7-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-dcsnk\" (UID: \"43af56e2-6fae-4aec-bf32-201d0d17faa7\") " pod="openstack/nova-cell0-conductor-db-sync-dcsnk" Oct 10 18:04:42 crc kubenswrapper[4799]: I1010 18:04:42.245357 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7pwm\" (UniqueName: \"kubernetes.io/projected/43af56e2-6fae-4aec-bf32-201d0d17faa7-kube-api-access-q7pwm\") pod \"nova-cell0-conductor-db-sync-dcsnk\" 
(UID: \"43af56e2-6fae-4aec-bf32-201d0d17faa7\") " pod="openstack/nova-cell0-conductor-db-sync-dcsnk" Oct 10 18:04:42 crc kubenswrapper[4799]: I1010 18:04:42.245572 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43af56e2-6fae-4aec-bf32-201d0d17faa7-scripts\") pod \"nova-cell0-conductor-db-sync-dcsnk\" (UID: \"43af56e2-6fae-4aec-bf32-201d0d17faa7\") " pod="openstack/nova-cell0-conductor-db-sync-dcsnk" Oct 10 18:04:42 crc kubenswrapper[4799]: I1010 18:04:42.251975 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43af56e2-6fae-4aec-bf32-201d0d17faa7-scripts\") pod \"nova-cell0-conductor-db-sync-dcsnk\" (UID: \"43af56e2-6fae-4aec-bf32-201d0d17faa7\") " pod="openstack/nova-cell0-conductor-db-sync-dcsnk" Oct 10 18:04:42 crc kubenswrapper[4799]: I1010 18:04:42.252407 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43af56e2-6fae-4aec-bf32-201d0d17faa7-config-data\") pod \"nova-cell0-conductor-db-sync-dcsnk\" (UID: \"43af56e2-6fae-4aec-bf32-201d0d17faa7\") " pod="openstack/nova-cell0-conductor-db-sync-dcsnk" Oct 10 18:04:42 crc kubenswrapper[4799]: I1010 18:04:42.254131 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43af56e2-6fae-4aec-bf32-201d0d17faa7-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-dcsnk\" (UID: \"43af56e2-6fae-4aec-bf32-201d0d17faa7\") " pod="openstack/nova-cell0-conductor-db-sync-dcsnk" Oct 10 18:04:42 crc kubenswrapper[4799]: I1010 18:04:42.275508 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7pwm\" (UniqueName: \"kubernetes.io/projected/43af56e2-6fae-4aec-bf32-201d0d17faa7-kube-api-access-q7pwm\") pod \"nova-cell0-conductor-db-sync-dcsnk\" (UID: \"43af56e2-6fae-4aec-bf32-201d0d17faa7\") " pod="openstack/nova-cell0-conductor-db-sync-dcsnk" Oct 10 18:04:42 crc kubenswrapper[4799]: I1010 18:04:42.350525 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-dcsnk" Oct 10 18:04:42 crc kubenswrapper[4799]: I1010 18:04:42.897788 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-dcsnk"] Oct 10 18:04:43 crc kubenswrapper[4799]: I1010 18:04:43.779610 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-dcsnk" event={"ID":"43af56e2-6fae-4aec-bf32-201d0d17faa7","Type":"ContainerStarted","Data":"27083c69e73d75ec1d54f851c219aa39b224642c1cf83a4961cfa4fad3007bc7"} Oct 10 18:04:43 crc kubenswrapper[4799]: I1010 18:04:43.780125 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-dcsnk" event={"ID":"43af56e2-6fae-4aec-bf32-201d0d17faa7","Type":"ContainerStarted","Data":"abe73a6ae811d7cb20255f5414dfd9b71c2bdd9eb6a57b48405c8b73d4f3ebdb"} Oct 10 18:04:43 crc kubenswrapper[4799]: I1010 18:04:43.803129 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-dcsnk" podStartSLOduration=2.803101151 podStartE2EDuration="2.803101151s" podCreationTimestamp="2025-10-10 18:04:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:04:43.800692072 +0000 UTC m=+5577.309016207" watchObservedRunningTime="2025-10-10 18:04:43.803101151 +0000 UTC m=+5577.311425306" Oct 10 18:04:45 crc kubenswrapper[4799]: I1010 18:04:45.249203 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 18:04:45 crc kubenswrapper[4799]: I1010 18:04:45.249626 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 18:04:47 crc kubenswrapper[4799]: I1010 18:04:47.749240 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xcm9v"] Oct 10 18:04:47 crc kubenswrapper[4799]: I1010 18:04:47.752343 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xcm9v" Oct 10 18:04:47 crc kubenswrapper[4799]: I1010 18:04:47.761468 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xcm9v"] Oct 10 18:04:47 crc kubenswrapper[4799]: I1010 18:04:47.858695 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/091203ec-3414-41e9-96cd-74f97abd25a7-utilities\") pod \"redhat-marketplace-xcm9v\" (UID: \"091203ec-3414-41e9-96cd-74f97abd25a7\") " pod="openshift-marketplace/redhat-marketplace-xcm9v" Oct 10 18:04:47 crc kubenswrapper[4799]: I1010 18:04:47.858883 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsnfq\" (UniqueName: \"kubernetes.io/projected/091203ec-3414-41e9-96cd-74f97abd25a7-kube-api-access-gsnfq\") pod \"redhat-marketplace-xcm9v\" (UID: \"091203ec-3414-41e9-96cd-74f97abd25a7\") " pod="openshift-marketplace/redhat-marketplace-xcm9v" Oct 10 18:04:47 crc kubenswrapper[4799]: I1010 18:04:47.859152 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/091203ec-3414-41e9-96cd-74f97abd25a7-catalog-content\") pod \"redhat-marketplace-xcm9v\" (UID: \"091203ec-3414-41e9-96cd-74f97abd25a7\") " pod="openshift-marketplace/redhat-marketplace-xcm9v" Oct 10 18:04:47 crc kubenswrapper[4799]: I1010 18:04:47.960961 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsnfq\" (UniqueName: \"kubernetes.io/projected/091203ec-3414-41e9-96cd-74f97abd25a7-kube-api-access-gsnfq\") pod \"redhat-marketplace-xcm9v\" (UID: \"091203ec-3414-41e9-96cd-74f97abd25a7\") " pod="openshift-marketplace/redhat-marketplace-xcm9v" Oct 10 18:04:47 crc kubenswrapper[4799]: I1010 18:04:47.961067 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/091203ec-3414-41e9-96cd-74f97abd25a7-catalog-content\") pod \"redhat-marketplace-xcm9v\" (UID: \"091203ec-3414-41e9-96cd-74f97abd25a7\") " pod="openshift-marketplace/redhat-marketplace-xcm9v" Oct 10 18:04:47 crc kubenswrapper[4799]: I1010 18:04:47.961149 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/091203ec-3414-41e9-96cd-74f97abd25a7-utilities\") pod \"redhat-marketplace-xcm9v\" (UID: \"091203ec-3414-41e9-96cd-74f97abd25a7\") " pod="openshift-marketplace/redhat-marketplace-xcm9v" Oct 10 18:04:47 crc kubenswrapper[4799]: I1010 18:04:47.961727 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/091203ec-3414-41e9-96cd-74f97abd25a7-utilities\") pod \"redhat-marketplace-xcm9v\" (UID: \"091203ec-3414-41e9-96cd-74f97abd25a7\") " pod="openshift-marketplace/redhat-marketplace-xcm9v" Oct 10 18:04:47 crc kubenswrapper[4799]: I1010 18:04:47.961800 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/091203ec-3414-41e9-96cd-74f97abd25a7-catalog-content\") pod \"redhat-marketplace-xcm9v\" (UID: \"091203ec-3414-41e9-96cd-74f97abd25a7\") " pod="openshift-marketplace/redhat-marketplace-xcm9v" Oct 10 18:04:47 crc kubenswrapper[4799]: I1010 18:04:47.983332 4799 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-gsnfq\" (UniqueName: \"kubernetes.io/projected/091203ec-3414-41e9-96cd-74f97abd25a7-kube-api-access-gsnfq\") pod \"redhat-marketplace-xcm9v\" (UID: \"091203ec-3414-41e9-96cd-74f97abd25a7\") " pod="openshift-marketplace/redhat-marketplace-xcm9v" Oct 10 18:04:48 crc kubenswrapper[4799]: I1010 18:04:48.090952 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xcm9v" Oct 10 18:04:48 crc kubenswrapper[4799]: I1010 18:04:48.568426 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xcm9v"] Oct 10 18:04:48 crc kubenswrapper[4799]: I1010 18:04:48.857243 4799 generic.go:334] "Generic (PLEG): container finished" podID="43af56e2-6fae-4aec-bf32-201d0d17faa7" containerID="27083c69e73d75ec1d54f851c219aa39b224642c1cf83a4961cfa4fad3007bc7" exitCode=0 Oct 10 18:04:48 crc kubenswrapper[4799]: I1010 18:04:48.857330 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-dcsnk" event={"ID":"43af56e2-6fae-4aec-bf32-201d0d17faa7","Type":"ContainerDied","Data":"27083c69e73d75ec1d54f851c219aa39b224642c1cf83a4961cfa4fad3007bc7"} Oct 10 18:04:48 crc kubenswrapper[4799]: I1010 18:04:48.861990 4799 generic.go:334] "Generic (PLEG): container finished" podID="091203ec-3414-41e9-96cd-74f97abd25a7" containerID="602ac74074eb20c80ed462e38fb316205f1ad5f4166cf5f8c0389476d6cffaaa" exitCode=0 Oct 10 18:04:48 crc kubenswrapper[4799]: I1010 18:04:48.862036 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xcm9v" event={"ID":"091203ec-3414-41e9-96cd-74f97abd25a7","Type":"ContainerDied","Data":"602ac74074eb20c80ed462e38fb316205f1ad5f4166cf5f8c0389476d6cffaaa"} Oct 10 18:04:48 crc kubenswrapper[4799]: I1010 18:04:48.862095 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xcm9v" event={"ID":"091203ec-3414-41e9-96cd-74f97abd25a7","Type":"ContainerStarted","Data":"80ff04b19338a76abc0604cd9b2f529297d24bd23f99e5135e0fef248da68bf0"} Oct 10 18:04:48 crc kubenswrapper[4799]: I1010 18:04:48.865005 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 10 18:04:50 crc kubenswrapper[4799]: I1010 18:04:50.242017 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-dcsnk" Oct 10 18:04:50 crc kubenswrapper[4799]: I1010 18:04:50.307845 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43af56e2-6fae-4aec-bf32-201d0d17faa7-combined-ca-bundle\") pod \"43af56e2-6fae-4aec-bf32-201d0d17faa7\" (UID: \"43af56e2-6fae-4aec-bf32-201d0d17faa7\") " Oct 10 18:04:50 crc kubenswrapper[4799]: I1010 18:04:50.308014 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43af56e2-6fae-4aec-bf32-201d0d17faa7-config-data\") pod \"43af56e2-6fae-4aec-bf32-201d0d17faa7\" (UID: \"43af56e2-6fae-4aec-bf32-201d0d17faa7\") " Oct 10 18:04:50 crc kubenswrapper[4799]: I1010 18:04:50.308064 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43af56e2-6fae-4aec-bf32-201d0d17faa7-scripts\") pod \"43af56e2-6fae-4aec-bf32-201d0d17faa7\" (UID: \"43af56e2-6fae-4aec-bf32-201d0d17faa7\") " Oct 10 18:04:50 crc kubenswrapper[4799]: I1010 18:04:50.308211 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q7pwm\" (UniqueName: \"kubernetes.io/projected/43af56e2-6fae-4aec-bf32-201d0d17faa7-kube-api-access-q7pwm\") pod \"43af56e2-6fae-4aec-bf32-201d0d17faa7\" (UID: \"43af56e2-6fae-4aec-bf32-201d0d17faa7\") " Oct 10 18:04:50 crc kubenswrapper[4799]: I1010 18:04:50.317250 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43af56e2-6fae-4aec-bf32-201d0d17faa7-kube-api-access-q7pwm" (OuterVolumeSpecName: "kube-api-access-q7pwm") pod "43af56e2-6fae-4aec-bf32-201d0d17faa7" (UID: "43af56e2-6fae-4aec-bf32-201d0d17faa7"). InnerVolumeSpecName "kube-api-access-q7pwm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:04:50 crc kubenswrapper[4799]: I1010 18:04:50.319860 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43af56e2-6fae-4aec-bf32-201d0d17faa7-scripts" (OuterVolumeSpecName: "scripts") pod "43af56e2-6fae-4aec-bf32-201d0d17faa7" (UID: "43af56e2-6fae-4aec-bf32-201d0d17faa7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:04:50 crc kubenswrapper[4799]: I1010 18:04:50.347448 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43af56e2-6fae-4aec-bf32-201d0d17faa7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "43af56e2-6fae-4aec-bf32-201d0d17faa7" (UID: "43af56e2-6fae-4aec-bf32-201d0d17faa7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:04:50 crc kubenswrapper[4799]: I1010 18:04:50.360081 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43af56e2-6fae-4aec-bf32-201d0d17faa7-config-data" (OuterVolumeSpecName: "config-data") pod "43af56e2-6fae-4aec-bf32-201d0d17faa7" (UID: "43af56e2-6fae-4aec-bf32-201d0d17faa7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:04:50 crc kubenswrapper[4799]: I1010 18:04:50.410033 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43af56e2-6fae-4aec-bf32-201d0d17faa7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:04:50 crc kubenswrapper[4799]: I1010 18:04:50.410072 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43af56e2-6fae-4aec-bf32-201d0d17faa7-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 18:04:50 crc kubenswrapper[4799]: I1010 18:04:50.410081 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43af56e2-6fae-4aec-bf32-201d0d17faa7-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 18:04:50 crc kubenswrapper[4799]: I1010 18:04:50.410092 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q7pwm\" (UniqueName: \"kubernetes.io/projected/43af56e2-6fae-4aec-bf32-201d0d17faa7-kube-api-access-q7pwm\") on node \"crc\" DevicePath \"\"" Oct 10 18:04:50 crc kubenswrapper[4799]: I1010 18:04:50.916885 4799 generic.go:334] "Generic (PLEG): container finished" podID="091203ec-3414-41e9-96cd-74f97abd25a7" containerID="b2d51133e8e31ede9f5b6dc10ad8299c632457609294a0749b7af8df42eb7da9" exitCode=0 Oct 10 18:04:50 crc kubenswrapper[4799]: I1010 18:04:50.917048 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xcm9v" event={"ID":"091203ec-3414-41e9-96cd-74f97abd25a7","Type":"ContainerDied","Data":"b2d51133e8e31ede9f5b6dc10ad8299c632457609294a0749b7af8df42eb7da9"} Oct 10 18:04:50 crc kubenswrapper[4799]: I1010 18:04:50.925429 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-dcsnk" event={"ID":"43af56e2-6fae-4aec-bf32-201d0d17faa7","Type":"ContainerDied","Data":"abe73a6ae811d7cb20255f5414dfd9b71c2bdd9eb6a57b48405c8b73d4f3ebdb"} Oct 10 18:04:50 crc kubenswrapper[4799]: I1010 18:04:50.925484 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="abe73a6ae811d7cb20255f5414dfd9b71c2bdd9eb6a57b48405c8b73d4f3ebdb" Oct 10 18:04:50 crc kubenswrapper[4799]: I1010 18:04:50.925828 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-dcsnk" Oct 10 18:04:51 crc kubenswrapper[4799]: I1010 18:04:50.996240 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 10 18:04:51 crc kubenswrapper[4799]: E1010 18:04:50.996722 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43af56e2-6fae-4aec-bf32-201d0d17faa7" containerName="nova-cell0-conductor-db-sync" Oct 10 18:04:51 crc kubenswrapper[4799]: I1010 18:04:50.996738 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="43af56e2-6fae-4aec-bf32-201d0d17faa7" containerName="nova-cell0-conductor-db-sync" Oct 10 18:04:51 crc kubenswrapper[4799]: I1010 18:04:50.997000 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="43af56e2-6fae-4aec-bf32-201d0d17faa7" containerName="nova-cell0-conductor-db-sync" Oct 10 18:04:51 crc kubenswrapper[4799]: I1010 18:04:50.997719 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 10 18:04:51 crc kubenswrapper[4799]: I1010 18:04:51.002229 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 10 18:04:51 crc kubenswrapper[4799]: I1010 18:04:51.002244 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-knm85" Oct 10 18:04:51 crc kubenswrapper[4799]: I1010 18:04:51.016887 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 10 18:04:51 crc kubenswrapper[4799]: I1010 18:04:51.023953 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84cc2351-4774-443b-878a-91aedb81db3e-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"84cc2351-4774-443b-878a-91aedb81db3e\") " pod="openstack/nova-cell0-conductor-0" Oct 10 18:04:51 crc kubenswrapper[4799]: I1010 18:04:51.024020 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84cc2351-4774-443b-878a-91aedb81db3e-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"84cc2351-4774-443b-878a-91aedb81db3e\") " pod="openstack/nova-cell0-conductor-0" Oct 10 18:04:51 crc kubenswrapper[4799]: I1010 18:04:51.024116 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bx9fv\" (UniqueName: \"kubernetes.io/projected/84cc2351-4774-443b-878a-91aedb81db3e-kube-api-access-bx9fv\") pod \"nova-cell0-conductor-0\" (UID: \"84cc2351-4774-443b-878a-91aedb81db3e\") " pod="openstack/nova-cell0-conductor-0" Oct 10 18:04:51 crc kubenswrapper[4799]: I1010 18:04:51.127458 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84cc2351-4774-443b-878a-91aedb81db3e-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"84cc2351-4774-443b-878a-91aedb81db3e\") " pod="openstack/nova-cell0-conductor-0" Oct 10 18:04:51 crc kubenswrapper[4799]: I1010 18:04:51.127595 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84cc2351-4774-443b-878a-91aedb81db3e-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"84cc2351-4774-443b-878a-91aedb81db3e\") " pod="openstack/nova-cell0-conductor-0" Oct 10 18:04:51 crc kubenswrapper[4799]: I1010 18:04:51.127706 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bx9fv\" (UniqueName: \"kubernetes.io/projected/84cc2351-4774-443b-878a-91aedb81db3e-kube-api-access-bx9fv\") pod \"nova-cell0-conductor-0\" (UID: \"84cc2351-4774-443b-878a-91aedb81db3e\") " pod="openstack/nova-cell0-conductor-0" Oct 10 18:04:51 crc kubenswrapper[4799]: I1010 18:04:51.132554 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84cc2351-4774-443b-878a-91aedb81db3e-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"84cc2351-4774-443b-878a-91aedb81db3e\") " pod="openstack/nova-cell0-conductor-0" Oct 10 18:04:51 crc kubenswrapper[4799]: I1010 18:04:51.133366 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84cc2351-4774-443b-878a-91aedb81db3e-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" 
(UID: \"84cc2351-4774-443b-878a-91aedb81db3e\") " pod="openstack/nova-cell0-conductor-0" Oct 10 18:04:51 crc kubenswrapper[4799]: I1010 18:04:51.150062 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bx9fv\" (UniqueName: \"kubernetes.io/projected/84cc2351-4774-443b-878a-91aedb81db3e-kube-api-access-bx9fv\") pod \"nova-cell0-conductor-0\" (UID: \"84cc2351-4774-443b-878a-91aedb81db3e\") " pod="openstack/nova-cell0-conductor-0" Oct 10 18:04:51 crc kubenswrapper[4799]: I1010 18:04:51.330848 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 10 18:04:51 crc kubenswrapper[4799]: I1010 18:04:51.889570 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 10 18:04:51 crc kubenswrapper[4799]: W1010 18:04:51.915643 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod84cc2351_4774_443b_878a_91aedb81db3e.slice/crio-4bb9b386c88b7d90198659b64f1b91fae1857145b0d1b459c77ab2320b788c3f WatchSource:0}: Error finding container 4bb9b386c88b7d90198659b64f1b91fae1857145b0d1b459c77ab2320b788c3f: Status 404 returned error can't find the container with id 4bb9b386c88b7d90198659b64f1b91fae1857145b0d1b459c77ab2320b788c3f Oct 10 18:04:51 crc kubenswrapper[4799]: I1010 18:04:51.940596 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"84cc2351-4774-443b-878a-91aedb81db3e","Type":"ContainerStarted","Data":"4bb9b386c88b7d90198659b64f1b91fae1857145b0d1b459c77ab2320b788c3f"} Oct 10 18:04:51 crc kubenswrapper[4799]: I1010 18:04:51.943722 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xcm9v" event={"ID":"091203ec-3414-41e9-96cd-74f97abd25a7","Type":"ContainerStarted","Data":"1b4f5675fa62b8c7b33fab2438a4e1ad9af4c739eaaae8541745eb7afb88862f"} Oct 10 18:04:51 crc kubenswrapper[4799]: I1010 18:04:51.982324 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-xcm9v" podStartSLOduration=2.459050005 podStartE2EDuration="4.982299887s" podCreationTimestamp="2025-10-10 18:04:47 +0000 UTC" firstStartedPulling="2025-10-10 18:04:48.86470291 +0000 UTC m=+5582.373027035" lastFinishedPulling="2025-10-10 18:04:51.387952762 +0000 UTC m=+5584.896276917" observedRunningTime="2025-10-10 18:04:51.975860981 +0000 UTC m=+5585.484185156" watchObservedRunningTime="2025-10-10 18:04:51.982299887 +0000 UTC m=+5585.490624012" Oct 10 18:04:52 crc kubenswrapper[4799]: I1010 18:04:52.957188 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"84cc2351-4774-443b-878a-91aedb81db3e","Type":"ContainerStarted","Data":"0248e07d40807e658db7c7b6a5d475ff49dbe6d01ce9c4367c6ecaaf20143521"} Oct 10 18:04:52 crc kubenswrapper[4799]: I1010 18:04:52.958050 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Oct 10 18:04:52 crc kubenswrapper[4799]: I1010 18:04:52.993124 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.993099285 podStartE2EDuration="2.993099285s" podCreationTimestamp="2025-10-10 18:04:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:04:52.980164929 +0000 UTC 
m=+5586.488489074" watchObservedRunningTime="2025-10-10 18:04:52.993099285 +0000 UTC m=+5586.501423440" Oct 10 18:04:58 crc kubenswrapper[4799]: I1010 18:04:58.091187 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-xcm9v" Oct 10 18:04:58 crc kubenswrapper[4799]: I1010 18:04:58.091963 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-xcm9v" Oct 10 18:04:58 crc kubenswrapper[4799]: I1010 18:04:58.168323 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-xcm9v" Oct 10 18:04:59 crc kubenswrapper[4799]: I1010 18:04:59.119601 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-xcm9v" Oct 10 18:04:59 crc kubenswrapper[4799]: I1010 18:04:59.209020 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xcm9v"] Oct 10 18:05:01 crc kubenswrapper[4799]: I1010 18:05:01.071080 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-xcm9v" podUID="091203ec-3414-41e9-96cd-74f97abd25a7" containerName="registry-server" containerID="cri-o://1b4f5675fa62b8c7b33fab2438a4e1ad9af4c739eaaae8541745eb7afb88862f" gracePeriod=2 Oct 10 18:05:01 crc kubenswrapper[4799]: I1010 18:05:01.382279 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Oct 10 18:05:01 crc kubenswrapper[4799]: I1010 18:05:01.578219 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xcm9v" Oct 10 18:05:01 crc kubenswrapper[4799]: I1010 18:05:01.712825 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/091203ec-3414-41e9-96cd-74f97abd25a7-utilities\") pod \"091203ec-3414-41e9-96cd-74f97abd25a7\" (UID: \"091203ec-3414-41e9-96cd-74f97abd25a7\") " Oct 10 18:05:01 crc kubenswrapper[4799]: I1010 18:05:01.712929 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/091203ec-3414-41e9-96cd-74f97abd25a7-catalog-content\") pod \"091203ec-3414-41e9-96cd-74f97abd25a7\" (UID: \"091203ec-3414-41e9-96cd-74f97abd25a7\") " Oct 10 18:05:01 crc kubenswrapper[4799]: I1010 18:05:01.712982 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gsnfq\" (UniqueName: \"kubernetes.io/projected/091203ec-3414-41e9-96cd-74f97abd25a7-kube-api-access-gsnfq\") pod \"091203ec-3414-41e9-96cd-74f97abd25a7\" (UID: \"091203ec-3414-41e9-96cd-74f97abd25a7\") " Oct 10 18:05:01 crc kubenswrapper[4799]: I1010 18:05:01.713631 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/091203ec-3414-41e9-96cd-74f97abd25a7-utilities" (OuterVolumeSpecName: "utilities") pod "091203ec-3414-41e9-96cd-74f97abd25a7" (UID: "091203ec-3414-41e9-96cd-74f97abd25a7"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:05:01 crc kubenswrapper[4799]: I1010 18:05:01.721085 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/091203ec-3414-41e9-96cd-74f97abd25a7-kube-api-access-gsnfq" (OuterVolumeSpecName: "kube-api-access-gsnfq") pod "091203ec-3414-41e9-96cd-74f97abd25a7" (UID: "091203ec-3414-41e9-96cd-74f97abd25a7"). InnerVolumeSpecName "kube-api-access-gsnfq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:05:01 crc kubenswrapper[4799]: I1010 18:05:01.724061 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/091203ec-3414-41e9-96cd-74f97abd25a7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "091203ec-3414-41e9-96cd-74f97abd25a7" (UID: "091203ec-3414-41e9-96cd-74f97abd25a7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:05:01 crc kubenswrapper[4799]: I1010 18:05:01.815160 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/091203ec-3414-41e9-96cd-74f97abd25a7-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 18:05:01 crc kubenswrapper[4799]: I1010 18:05:01.815224 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/091203ec-3414-41e9-96cd-74f97abd25a7-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 18:05:01 crc kubenswrapper[4799]: I1010 18:05:01.815245 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gsnfq\" (UniqueName: \"kubernetes.io/projected/091203ec-3414-41e9-96cd-74f97abd25a7-kube-api-access-gsnfq\") on node \"crc\" DevicePath \"\"" Oct 10 18:05:01 crc kubenswrapper[4799]: I1010 18:05:01.952483 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-qbxgb"] Oct 10 18:05:01 crc kubenswrapper[4799]: E1010 18:05:01.952930 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="091203ec-3414-41e9-96cd-74f97abd25a7" containerName="extract-content" Oct 10 18:05:01 crc kubenswrapper[4799]: I1010 18:05:01.952953 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="091203ec-3414-41e9-96cd-74f97abd25a7" containerName="extract-content" Oct 10 18:05:01 crc kubenswrapper[4799]: E1010 18:05:01.952972 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="091203ec-3414-41e9-96cd-74f97abd25a7" containerName="registry-server" Oct 10 18:05:01 crc kubenswrapper[4799]: I1010 18:05:01.952981 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="091203ec-3414-41e9-96cd-74f97abd25a7" containerName="registry-server" Oct 10 18:05:01 crc kubenswrapper[4799]: E1010 18:05:01.952992 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="091203ec-3414-41e9-96cd-74f97abd25a7" containerName="extract-utilities" Oct 10 18:05:01 crc kubenswrapper[4799]: I1010 18:05:01.952999 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="091203ec-3414-41e9-96cd-74f97abd25a7" containerName="extract-utilities" Oct 10 18:05:01 crc kubenswrapper[4799]: I1010 18:05:01.953229 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="091203ec-3414-41e9-96cd-74f97abd25a7" containerName="registry-server" Oct 10 18:05:01 crc kubenswrapper[4799]: I1010 18:05:01.953910 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-qbxgb" Oct 10 18:05:01 crc kubenswrapper[4799]: I1010 18:05:01.956846 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Oct 10 18:05:01 crc kubenswrapper[4799]: I1010 18:05:01.957838 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Oct 10 18:05:01 crc kubenswrapper[4799]: I1010 18:05:01.975816 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-qbxgb"] Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.088714 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.088948 4799 generic.go:334] "Generic (PLEG): container finished" podID="091203ec-3414-41e9-96cd-74f97abd25a7" containerID="1b4f5675fa62b8c7b33fab2438a4e1ad9af4c739eaaae8541745eb7afb88862f" exitCode=0 Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.089053 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xcm9v" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.092382 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xcm9v" event={"ID":"091203ec-3414-41e9-96cd-74f97abd25a7","Type":"ContainerDied","Data":"1b4f5675fa62b8c7b33fab2438a4e1ad9af4c739eaaae8541745eb7afb88862f"} Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.092429 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xcm9v" event={"ID":"091203ec-3414-41e9-96cd-74f97abd25a7","Type":"ContainerDied","Data":"80ff04b19338a76abc0604cd9b2f529297d24bd23f99e5135e0fef248da68bf0"} Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.092451 4799 scope.go:117] "RemoveContainer" containerID="1b4f5675fa62b8c7b33fab2438a4e1ad9af4c739eaaae8541745eb7afb88862f" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.092642 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.100534 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.111480 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.121022 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d1fbe12-3328-4be8-a6bf-8a89c61bea63-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-qbxgb\" (UID: \"8d1fbe12-3328-4be8-a6bf-8a89c61bea63\") " pod="openstack/nova-cell0-cell-mapping-qbxgb" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.121095 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d1fbe12-3328-4be8-a6bf-8a89c61bea63-config-data\") pod \"nova-cell0-cell-mapping-qbxgb\" (UID: \"8d1fbe12-3328-4be8-a6bf-8a89c61bea63\") " pod="openstack/nova-cell0-cell-mapping-qbxgb" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.121137 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d1fbe12-3328-4be8-a6bf-8a89c61bea63-scripts\") pod \"nova-cell0-cell-mapping-qbxgb\" (UID: \"8d1fbe12-3328-4be8-a6bf-8a89c61bea63\") " pod="openstack/nova-cell0-cell-mapping-qbxgb" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.121173 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kjpbf\" (UniqueName: \"kubernetes.io/projected/8d1fbe12-3328-4be8-a6bf-8a89c61bea63-kube-api-access-kjpbf\") pod \"nova-cell0-cell-mapping-qbxgb\" (UID: \"8d1fbe12-3328-4be8-a6bf-8a89c61bea63\") " pod="openstack/nova-cell0-cell-mapping-qbxgb" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.137958 4799 scope.go:117] "RemoveContainer" containerID="b2d51133e8e31ede9f5b6dc10ad8299c632457609294a0749b7af8df42eb7da9" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.167207 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xcm9v"] Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.174018 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-xcm9v"] Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.204034 4799 scope.go:117] "RemoveContainer" containerID="602ac74074eb20c80ed462e38fb316205f1ad5f4166cf5f8c0389476d6cffaaa" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.204364 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.206804 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.218258 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.222903 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d1fbe12-3328-4be8-a6bf-8a89c61bea63-scripts\") pod \"nova-cell0-cell-mapping-qbxgb\" (UID: \"8d1fbe12-3328-4be8-a6bf-8a89c61bea63\") " pod="openstack/nova-cell0-cell-mapping-qbxgb" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.222955 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kjpbf\" (UniqueName: \"kubernetes.io/projected/8d1fbe12-3328-4be8-a6bf-8a89c61bea63-kube-api-access-kjpbf\") pod \"nova-cell0-cell-mapping-qbxgb\" (UID: \"8d1fbe12-3328-4be8-a6bf-8a89c61bea63\") " pod="openstack/nova-cell0-cell-mapping-qbxgb" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.222988 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53f2c30d-6bb2-4de5-90d1-febbb40322b2-config-data\") pod \"nova-metadata-0\" (UID: \"53f2c30d-6bb2-4de5-90d1-febbb40322b2\") " pod="openstack/nova-metadata-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.223019 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53f2c30d-6bb2-4de5-90d1-febbb40322b2-logs\") pod \"nova-metadata-0\" (UID: \"53f2c30d-6bb2-4de5-90d1-febbb40322b2\") " pod="openstack/nova-metadata-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.223045 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53f2c30d-6bb2-4de5-90d1-febbb40322b2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"53f2c30d-6bb2-4de5-90d1-febbb40322b2\") " pod="openstack/nova-metadata-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.223078 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d1fbe12-3328-4be8-a6bf-8a89c61bea63-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-qbxgb\" (UID: \"8d1fbe12-3328-4be8-a6bf-8a89c61bea63\") " pod="openstack/nova-cell0-cell-mapping-qbxgb" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.223098 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kq7b2\" (UniqueName: \"kubernetes.io/projected/53f2c30d-6bb2-4de5-90d1-febbb40322b2-kube-api-access-kq7b2\") pod \"nova-metadata-0\" (UID: \"53f2c30d-6bb2-4de5-90d1-febbb40322b2\") " pod="openstack/nova-metadata-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.223135 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d1fbe12-3328-4be8-a6bf-8a89c61bea63-config-data\") pod \"nova-cell0-cell-mapping-qbxgb\" (UID: \"8d1fbe12-3328-4be8-a6bf-8a89c61bea63\") " pod="openstack/nova-cell0-cell-mapping-qbxgb" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.228378 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.261219 4799 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d1fbe12-3328-4be8-a6bf-8a89c61bea63-scripts\") pod \"nova-cell0-cell-mapping-qbxgb\" (UID: \"8d1fbe12-3328-4be8-a6bf-8a89c61bea63\") " pod="openstack/nova-cell0-cell-mapping-qbxgb" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.270384 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d1fbe12-3328-4be8-a6bf-8a89c61bea63-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-qbxgb\" (UID: \"8d1fbe12-3328-4be8-a6bf-8a89c61bea63\") " pod="openstack/nova-cell0-cell-mapping-qbxgb" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.271221 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d1fbe12-3328-4be8-a6bf-8a89c61bea63-config-data\") pod \"nova-cell0-cell-mapping-qbxgb\" (UID: \"8d1fbe12-3328-4be8-a6bf-8a89c61bea63\") " pod="openstack/nova-cell0-cell-mapping-qbxgb" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.273922 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kjpbf\" (UniqueName: \"kubernetes.io/projected/8d1fbe12-3328-4be8-a6bf-8a89c61bea63-kube-api-access-kjpbf\") pod \"nova-cell0-cell-mapping-qbxgb\" (UID: \"8d1fbe12-3328-4be8-a6bf-8a89c61bea63\") " pod="openstack/nova-cell0-cell-mapping-qbxgb" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.285737 4799 scope.go:117] "RemoveContainer" containerID="1b4f5675fa62b8c7b33fab2438a4e1ad9af4c739eaaae8541745eb7afb88862f" Oct 10 18:05:02 crc kubenswrapper[4799]: E1010 18:05:02.292368 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b4f5675fa62b8c7b33fab2438a4e1ad9af4c739eaaae8541745eb7afb88862f\": container with ID starting with 1b4f5675fa62b8c7b33fab2438a4e1ad9af4c739eaaae8541745eb7afb88862f not found: ID does not exist" containerID="1b4f5675fa62b8c7b33fab2438a4e1ad9af4c739eaaae8541745eb7afb88862f" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.302387 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b4f5675fa62b8c7b33fab2438a4e1ad9af4c739eaaae8541745eb7afb88862f"} err="failed to get container status \"1b4f5675fa62b8c7b33fab2438a4e1ad9af4c739eaaae8541745eb7afb88862f\": rpc error: code = NotFound desc = could not find container \"1b4f5675fa62b8c7b33fab2438a4e1ad9af4c739eaaae8541745eb7afb88862f\": container with ID starting with 1b4f5675fa62b8c7b33fab2438a4e1ad9af4c739eaaae8541745eb7afb88862f not found: ID does not exist" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.302464 4799 scope.go:117] "RemoveContainer" containerID="b2d51133e8e31ede9f5b6dc10ad8299c632457609294a0749b7af8df42eb7da9" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.298396 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6f5fcdf7c9-pnv52"] Oct 10 18:05:02 crc kubenswrapper[4799]: E1010 18:05:02.305074 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b2d51133e8e31ede9f5b6dc10ad8299c632457609294a0749b7af8df42eb7da9\": container with ID starting with b2d51133e8e31ede9f5b6dc10ad8299c632457609294a0749b7af8df42eb7da9 not found: ID does not exist" containerID="b2d51133e8e31ede9f5b6dc10ad8299c632457609294a0749b7af8df42eb7da9" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.305125 4799 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2d51133e8e31ede9f5b6dc10ad8299c632457609294a0749b7af8df42eb7da9"} err="failed to get container status \"b2d51133e8e31ede9f5b6dc10ad8299c632457609294a0749b7af8df42eb7da9\": rpc error: code = NotFound desc = could not find container \"b2d51133e8e31ede9f5b6dc10ad8299c632457609294a0749b7af8df42eb7da9\": container with ID starting with b2d51133e8e31ede9f5b6dc10ad8299c632457609294a0749b7af8df42eb7da9 not found: ID does not exist" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.305156 4799 scope.go:117] "RemoveContainer" containerID="602ac74074eb20c80ed462e38fb316205f1ad5f4166cf5f8c0389476d6cffaaa" Oct 10 18:05:02 crc kubenswrapper[4799]: E1010 18:05:02.305473 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"602ac74074eb20c80ed462e38fb316205f1ad5f4166cf5f8c0389476d6cffaaa\": container with ID starting with 602ac74074eb20c80ed462e38fb316205f1ad5f4166cf5f8c0389476d6cffaaa not found: ID does not exist" containerID="602ac74074eb20c80ed462e38fb316205f1ad5f4166cf5f8c0389476d6cffaaa" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.305521 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"602ac74074eb20c80ed462e38fb316205f1ad5f4166cf5f8c0389476d6cffaaa"} err="failed to get container status \"602ac74074eb20c80ed462e38fb316205f1ad5f4166cf5f8c0389476d6cffaaa\": rpc error: code = NotFound desc = could not find container \"602ac74074eb20c80ed462e38fb316205f1ad5f4166cf5f8c0389476d6cffaaa\": container with ID starting with 602ac74074eb20c80ed462e38fb316205f1ad5f4166cf5f8c0389476d6cffaaa not found: ID does not exist" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.327016 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6f5fcdf7c9-pnv52" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.333828 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53f2c30d-6bb2-4de5-90d1-febbb40322b2-config-data\") pod \"nova-metadata-0\" (UID: \"53f2c30d-6bb2-4de5-90d1-febbb40322b2\") " pod="openstack/nova-metadata-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.335077 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fg956\" (UniqueName: \"kubernetes.io/projected/6c155ce1-6bbc-48fd-bfad-6b19f5912e4a-kube-api-access-fg956\") pod \"nova-api-0\" (UID: \"6c155ce1-6bbc-48fd-bfad-6b19f5912e4a\") " pod="openstack/nova-api-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.335198 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53f2c30d-6bb2-4de5-90d1-febbb40322b2-logs\") pod \"nova-metadata-0\" (UID: \"53f2c30d-6bb2-4de5-90d1-febbb40322b2\") " pod="openstack/nova-metadata-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.335301 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53f2c30d-6bb2-4de5-90d1-febbb40322b2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"53f2c30d-6bb2-4de5-90d1-febbb40322b2\") " pod="openstack/nova-metadata-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.335425 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c155ce1-6bbc-48fd-bfad-6b19f5912e4a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6c155ce1-6bbc-48fd-bfad-6b19f5912e4a\") " pod="openstack/nova-api-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.336819 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kq7b2\" (UniqueName: \"kubernetes.io/projected/53f2c30d-6bb2-4de5-90d1-febbb40322b2-kube-api-access-kq7b2\") pod \"nova-metadata-0\" (UID: \"53f2c30d-6bb2-4de5-90d1-febbb40322b2\") " pod="openstack/nova-metadata-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.336954 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c155ce1-6bbc-48fd-bfad-6b19f5912e4a-logs\") pod \"nova-api-0\" (UID: \"6c155ce1-6bbc-48fd-bfad-6b19f5912e4a\") " pod="openstack/nova-api-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.337179 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c155ce1-6bbc-48fd-bfad-6b19f5912e4a-config-data\") pod \"nova-api-0\" (UID: \"6c155ce1-6bbc-48fd-bfad-6b19f5912e4a\") " pod="openstack/nova-api-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.337639 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53f2c30d-6bb2-4de5-90d1-febbb40322b2-logs\") pod \"nova-metadata-0\" (UID: \"53f2c30d-6bb2-4de5-90d1-febbb40322b2\") " pod="openstack/nova-metadata-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.346999 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/53f2c30d-6bb2-4de5-90d1-febbb40322b2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"53f2c30d-6bb2-4de5-90d1-febbb40322b2\") " pod="openstack/nova-metadata-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.356562 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53f2c30d-6bb2-4de5-90d1-febbb40322b2-config-data\") pod \"nova-metadata-0\" (UID: \"53f2c30d-6bb2-4de5-90d1-febbb40322b2\") " pod="openstack/nova-metadata-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.365910 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6f5fcdf7c9-pnv52"] Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.369656 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kq7b2\" (UniqueName: \"kubernetes.io/projected/53f2c30d-6bb2-4de5-90d1-febbb40322b2-kube-api-access-kq7b2\") pod \"nova-metadata-0\" (UID: \"53f2c30d-6bb2-4de5-90d1-febbb40322b2\") " pod="openstack/nova-metadata-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.394227 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.395590 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.398823 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.402041 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.414276 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.415529 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.418719 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.422109 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.440969 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.445649 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c155ce1-6bbc-48fd-bfad-6b19f5912e4a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6c155ce1-6bbc-48fd-bfad-6b19f5912e4a\") " pod="openstack/nova-api-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.445789 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e28f9857-736d-4a60-833f-91faa9eaef2c-dns-svc\") pod \"dnsmasq-dns-6f5fcdf7c9-pnv52\" (UID: \"e28f9857-736d-4a60-833f-91faa9eaef2c\") " pod="openstack/dnsmasq-dns-6f5fcdf7c9-pnv52" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.445886 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c155ce1-6bbc-48fd-bfad-6b19f5912e4a-logs\") pod \"nova-api-0\" (UID: \"6c155ce1-6bbc-48fd-bfad-6b19f5912e4a\") " pod="openstack/nova-api-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.445961 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e28f9857-736d-4a60-833f-91faa9eaef2c-ovsdbserver-sb\") pod \"dnsmasq-dns-6f5fcdf7c9-pnv52\" (UID: \"e28f9857-736d-4a60-833f-91faa9eaef2c\") " pod="openstack/dnsmasq-dns-6f5fcdf7c9-pnv52" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.446039 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e28f9857-736d-4a60-833f-91faa9eaef2c-ovsdbserver-nb\") pod \"dnsmasq-dns-6f5fcdf7c9-pnv52\" (UID: \"e28f9857-736d-4a60-833f-91faa9eaef2c\") " pod="openstack/dnsmasq-dns-6f5fcdf7c9-pnv52" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.446121 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hfz8g\" (UniqueName: \"kubernetes.io/projected/e28f9857-736d-4a60-833f-91faa9eaef2c-kube-api-access-hfz8g\") pod \"dnsmasq-dns-6f5fcdf7c9-pnv52\" (UID: \"e28f9857-736d-4a60-833f-91faa9eaef2c\") " pod="openstack/dnsmasq-dns-6f5fcdf7c9-pnv52" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.446189 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e28f9857-736d-4a60-833f-91faa9eaef2c-config\") pod \"dnsmasq-dns-6f5fcdf7c9-pnv52\" (UID: \"e28f9857-736d-4a60-833f-91faa9eaef2c\") " pod="openstack/dnsmasq-dns-6f5fcdf7c9-pnv52" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.446271 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c155ce1-6bbc-48fd-bfad-6b19f5912e4a-config-data\") pod \"nova-api-0\" (UID: \"6c155ce1-6bbc-48fd-bfad-6b19f5912e4a\") " pod="openstack/nova-api-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.446363 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fg956\" (UniqueName: \"kubernetes.io/projected/6c155ce1-6bbc-48fd-bfad-6b19f5912e4a-kube-api-access-fg956\") pod 
\"nova-api-0\" (UID: \"6c155ce1-6bbc-48fd-bfad-6b19f5912e4a\") " pod="openstack/nova-api-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.447182 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c155ce1-6bbc-48fd-bfad-6b19f5912e4a-logs\") pod \"nova-api-0\" (UID: \"6c155ce1-6bbc-48fd-bfad-6b19f5912e4a\") " pod="openstack/nova-api-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.477421 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c155ce1-6bbc-48fd-bfad-6b19f5912e4a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6c155ce1-6bbc-48fd-bfad-6b19f5912e4a\") " pod="openstack/nova-api-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.477949 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c155ce1-6bbc-48fd-bfad-6b19f5912e4a-config-data\") pod \"nova-api-0\" (UID: \"6c155ce1-6bbc-48fd-bfad-6b19f5912e4a\") " pod="openstack/nova-api-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.493343 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fg956\" (UniqueName: \"kubernetes.io/projected/6c155ce1-6bbc-48fd-bfad-6b19f5912e4a-kube-api-access-fg956\") pod \"nova-api-0\" (UID: \"6c155ce1-6bbc-48fd-bfad-6b19f5912e4a\") " pod="openstack/nova-api-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.547725 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17b5efe0-880c-4841-b485-c58c950a3113-config-data\") pod \"nova-scheduler-0\" (UID: \"17b5efe0-880c-4841-b485-c58c950a3113\") " pod="openstack/nova-scheduler-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.547806 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/476023db-893d-4a8c-86cc-34cfe375f5ef-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"476023db-893d-4a8c-86cc-34cfe375f5ef\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.547864 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e28f9857-736d-4a60-833f-91faa9eaef2c-dns-svc\") pod \"dnsmasq-dns-6f5fcdf7c9-pnv52\" (UID: \"e28f9857-736d-4a60-833f-91faa9eaef2c\") " pod="openstack/dnsmasq-dns-6f5fcdf7c9-pnv52" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.547884 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17b5efe0-880c-4841-b485-c58c950a3113-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"17b5efe0-880c-4841-b485-c58c950a3113\") " pod="openstack/nova-scheduler-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.547901 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ftwk\" (UniqueName: \"kubernetes.io/projected/17b5efe0-880c-4841-b485-c58c950a3113-kube-api-access-8ftwk\") pod \"nova-scheduler-0\" (UID: \"17b5efe0-880c-4841-b485-c58c950a3113\") " pod="openstack/nova-scheduler-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.547952 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" 
(UniqueName: \"kubernetes.io/configmap/e28f9857-736d-4a60-833f-91faa9eaef2c-ovsdbserver-sb\") pod \"dnsmasq-dns-6f5fcdf7c9-pnv52\" (UID: \"e28f9857-736d-4a60-833f-91faa9eaef2c\") " pod="openstack/dnsmasq-dns-6f5fcdf7c9-pnv52" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.547976 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e28f9857-736d-4a60-833f-91faa9eaef2c-ovsdbserver-nb\") pod \"dnsmasq-dns-6f5fcdf7c9-pnv52\" (UID: \"e28f9857-736d-4a60-833f-91faa9eaef2c\") " pod="openstack/dnsmasq-dns-6f5fcdf7c9-pnv52" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.547998 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dgv6t\" (UniqueName: \"kubernetes.io/projected/476023db-893d-4a8c-86cc-34cfe375f5ef-kube-api-access-dgv6t\") pod \"nova-cell1-novncproxy-0\" (UID: \"476023db-893d-4a8c-86cc-34cfe375f5ef\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.548016 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/476023db-893d-4a8c-86cc-34cfe375f5ef-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"476023db-893d-4a8c-86cc-34cfe375f5ef\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.548043 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hfz8g\" (UniqueName: \"kubernetes.io/projected/e28f9857-736d-4a60-833f-91faa9eaef2c-kube-api-access-hfz8g\") pod \"dnsmasq-dns-6f5fcdf7c9-pnv52\" (UID: \"e28f9857-736d-4a60-833f-91faa9eaef2c\") " pod="openstack/dnsmasq-dns-6f5fcdf7c9-pnv52" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.548061 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e28f9857-736d-4a60-833f-91faa9eaef2c-config\") pod \"dnsmasq-dns-6f5fcdf7c9-pnv52\" (UID: \"e28f9857-736d-4a60-833f-91faa9eaef2c\") " pod="openstack/dnsmasq-dns-6f5fcdf7c9-pnv52" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.548953 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e28f9857-736d-4a60-833f-91faa9eaef2c-config\") pod \"dnsmasq-dns-6f5fcdf7c9-pnv52\" (UID: \"e28f9857-736d-4a60-833f-91faa9eaef2c\") " pod="openstack/dnsmasq-dns-6f5fcdf7c9-pnv52" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.550430 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e28f9857-736d-4a60-833f-91faa9eaef2c-ovsdbserver-nb\") pod \"dnsmasq-dns-6f5fcdf7c9-pnv52\" (UID: \"e28f9857-736d-4a60-833f-91faa9eaef2c\") " pod="openstack/dnsmasq-dns-6f5fcdf7c9-pnv52" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.550612 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e28f9857-736d-4a60-833f-91faa9eaef2c-ovsdbserver-sb\") pod \"dnsmasq-dns-6f5fcdf7c9-pnv52\" (UID: \"e28f9857-736d-4a60-833f-91faa9eaef2c\") " pod="openstack/dnsmasq-dns-6f5fcdf7c9-pnv52" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.551026 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/e28f9857-736d-4a60-833f-91faa9eaef2c-dns-svc\") pod \"dnsmasq-dns-6f5fcdf7c9-pnv52\" (UID: \"e28f9857-736d-4a60-833f-91faa9eaef2c\") " pod="openstack/dnsmasq-dns-6f5fcdf7c9-pnv52" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.573713 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-qbxgb" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.592433 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hfz8g\" (UniqueName: \"kubernetes.io/projected/e28f9857-736d-4a60-833f-91faa9eaef2c-kube-api-access-hfz8g\") pod \"dnsmasq-dns-6f5fcdf7c9-pnv52\" (UID: \"e28f9857-736d-4a60-833f-91faa9eaef2c\") " pod="openstack/dnsmasq-dns-6f5fcdf7c9-pnv52" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.625242 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.651709 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dgv6t\" (UniqueName: \"kubernetes.io/projected/476023db-893d-4a8c-86cc-34cfe375f5ef-kube-api-access-dgv6t\") pod \"nova-cell1-novncproxy-0\" (UID: \"476023db-893d-4a8c-86cc-34cfe375f5ef\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.651770 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/476023db-893d-4a8c-86cc-34cfe375f5ef-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"476023db-893d-4a8c-86cc-34cfe375f5ef\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.652389 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/476023db-893d-4a8c-86cc-34cfe375f5ef-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"476023db-893d-4a8c-86cc-34cfe375f5ef\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.652416 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17b5efe0-880c-4841-b485-c58c950a3113-config-data\") pod \"nova-scheduler-0\" (UID: \"17b5efe0-880c-4841-b485-c58c950a3113\") " pod="openstack/nova-scheduler-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.652451 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17b5efe0-880c-4841-b485-c58c950a3113-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"17b5efe0-880c-4841-b485-c58c950a3113\") " pod="openstack/nova-scheduler-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.652471 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ftwk\" (UniqueName: \"kubernetes.io/projected/17b5efe0-880c-4841-b485-c58c950a3113-kube-api-access-8ftwk\") pod \"nova-scheduler-0\" (UID: \"17b5efe0-880c-4841-b485-c58c950a3113\") " pod="openstack/nova-scheduler-0" Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.655729 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17b5efe0-880c-4841-b485-c58c950a3113-config-data\") pod \"nova-scheduler-0\" (UID: \"17b5efe0-880c-4841-b485-c58c950a3113\") " pod="openstack/nova-scheduler-0" 
Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.657782 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17b5efe0-880c-4841-b485-c58c950a3113-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"17b5efe0-880c-4841-b485-c58c950a3113\") " pod="openstack/nova-scheduler-0"
Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.659742 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/476023db-893d-4a8c-86cc-34cfe375f5ef-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"476023db-893d-4a8c-86cc-34cfe375f5ef\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.660263 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/476023db-893d-4a8c-86cc-34cfe375f5ef-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"476023db-893d-4a8c-86cc-34cfe375f5ef\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.675919 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dgv6t\" (UniqueName: \"kubernetes.io/projected/476023db-893d-4a8c-86cc-34cfe375f5ef-kube-api-access-dgv6t\") pod \"nova-cell1-novncproxy-0\" (UID: \"476023db-893d-4a8c-86cc-34cfe375f5ef\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.676369 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ftwk\" (UniqueName: \"kubernetes.io/projected/17b5efe0-880c-4841-b485-c58c950a3113-kube-api-access-8ftwk\") pod \"nova-scheduler-0\" (UID: \"17b5efe0-880c-4841-b485-c58c950a3113\") " pod="openstack/nova-scheduler-0"
Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.713381 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6f5fcdf7c9-pnv52"
Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.744408 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.745050 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Oct 10 18:05:02 crc kubenswrapper[4799]: I1010 18:05:02.967642 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Oct 10 18:05:03 crc kubenswrapper[4799]: I1010 18:05:03.102017 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"53f2c30d-6bb2-4de5-90d1-febbb40322b2","Type":"ContainerStarted","Data":"6008c6ccc333b728c25764e00a22342b9f2c51a09903b0ab8f0c527e05bd86b5"}
Oct 10 18:05:03 crc kubenswrapper[4799]: I1010 18:05:03.123544 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-qbxgb"]
Oct 10 18:05:03 crc kubenswrapper[4799]: I1010 18:05:03.196165 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Oct 10 18:05:03 crc kubenswrapper[4799]: I1010 18:05:03.242974 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6f5fcdf7c9-pnv52"]
Oct 10 18:05:03 crc kubenswrapper[4799]: I1010 18:05:03.267287 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Oct 10 18:05:03 crc kubenswrapper[4799]: I1010 18:05:03.416270 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="091203ec-3414-41e9-96cd-74f97abd25a7" path="/var/lib/kubelet/pods/091203ec-3414-41e9-96cd-74f97abd25a7/volumes"
Oct 10 18:05:03 crc kubenswrapper[4799]: I1010 18:05:03.417424 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 10 18:05:03 crc kubenswrapper[4799]: I1010 18:05:03.454533 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-rfnmm"]
Oct 10 18:05:03 crc kubenswrapper[4799]: I1010 18:05:03.456140 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-rfnmm"
Oct 10 18:05:03 crc kubenswrapper[4799]: I1010 18:05:03.458067 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Oct 10 18:05:03 crc kubenswrapper[4799]: I1010 18:05:03.458244 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts"
Oct 10 18:05:03 crc kubenswrapper[4799]: I1010 18:05:03.474799 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-rfnmm"]
Oct 10 18:05:03 crc kubenswrapper[4799]: I1010 18:05:03.592703 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8a8adcb-59fe-4f76-a7dd-490195701120-config-data\") pod \"nova-cell1-conductor-db-sync-rfnmm\" (UID: \"a8a8adcb-59fe-4f76-a7dd-490195701120\") " pod="openstack/nova-cell1-conductor-db-sync-rfnmm"
Oct 10 18:05:03 crc kubenswrapper[4799]: I1010 18:05:03.592797 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a8a8adcb-59fe-4f76-a7dd-490195701120-scripts\") pod \"nova-cell1-conductor-db-sync-rfnmm\" (UID: \"a8a8adcb-59fe-4f76-a7dd-490195701120\") " pod="openstack/nova-cell1-conductor-db-sync-rfnmm"
Oct 10 18:05:03 crc kubenswrapper[4799]: I1010 18:05:03.592905 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8a8adcb-59fe-4f76-a7dd-490195701120-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-rfnmm\" (UID: \"a8a8adcb-59fe-4f76-a7dd-490195701120\") " pod="openstack/nova-cell1-conductor-db-sync-rfnmm"
Oct 10 18:05:03 crc kubenswrapper[4799]: I1010 18:05:03.592979 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvtrd\" (UniqueName: \"kubernetes.io/projected/a8a8adcb-59fe-4f76-a7dd-490195701120-kube-api-access-xvtrd\") pod \"nova-cell1-conductor-db-sync-rfnmm\" (UID: \"a8a8adcb-59fe-4f76-a7dd-490195701120\") " pod="openstack/nova-cell1-conductor-db-sync-rfnmm"
Oct 10 18:05:03 crc kubenswrapper[4799]: I1010 18:05:03.694505 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8a8adcb-59fe-4f76-a7dd-490195701120-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-rfnmm\" (UID: \"a8a8adcb-59fe-4f76-a7dd-490195701120\") " pod="openstack/nova-cell1-conductor-db-sync-rfnmm"
Oct 10 18:05:03 crc kubenswrapper[4799]: I1010 18:05:03.694587 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvtrd\" (UniqueName: \"kubernetes.io/projected/a8a8adcb-59fe-4f76-a7dd-490195701120-kube-api-access-xvtrd\") pod \"nova-cell1-conductor-db-sync-rfnmm\" (UID: \"a8a8adcb-59fe-4f76-a7dd-490195701120\") " pod="openstack/nova-cell1-conductor-db-sync-rfnmm"
Oct 10 18:05:03 crc kubenswrapper[4799]: I1010 18:05:03.694659 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8a8adcb-59fe-4f76-a7dd-490195701120-config-data\") pod \"nova-cell1-conductor-db-sync-rfnmm\" (UID: \"a8a8adcb-59fe-4f76-a7dd-490195701120\") " pod="openstack/nova-cell1-conductor-db-sync-rfnmm"
Oct 10 18:05:03 crc kubenswrapper[4799]: I1010 18:05:03.694689 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a8a8adcb-59fe-4f76-a7dd-490195701120-scripts\") pod \"nova-cell1-conductor-db-sync-rfnmm\" (UID: \"a8a8adcb-59fe-4f76-a7dd-490195701120\") " pod="openstack/nova-cell1-conductor-db-sync-rfnmm"
Oct 10 18:05:03 crc kubenswrapper[4799]: I1010 18:05:03.698050 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a8a8adcb-59fe-4f76-a7dd-490195701120-scripts\") pod \"nova-cell1-conductor-db-sync-rfnmm\" (UID: \"a8a8adcb-59fe-4f76-a7dd-490195701120\") " pod="openstack/nova-cell1-conductor-db-sync-rfnmm"
Oct 10 18:05:03 crc kubenswrapper[4799]: I1010 18:05:03.699180 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8a8adcb-59fe-4f76-a7dd-490195701120-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-rfnmm\" (UID: \"a8a8adcb-59fe-4f76-a7dd-490195701120\") " pod="openstack/nova-cell1-conductor-db-sync-rfnmm"
Oct 10 18:05:03 crc kubenswrapper[4799]: I1010 18:05:03.700304 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8a8adcb-59fe-4f76-a7dd-490195701120-config-data\") pod \"nova-cell1-conductor-db-sync-rfnmm\" (UID: \"a8a8adcb-59fe-4f76-a7dd-490195701120\") " pod="openstack/nova-cell1-conductor-db-sync-rfnmm"
Oct 10 18:05:03 crc kubenswrapper[4799]: I1010 18:05:03.715071 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvtrd\" (UniqueName: \"kubernetes.io/projected/a8a8adcb-59fe-4f76-a7dd-490195701120-kube-api-access-xvtrd\") pod \"nova-cell1-conductor-db-sync-rfnmm\" (UID: \"a8a8adcb-59fe-4f76-a7dd-490195701120\") " pod="openstack/nova-cell1-conductor-db-sync-rfnmm"
Oct 10 18:05:03 crc kubenswrapper[4799]: I1010 18:05:03.812557 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-rfnmm"
Oct 10 18:05:04 crc kubenswrapper[4799]: I1010 18:05:04.116152 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-qbxgb" event={"ID":"8d1fbe12-3328-4be8-a6bf-8a89c61bea63","Type":"ContainerStarted","Data":"5d5087310f4cf1ba0ad902d87ab9cb2a26f05f9187f4bc3a3a67d019205c7b3d"}
Oct 10 18:05:04 crc kubenswrapper[4799]: I1010 18:05:04.116215 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-qbxgb" event={"ID":"8d1fbe12-3328-4be8-a6bf-8a89c61bea63","Type":"ContainerStarted","Data":"1668bc47adb8694878a6d823e8584d7b8bc04ab8339c578891a1b10fe5bba30b"}
Oct 10 18:05:04 crc kubenswrapper[4799]: I1010 18:05:04.119134 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"476023db-893d-4a8c-86cc-34cfe375f5ef","Type":"ContainerStarted","Data":"049e54e12e774d232e9d10d9155e6bdc94ba9564bca7719210354ad27e2bc377"}
Oct 10 18:05:04 crc kubenswrapper[4799]: I1010 18:05:04.119178 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"476023db-893d-4a8c-86cc-34cfe375f5ef","Type":"ContainerStarted","Data":"72e47b85b9b2432e680a8c5aef08b6346dd7e00c00d6539dc272dd78a000748c"}
Oct 10 18:05:04 crc kubenswrapper[4799]: I1010 18:05:04.125094 4799 generic.go:334] "Generic (PLEG): container finished" podID="e28f9857-736d-4a60-833f-91faa9eaef2c" containerID="c5855479684612e5c3ccbb7f6671a4969a9bf0cff9ccea33b4843ee8c5baef6c" exitCode=0
Oct 10 18:05:04 crc kubenswrapper[4799]: I1010 18:05:04.125232 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f5fcdf7c9-pnv52" event={"ID":"e28f9857-736d-4a60-833f-91faa9eaef2c","Type":"ContainerDied","Data":"c5855479684612e5c3ccbb7f6671a4969a9bf0cff9ccea33b4843ee8c5baef6c"}
Oct 10 18:05:04 crc kubenswrapper[4799]: I1010 18:05:04.125305 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f5fcdf7c9-pnv52" event={"ID":"e28f9857-736d-4a60-833f-91faa9eaef2c","Type":"ContainerStarted","Data":"2888f0d173a6ffc2819957563d2fe65777c691d04f28ad194a08535516befcd6"}
Oct 10 18:05:04 crc kubenswrapper[4799]: I1010 18:05:04.133406 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"17b5efe0-880c-4841-b485-c58c950a3113","Type":"ContainerStarted","Data":"b4fd51941e4e4416ae068b74365f15a6de1786075e0e1996eab0a797656da628"}
Oct 10 18:05:04 crc kubenswrapper[4799]: I1010 18:05:04.133703 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"17b5efe0-880c-4841-b485-c58c950a3113","Type":"ContainerStarted","Data":"4336649fbb80666a4e1490f7c559c39e7c725b43287bfcd1ae0f8e8c52f3f4ec"}
Oct 10 18:05:04 crc kubenswrapper[4799]: I1010 18:05:04.136896 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-qbxgb" podStartSLOduration=3.13687332 podStartE2EDuration="3.13687332s" podCreationTimestamp="2025-10-10 18:05:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:05:04.129251514 +0000 UTC m=+5597.637575639" watchObservedRunningTime="2025-10-10 18:05:04.13687332 +0000 UTC m=+5597.645197435"
Oct 10 18:05:04 crc kubenswrapper[4799]: I1010 18:05:04.145049 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"53f2c30d-6bb2-4de5-90d1-febbb40322b2","Type":"ContainerStarted","Data":"1869e8bcb00e717067ca7039184534e9c93fcbef5597ada401b92ab4a701c05f"}
Oct 10 18:05:04 crc kubenswrapper[4799]: I1010 18:05:04.145115 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"53f2c30d-6bb2-4de5-90d1-febbb40322b2","Type":"ContainerStarted","Data":"f2f1c66fdffa243236a35cb1f9ed509f7c48e894ed5b3fd4062269c298c2c055"}
Oct 10 18:05:04 crc kubenswrapper[4799]: I1010 18:05:04.152882 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6c155ce1-6bbc-48fd-bfad-6b19f5912e4a","Type":"ContainerStarted","Data":"b01554f0b9052e53e0f060669fc667c715a650b8e49bcf7731f4cf4ebe92073a"}
Oct 10 18:05:04 crc kubenswrapper[4799]: I1010 18:05:04.152926 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6c155ce1-6bbc-48fd-bfad-6b19f5912e4a","Type":"ContainerStarted","Data":"c9c677691cb5c8dbdb15fc3acbd4aa35e8d408876d76c27daf69574c45bffd20"}
Oct 10 18:05:04 crc kubenswrapper[4799]: I1010 18:05:04.152941 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6c155ce1-6bbc-48fd-bfad-6b19f5912e4a","Type":"ContainerStarted","Data":"f309c4891748a041773b8d7189942cb6855cc56b9ea86c931b0c7463f50a0f07"}
Oct 10 18:05:04 crc kubenswrapper[4799]: I1010 18:05:04.169324 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.16930505 podStartE2EDuration="2.16930505s" podCreationTimestamp="2025-10-10 18:05:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:05:04.157332899 +0000 UTC m=+5597.665657024" watchObservedRunningTime="2025-10-10 18:05:04.16930505 +0000 UTC m=+5597.677629165"
Oct 10 18:05:04 crc kubenswrapper[4799]: I1010 18:05:04.175554 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.175536882 podStartE2EDuration="2.175536882s" podCreationTimestamp="2025-10-10 18:05:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:05:04.173868072 +0000 UTC m=+5597.682192187" watchObservedRunningTime="2025-10-10 18:05:04.175536882 +0000 UTC m=+5597.683860997"
Oct 10 18:05:04 crc kubenswrapper[4799]: I1010 18:05:04.196494 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.194619887 podStartE2EDuration="2.194619887s" podCreationTimestamp="2025-10-10 18:05:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:05:04.184940572 +0000 UTC m=+5597.693264707" watchObservedRunningTime="2025-10-10 18:05:04.194619887 +0000 UTC m=+5597.702944002"
Oct 10 18:05:04 crc kubenswrapper[4799]: I1010 18:05:04.222180 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.222153059 podStartE2EDuration="2.222153059s" podCreationTimestamp="2025-10-10 18:05:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:05:04.199340603 +0000 UTC m=+5597.707664718" watchObservedRunningTime="2025-10-10 18:05:04.222153059 +0000 UTC m=+5597.730477184"
Oct 10 18:05:04 crc kubenswrapper[4799]: I1010 18:05:04.272431 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-rfnmm"]
Oct 10 18:05:05 crc kubenswrapper[4799]: I1010 18:05:05.160985 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f5fcdf7c9-pnv52" event={"ID":"e28f9857-736d-4a60-833f-91faa9eaef2c","Type":"ContainerStarted","Data":"a52044ad423ee32529e72e713a84176f092a7d23535e74c5d6c641fe4d5b550d"}
Oct 10 18:05:05 crc kubenswrapper[4799]: I1010 18:05:05.161147 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6f5fcdf7c9-pnv52"
Oct 10 18:05:05 crc kubenswrapper[4799]: I1010 18:05:05.162254 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-rfnmm" event={"ID":"a8a8adcb-59fe-4f76-a7dd-490195701120","Type":"ContainerStarted","Data":"41bb182737dc3bfee9c2f3d6425c1446b6ea9bc2bbbf9c6972496f7df633f981"}
Oct 10 18:05:05 crc kubenswrapper[4799]: I1010 18:05:05.162329 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-rfnmm" event={"ID":"a8a8adcb-59fe-4f76-a7dd-490195701120","Type":"ContainerStarted","Data":"bc38ef38f076664fa36776f047382f33f8a63818f3ffb2b2cfb8c9c5733c1f59"}
Oct 10 18:05:05 crc kubenswrapper[4799]: I1010 18:05:05.182143 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6f5fcdf7c9-pnv52" podStartSLOduration=3.182122417 podStartE2EDuration="3.182122417s" podCreationTimestamp="2025-10-10 18:05:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:05:05.176367667 +0000 UTC m=+5598.684691772" watchObservedRunningTime="2025-10-10 18:05:05.182122417 +0000 UTC m=+5598.690446532"
Oct 10 18:05:05 crc kubenswrapper[4799]: I1010 18:05:05.196515 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-rfnmm" podStartSLOduration=2.196497107 podStartE2EDuration="2.196497107s" podCreationTimestamp="2025-10-10 18:05:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:05:05.190526572 +0000 UTC m=+5598.698850697" watchObservedRunningTime="2025-10-10 18:05:05.196497107 +0000 UTC m=+5598.704821222"
Oct 10 18:05:07 crc kubenswrapper[4799]: I1010 18:05:07.204158 4799 generic.go:334] "Generic (PLEG): container finished" podID="a8a8adcb-59fe-4f76-a7dd-490195701120" containerID="41bb182737dc3bfee9c2f3d6425c1446b6ea9bc2bbbf9c6972496f7df633f981" exitCode=0
Oct 10 18:05:07 crc kubenswrapper[4799]: I1010 18:05:07.204887 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-rfnmm" event={"ID":"a8a8adcb-59fe-4f76-a7dd-490195701120","Type":"ContainerDied","Data":"41bb182737dc3bfee9c2f3d6425c1446b6ea9bc2bbbf9c6972496f7df633f981"}
Oct 10 18:05:07 crc kubenswrapper[4799]: I1010 18:05:07.431542 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Oct 10 18:05:07 crc kubenswrapper[4799]: I1010 18:05:07.431613 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Oct 10 18:05:07 crc kubenswrapper[4799]: I1010 18:05:07.744831 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
Oct 10 18:05:07 crc kubenswrapper[4799]: I1010 18:05:07.746290 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Oct 10 18:05:08 crc kubenswrapper[4799]: I1010 18:05:08.673486 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-rfnmm"
Oct 10 18:05:08 crc kubenswrapper[4799]: I1010 18:05:08.814947 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xvtrd\" (UniqueName: \"kubernetes.io/projected/a8a8adcb-59fe-4f76-a7dd-490195701120-kube-api-access-xvtrd\") pod \"a8a8adcb-59fe-4f76-a7dd-490195701120\" (UID: \"a8a8adcb-59fe-4f76-a7dd-490195701120\") "
Oct 10 18:05:08 crc kubenswrapper[4799]: I1010 18:05:08.815401 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8a8adcb-59fe-4f76-a7dd-490195701120-config-data\") pod \"a8a8adcb-59fe-4f76-a7dd-490195701120\" (UID: \"a8a8adcb-59fe-4f76-a7dd-490195701120\") "
Oct 10 18:05:08 crc kubenswrapper[4799]: I1010 18:05:08.815487 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8a8adcb-59fe-4f76-a7dd-490195701120-combined-ca-bundle\") pod \"a8a8adcb-59fe-4f76-a7dd-490195701120\" (UID: \"a8a8adcb-59fe-4f76-a7dd-490195701120\") "
Oct 10 18:05:08 crc kubenswrapper[4799]: I1010 18:05:08.815710 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a8a8adcb-59fe-4f76-a7dd-490195701120-scripts\") pod \"a8a8adcb-59fe-4f76-a7dd-490195701120\" (UID: \"a8a8adcb-59fe-4f76-a7dd-490195701120\") "
Oct 10 18:05:08 crc kubenswrapper[4799]: I1010 18:05:08.821276 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8a8adcb-59fe-4f76-a7dd-490195701120-kube-api-access-xvtrd" (OuterVolumeSpecName: "kube-api-access-xvtrd") pod "a8a8adcb-59fe-4f76-a7dd-490195701120" (UID: "a8a8adcb-59fe-4f76-a7dd-490195701120"). InnerVolumeSpecName "kube-api-access-xvtrd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 18:05:08 crc kubenswrapper[4799]: I1010 18:05:08.823280 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8a8adcb-59fe-4f76-a7dd-490195701120-scripts" (OuterVolumeSpecName: "scripts") pod "a8a8adcb-59fe-4f76-a7dd-490195701120" (UID: "a8a8adcb-59fe-4f76-a7dd-490195701120"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:05:08 crc kubenswrapper[4799]: I1010 18:05:08.848516 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8a8adcb-59fe-4f76-a7dd-490195701120-config-data" (OuterVolumeSpecName: "config-data") pod "a8a8adcb-59fe-4f76-a7dd-490195701120" (UID: "a8a8adcb-59fe-4f76-a7dd-490195701120"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:05:08 crc kubenswrapper[4799]: I1010 18:05:08.871888 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8a8adcb-59fe-4f76-a7dd-490195701120-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a8a8adcb-59fe-4f76-a7dd-490195701120" (UID: "a8a8adcb-59fe-4f76-a7dd-490195701120"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:05:08 crc kubenswrapper[4799]: I1010 18:05:08.918121 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8a8adcb-59fe-4f76-a7dd-490195701120-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 10 18:05:08 crc kubenswrapper[4799]: I1010 18:05:08.918182 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a8a8adcb-59fe-4f76-a7dd-490195701120-scripts\") on node \"crc\" DevicePath \"\""
Oct 10 18:05:08 crc kubenswrapper[4799]: I1010 18:05:08.918204 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xvtrd\" (UniqueName: \"kubernetes.io/projected/a8a8adcb-59fe-4f76-a7dd-490195701120-kube-api-access-xvtrd\") on node \"crc\" DevicePath \"\""
Oct 10 18:05:08 crc kubenswrapper[4799]: I1010 18:05:08.918222 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8a8adcb-59fe-4f76-a7dd-490195701120-config-data\") on node \"crc\" DevicePath \"\""
Oct 10 18:05:09 crc kubenswrapper[4799]: I1010 18:05:09.236685 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-rfnmm"
Oct 10 18:05:09 crc kubenswrapper[4799]: I1010 18:05:09.236692 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-rfnmm" event={"ID":"a8a8adcb-59fe-4f76-a7dd-490195701120","Type":"ContainerDied","Data":"bc38ef38f076664fa36776f047382f33f8a63818f3ffb2b2cfb8c9c5733c1f59"}
Oct 10 18:05:09 crc kubenswrapper[4799]: I1010 18:05:09.236742 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bc38ef38f076664fa36776f047382f33f8a63818f3ffb2b2cfb8c9c5733c1f59"
Oct 10 18:05:09 crc kubenswrapper[4799]: I1010 18:05:09.238731 4799 generic.go:334] "Generic (PLEG): container finished" podID="8d1fbe12-3328-4be8-a6bf-8a89c61bea63" containerID="5d5087310f4cf1ba0ad902d87ab9cb2a26f05f9187f4bc3a3a67d019205c7b3d" exitCode=0
Oct 10 18:05:09 crc kubenswrapper[4799]: I1010 18:05:09.238799 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-qbxgb" event={"ID":"8d1fbe12-3328-4be8-a6bf-8a89c61bea63","Type":"ContainerDied","Data":"5d5087310f4cf1ba0ad902d87ab9cb2a26f05f9187f4bc3a3a67d019205c7b3d"}
Oct 10 18:05:09 crc kubenswrapper[4799]: I1010 18:05:09.367527 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"]
Oct 10 18:05:09 crc kubenswrapper[4799]: E1010 18:05:09.368273 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8a8adcb-59fe-4f76-a7dd-490195701120" containerName="nova-cell1-conductor-db-sync"
Oct 10 18:05:09 crc kubenswrapper[4799]: I1010 18:05:09.368361 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8a8adcb-59fe-4f76-a7dd-490195701120" containerName="nova-cell1-conductor-db-sync"
Oct 10 18:05:09 crc kubenswrapper[4799]: I1010 18:05:09.368647 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8a8adcb-59fe-4f76-a7dd-490195701120" containerName="nova-cell1-conductor-db-sync"
Oct 10 18:05:09 crc kubenswrapper[4799]: I1010 18:05:09.369527 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Oct 10 18:05:09 crc kubenswrapper[4799]: I1010 18:05:09.373884 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Oct 10 18:05:09 crc kubenswrapper[4799]: I1010 18:05:09.396370 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Oct 10 18:05:09 crc kubenswrapper[4799]: I1010 18:05:09.533687 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c1c1a59-308c-408a-9368-9c3be24cc383-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"6c1c1a59-308c-408a-9368-9c3be24cc383\") " pod="openstack/nova-cell1-conductor-0"
Oct 10 18:05:09 crc kubenswrapper[4799]: I1010 18:05:09.533873 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6r2d4\" (UniqueName: \"kubernetes.io/projected/6c1c1a59-308c-408a-9368-9c3be24cc383-kube-api-access-6r2d4\") pod \"nova-cell1-conductor-0\" (UID: \"6c1c1a59-308c-408a-9368-9c3be24cc383\") " pod="openstack/nova-cell1-conductor-0"
Oct 10 18:05:09 crc kubenswrapper[4799]: I1010 18:05:09.533907 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c1c1a59-308c-408a-9368-9c3be24cc383-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"6c1c1a59-308c-408a-9368-9c3be24cc383\") " pod="openstack/nova-cell1-conductor-0"
Oct 10 18:05:09 crc kubenswrapper[4799]: I1010 18:05:09.636144 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6r2d4\" (UniqueName: \"kubernetes.io/projected/6c1c1a59-308c-408a-9368-9c3be24cc383-kube-api-access-6r2d4\") pod \"nova-cell1-conductor-0\" (UID: \"6c1c1a59-308c-408a-9368-9c3be24cc383\") " pod="openstack/nova-cell1-conductor-0"
Oct 10 18:05:09 crc kubenswrapper[4799]: I1010 18:05:09.636247 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c1c1a59-308c-408a-9368-9c3be24cc383-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"6c1c1a59-308c-408a-9368-9c3be24cc383\") " pod="openstack/nova-cell1-conductor-0"
Oct 10 18:05:09 crc kubenswrapper[4799]: I1010 18:05:09.636323 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c1c1a59-308c-408a-9368-9c3be24cc383-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"6c1c1a59-308c-408a-9368-9c3be24cc383\") " pod="openstack/nova-cell1-conductor-0"
Oct 10 18:05:09 crc kubenswrapper[4799]: I1010 18:05:09.642876 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c1c1a59-308c-408a-9368-9c3be24cc383-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"6c1c1a59-308c-408a-9368-9c3be24cc383\") " pod="openstack/nova-cell1-conductor-0"
Oct 10 18:05:09 crc kubenswrapper[4799]: I1010 18:05:09.650343 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c1c1a59-308c-408a-9368-9c3be24cc383-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"6c1c1a59-308c-408a-9368-9c3be24cc383\") " pod="openstack/nova-cell1-conductor-0"
Oct 10 18:05:09 crc kubenswrapper[4799]: I1010 18:05:09.653400 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6r2d4\" (UniqueName: \"kubernetes.io/projected/6c1c1a59-308c-408a-9368-9c3be24cc383-kube-api-access-6r2d4\") pod \"nova-cell1-conductor-0\" (UID: \"6c1c1a59-308c-408a-9368-9c3be24cc383\") " pod="openstack/nova-cell1-conductor-0"
Oct 10 18:05:09 crc kubenswrapper[4799]: I1010 18:05:09.693007 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Oct 10 18:05:10 crc kubenswrapper[4799]: I1010 18:05:10.197271 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Oct 10 18:05:10 crc kubenswrapper[4799]: W1010 18:05:10.202203 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c1c1a59_308c_408a_9368_9c3be24cc383.slice/crio-ecf0964db17339919591402743946b0da56a897eb1ffbe450ee7311cf9f361b7 WatchSource:0}: Error finding container ecf0964db17339919591402743946b0da56a897eb1ffbe450ee7311cf9f361b7: Status 404 returned error can't find the container with id ecf0964db17339919591402743946b0da56a897eb1ffbe450ee7311cf9f361b7
Oct 10 18:05:10 crc kubenswrapper[4799]: I1010 18:05:10.257917 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"6c1c1a59-308c-408a-9368-9c3be24cc383","Type":"ContainerStarted","Data":"ecf0964db17339919591402743946b0da56a897eb1ffbe450ee7311cf9f361b7"}
Oct 10 18:05:10 crc kubenswrapper[4799]: I1010 18:05:10.544083 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-qbxgb"
Oct 10 18:05:10 crc kubenswrapper[4799]: I1010 18:05:10.679829 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d1fbe12-3328-4be8-a6bf-8a89c61bea63-scripts\") pod \"8d1fbe12-3328-4be8-a6bf-8a89c61bea63\" (UID: \"8d1fbe12-3328-4be8-a6bf-8a89c61bea63\") "
Oct 10 18:05:10 crc kubenswrapper[4799]: I1010 18:05:10.680279 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d1fbe12-3328-4be8-a6bf-8a89c61bea63-combined-ca-bundle\") pod \"8d1fbe12-3328-4be8-a6bf-8a89c61bea63\" (UID: \"8d1fbe12-3328-4be8-a6bf-8a89c61bea63\") "
Oct 10 18:05:10 crc kubenswrapper[4799]: I1010 18:05:10.680447 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kjpbf\" (UniqueName: \"kubernetes.io/projected/8d1fbe12-3328-4be8-a6bf-8a89c61bea63-kube-api-access-kjpbf\") pod \"8d1fbe12-3328-4be8-a6bf-8a89c61bea63\" (UID: \"8d1fbe12-3328-4be8-a6bf-8a89c61bea63\") "
Oct 10 18:05:10 crc kubenswrapper[4799]: I1010 18:05:10.680575 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d1fbe12-3328-4be8-a6bf-8a89c61bea63-config-data\") pod \"8d1fbe12-3328-4be8-a6bf-8a89c61bea63\" (UID: \"8d1fbe12-3328-4be8-a6bf-8a89c61bea63\") "
Oct 10 18:05:10 crc kubenswrapper[4799]: I1010 18:05:10.698204 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d1fbe12-3328-4be8-a6bf-8a89c61bea63-scripts" (OuterVolumeSpecName: "scripts") pod "8d1fbe12-3328-4be8-a6bf-8a89c61bea63" (UID: "8d1fbe12-3328-4be8-a6bf-8a89c61bea63"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:05:10 crc kubenswrapper[4799]: I1010 18:05:10.698420 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d1fbe12-3328-4be8-a6bf-8a89c61bea63-kube-api-access-kjpbf" (OuterVolumeSpecName: "kube-api-access-kjpbf") pod "8d1fbe12-3328-4be8-a6bf-8a89c61bea63" (UID: "8d1fbe12-3328-4be8-a6bf-8a89c61bea63"). InnerVolumeSpecName "kube-api-access-kjpbf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 18:05:10 crc kubenswrapper[4799]: I1010 18:05:10.710979 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d1fbe12-3328-4be8-a6bf-8a89c61bea63-config-data" (OuterVolumeSpecName: "config-data") pod "8d1fbe12-3328-4be8-a6bf-8a89c61bea63" (UID: "8d1fbe12-3328-4be8-a6bf-8a89c61bea63"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:05:10 crc kubenswrapper[4799]: I1010 18:05:10.726114 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d1fbe12-3328-4be8-a6bf-8a89c61bea63-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8d1fbe12-3328-4be8-a6bf-8a89c61bea63" (UID: "8d1fbe12-3328-4be8-a6bf-8a89c61bea63"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:05:10 crc kubenswrapper[4799]: I1010 18:05:10.782529 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kjpbf\" (UniqueName: \"kubernetes.io/projected/8d1fbe12-3328-4be8-a6bf-8a89c61bea63-kube-api-access-kjpbf\") on node \"crc\" DevicePath \"\""
Oct 10 18:05:10 crc kubenswrapper[4799]: I1010 18:05:10.782571 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d1fbe12-3328-4be8-a6bf-8a89c61bea63-config-data\") on node \"crc\" DevicePath \"\""
Oct 10 18:05:10 crc kubenswrapper[4799]: I1010 18:05:10.782581 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d1fbe12-3328-4be8-a6bf-8a89c61bea63-scripts\") on node \"crc\" DevicePath \"\""
Oct 10 18:05:10 crc kubenswrapper[4799]: I1010 18:05:10.782589 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d1fbe12-3328-4be8-a6bf-8a89c61bea63-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 10 18:05:11 crc kubenswrapper[4799]: I1010 18:05:11.270954 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-qbxgb" event={"ID":"8d1fbe12-3328-4be8-a6bf-8a89c61bea63","Type":"ContainerDied","Data":"1668bc47adb8694878a6d823e8584d7b8bc04ab8339c578891a1b10fe5bba30b"}
Oct 10 18:05:11 crc kubenswrapper[4799]: I1010 18:05:11.271000 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1668bc47adb8694878a6d823e8584d7b8bc04ab8339c578891a1b10fe5bba30b"
Oct 10 18:05:11 crc kubenswrapper[4799]: I1010 18:05:11.271402 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-qbxgb"
Oct 10 18:05:11 crc kubenswrapper[4799]: I1010 18:05:11.273621 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"6c1c1a59-308c-408a-9368-9c3be24cc383","Type":"ContainerStarted","Data":"6f3c40b2e02fe491d7a8cb2cd3cd4e974eb3094ffd182dc796407938e39fc38c"}
Oct 10 18:05:11 crc kubenswrapper[4799]: I1010 18:05:11.275417 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0"
Oct 10 18:05:11 crc kubenswrapper[4799]: I1010 18:05:11.319242 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.31920638 podStartE2EDuration="2.31920638s" podCreationTimestamp="2025-10-10 18:05:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:05:11.304990573 +0000 UTC m=+5604.813314728" watchObservedRunningTime="2025-10-10 18:05:11.31920638 +0000 UTC m=+5604.827530515"
Oct 10 18:05:11 crc kubenswrapper[4799]: I1010 18:05:11.458880 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Oct 10 18:05:11 crc kubenswrapper[4799]: I1010 18:05:11.459160 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6c155ce1-6bbc-48fd-bfad-6b19f5912e4a" containerName="nova-api-log" containerID="cri-o://c9c677691cb5c8dbdb15fc3acbd4aa35e8d408876d76c27daf69574c45bffd20" gracePeriod=30
Oct 10 18:05:11 crc kubenswrapper[4799]: I1010 18:05:11.459300 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6c155ce1-6bbc-48fd-bfad-6b19f5912e4a" containerName="nova-api-api" containerID="cri-o://b01554f0b9052e53e0f060669fc667c715a650b8e49bcf7731f4cf4ebe92073a" gracePeriod=30
Oct 10 18:05:11 crc kubenswrapper[4799]: I1010 18:05:11.477452 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 10 18:05:11 crc kubenswrapper[4799]: I1010 18:05:11.477733 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="17b5efe0-880c-4841-b485-c58c950a3113" containerName="nova-scheduler-scheduler" containerID="cri-o://b4fd51941e4e4416ae068b74365f15a6de1786075e0e1996eab0a797656da628" gracePeriod=30
Oct 10 18:05:11 crc kubenswrapper[4799]: I1010 18:05:11.507893 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Oct 10 18:05:11 crc kubenswrapper[4799]: I1010 18:05:11.508395 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="53f2c30d-6bb2-4de5-90d1-febbb40322b2" containerName="nova-metadata-metadata" containerID="cri-o://1869e8bcb00e717067ca7039184534e9c93fcbef5597ada401b92ab4a701c05f" gracePeriod=30
Oct 10 18:05:11 crc kubenswrapper[4799]: I1010 18:05:11.508190 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="53f2c30d-6bb2-4de5-90d1-febbb40322b2" containerName="nova-metadata-log" containerID="cri-o://f2f1c66fdffa243236a35cb1f9ed509f7c48e894ed5b3fd4062269c298c2c055" gracePeriod=30
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.158824 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.163561 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.284500 4799 generic.go:334] "Generic (PLEG): container finished" podID="6c155ce1-6bbc-48fd-bfad-6b19f5912e4a" containerID="b01554f0b9052e53e0f060669fc667c715a650b8e49bcf7731f4cf4ebe92073a" exitCode=0
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.284539 4799 generic.go:334] "Generic (PLEG): container finished" podID="6c155ce1-6bbc-48fd-bfad-6b19f5912e4a" containerID="c9c677691cb5c8dbdb15fc3acbd4aa35e8d408876d76c27daf69574c45bffd20" exitCode=143
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.284576 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.284560 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6c155ce1-6bbc-48fd-bfad-6b19f5912e4a","Type":"ContainerDied","Data":"b01554f0b9052e53e0f060669fc667c715a650b8e49bcf7731f4cf4ebe92073a"}
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.284771 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6c155ce1-6bbc-48fd-bfad-6b19f5912e4a","Type":"ContainerDied","Data":"c9c677691cb5c8dbdb15fc3acbd4aa35e8d408876d76c27daf69574c45bffd20"}
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.284795 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6c155ce1-6bbc-48fd-bfad-6b19f5912e4a","Type":"ContainerDied","Data":"f309c4891748a041773b8d7189942cb6855cc56b9ea86c931b0c7463f50a0f07"}
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.284818 4799 scope.go:117] "RemoveContainer" containerID="b01554f0b9052e53e0f060669fc667c715a650b8e49bcf7731f4cf4ebe92073a"
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.292732 4799 generic.go:334] "Generic (PLEG): container finished" podID="53f2c30d-6bb2-4de5-90d1-febbb40322b2" containerID="1869e8bcb00e717067ca7039184534e9c93fcbef5597ada401b92ab4a701c05f" exitCode=0
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.292798 4799 generic.go:334] "Generic (PLEG): container finished" podID="53f2c30d-6bb2-4de5-90d1-febbb40322b2" containerID="f2f1c66fdffa243236a35cb1f9ed509f7c48e894ed5b3fd4062269c298c2c055" exitCode=143
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.293392 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.293954 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"53f2c30d-6bb2-4de5-90d1-febbb40322b2","Type":"ContainerDied","Data":"1869e8bcb00e717067ca7039184534e9c93fcbef5597ada401b92ab4a701c05f"}
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.293994 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"53f2c30d-6bb2-4de5-90d1-febbb40322b2","Type":"ContainerDied","Data":"f2f1c66fdffa243236a35cb1f9ed509f7c48e894ed5b3fd4062269c298c2c055"}
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.294026 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"53f2c30d-6bb2-4de5-90d1-febbb40322b2","Type":"ContainerDied","Data":"6008c6ccc333b728c25764e00a22342b9f2c51a09903b0ab8f0c527e05bd86b5"}
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.311850 4799 scope.go:117] "RemoveContainer" containerID="c9c677691cb5c8dbdb15fc3acbd4aa35e8d408876d76c27daf69574c45bffd20"
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.322651 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c155ce1-6bbc-48fd-bfad-6b19f5912e4a-config-data\") pod \"6c155ce1-6bbc-48fd-bfad-6b19f5912e4a\" (UID: \"6c155ce1-6bbc-48fd-bfad-6b19f5912e4a\") "
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.322803 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53f2c30d-6bb2-4de5-90d1-febbb40322b2-combined-ca-bundle\") pod \"53f2c30d-6bb2-4de5-90d1-febbb40322b2\" (UID: \"53f2c30d-6bb2-4de5-90d1-febbb40322b2\") "
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.323080 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c155ce1-6bbc-48fd-bfad-6b19f5912e4a-combined-ca-bundle\") pod \"6c155ce1-6bbc-48fd-bfad-6b19f5912e4a\" (UID: \"6c155ce1-6bbc-48fd-bfad-6b19f5912e4a\") "
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.323121 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c155ce1-6bbc-48fd-bfad-6b19f5912e4a-logs\") pod \"6c155ce1-6bbc-48fd-bfad-6b19f5912e4a\" (UID: \"6c155ce1-6bbc-48fd-bfad-6b19f5912e4a\") "
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.323149 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53f2c30d-6bb2-4de5-90d1-febbb40322b2-logs\") pod \"53f2c30d-6bb2-4de5-90d1-febbb40322b2\" (UID: \"53f2c30d-6bb2-4de5-90d1-febbb40322b2\") "
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.323255 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53f2c30d-6bb2-4de5-90d1-febbb40322b2-config-data\") pod \"53f2c30d-6bb2-4de5-90d1-febbb40322b2\" (UID: \"53f2c30d-6bb2-4de5-90d1-febbb40322b2\") "
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.323288 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kq7b2\" (UniqueName: \"kubernetes.io/projected/53f2c30d-6bb2-4de5-90d1-febbb40322b2-kube-api-access-kq7b2\") pod \"53f2c30d-6bb2-4de5-90d1-febbb40322b2\" (UID: \"53f2c30d-6bb2-4de5-90d1-febbb40322b2\") "
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.323343 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fg956\" (UniqueName: \"kubernetes.io/projected/6c155ce1-6bbc-48fd-bfad-6b19f5912e4a-kube-api-access-fg956\") pod \"6c155ce1-6bbc-48fd-bfad-6b19f5912e4a\" (UID: \"6c155ce1-6bbc-48fd-bfad-6b19f5912e4a\") "
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.323596 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c155ce1-6bbc-48fd-bfad-6b19f5912e4a-logs" (OuterVolumeSpecName: "logs") pod "6c155ce1-6bbc-48fd-bfad-6b19f5912e4a" (UID: "6c155ce1-6bbc-48fd-bfad-6b19f5912e4a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.323657 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53f2c30d-6bb2-4de5-90d1-febbb40322b2-logs" (OuterVolumeSpecName: "logs") pod "53f2c30d-6bb2-4de5-90d1-febbb40322b2" (UID: "53f2c30d-6bb2-4de5-90d1-febbb40322b2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.323898 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c155ce1-6bbc-48fd-bfad-6b19f5912e4a-logs\") on node \"crc\" DevicePath \"\""
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.323919 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53f2c30d-6bb2-4de5-90d1-febbb40322b2-logs\") on node \"crc\" DevicePath \"\""
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.329044 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53f2c30d-6bb2-4de5-90d1-febbb40322b2-kube-api-access-kq7b2" (OuterVolumeSpecName: "kube-api-access-kq7b2") pod "53f2c30d-6bb2-4de5-90d1-febbb40322b2" (UID: "53f2c30d-6bb2-4de5-90d1-febbb40322b2"). InnerVolumeSpecName "kube-api-access-kq7b2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.329938 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c155ce1-6bbc-48fd-bfad-6b19f5912e4a-kube-api-access-fg956" (OuterVolumeSpecName: "kube-api-access-fg956") pod "6c155ce1-6bbc-48fd-bfad-6b19f5912e4a" (UID: "6c155ce1-6bbc-48fd-bfad-6b19f5912e4a"). InnerVolumeSpecName "kube-api-access-fg956". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.335054 4799 scope.go:117] "RemoveContainer" containerID="b01554f0b9052e53e0f060669fc667c715a650b8e49bcf7731f4cf4ebe92073a"
Oct 10 18:05:12 crc kubenswrapper[4799]: E1010 18:05:12.335380 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b01554f0b9052e53e0f060669fc667c715a650b8e49bcf7731f4cf4ebe92073a\": container with ID starting with b01554f0b9052e53e0f060669fc667c715a650b8e49bcf7731f4cf4ebe92073a not found: ID does not exist" containerID="b01554f0b9052e53e0f060669fc667c715a650b8e49bcf7731f4cf4ebe92073a"
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.335420 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b01554f0b9052e53e0f060669fc667c715a650b8e49bcf7731f4cf4ebe92073a"} err="failed to get container status \"b01554f0b9052e53e0f060669fc667c715a650b8e49bcf7731f4cf4ebe92073a\": rpc error: code = NotFound desc = could not find container \"b01554f0b9052e53e0f060669fc667c715a650b8e49bcf7731f4cf4ebe92073a\": container with ID starting with b01554f0b9052e53e0f060669fc667c715a650b8e49bcf7731f4cf4ebe92073a not found: ID does not exist"
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.335446 4799 scope.go:117] "RemoveContainer" containerID="c9c677691cb5c8dbdb15fc3acbd4aa35e8d408876d76c27daf69574c45bffd20"
Oct 10 18:05:12 crc kubenswrapper[4799]: E1010 18:05:12.335837 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9c677691cb5c8dbdb15fc3acbd4aa35e8d408876d76c27daf69574c45bffd20\": container with ID starting with c9c677691cb5c8dbdb15fc3acbd4aa35e8d408876d76c27daf69574c45bffd20 not found: ID does not exist" containerID="c9c677691cb5c8dbdb15fc3acbd4aa35e8d408876d76c27daf69574c45bffd20"
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.335862 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9c677691cb5c8dbdb15fc3acbd4aa35e8d408876d76c27daf69574c45bffd20"} err="failed to get container status \"c9c677691cb5c8dbdb15fc3acbd4aa35e8d408876d76c27daf69574c45bffd20\": rpc error: code = NotFound desc = could not find container \"c9c677691cb5c8dbdb15fc3acbd4aa35e8d408876d76c27daf69574c45bffd20\": container with ID starting with c9c677691cb5c8dbdb15fc3acbd4aa35e8d408876d76c27daf69574c45bffd20 not found: ID does not exist"
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.335878 4799 scope.go:117] "RemoveContainer" containerID="b01554f0b9052e53e0f060669fc667c715a650b8e49bcf7731f4cf4ebe92073a"
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.336164 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b01554f0b9052e53e0f060669fc667c715a650b8e49bcf7731f4cf4ebe92073a"} err="failed to get container status \"b01554f0b9052e53e0f060669fc667c715a650b8e49bcf7731f4cf4ebe92073a\": rpc error: code = NotFound desc = could not find container \"b01554f0b9052e53e0f060669fc667c715a650b8e49bcf7731f4cf4ebe92073a\": container with ID starting with b01554f0b9052e53e0f060669fc667c715a650b8e49bcf7731f4cf4ebe92073a not found: ID does not exist"
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.336209 4799 scope.go:117] "RemoveContainer" containerID="c9c677691cb5c8dbdb15fc3acbd4aa35e8d408876d76c27daf69574c45bffd20"
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.336468 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9c677691cb5c8dbdb15fc3acbd4aa35e8d408876d76c27daf69574c45bffd20"} err="failed to get container status \"c9c677691cb5c8dbdb15fc3acbd4aa35e8d408876d76c27daf69574c45bffd20\": rpc error: code = NotFound desc = could not find container \"c9c677691cb5c8dbdb15fc3acbd4aa35e8d408876d76c27daf69574c45bffd20\": container with ID starting with c9c677691cb5c8dbdb15fc3acbd4aa35e8d408876d76c27daf69574c45bffd20 not found: ID does not exist"
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.336491 4799 scope.go:117] "RemoveContainer" containerID="1869e8bcb00e717067ca7039184534e9c93fcbef5597ada401b92ab4a701c05f"
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.347735 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53f2c30d-6bb2-4de5-90d1-febbb40322b2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "53f2c30d-6bb2-4de5-90d1-febbb40322b2" (UID: "53f2c30d-6bb2-4de5-90d1-febbb40322b2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.350267 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c155ce1-6bbc-48fd-bfad-6b19f5912e4a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6c155ce1-6bbc-48fd-bfad-6b19f5912e4a" (UID: "6c155ce1-6bbc-48fd-bfad-6b19f5912e4a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.352117 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53f2c30d-6bb2-4de5-90d1-febbb40322b2-config-data" (OuterVolumeSpecName: "config-data") pod "53f2c30d-6bb2-4de5-90d1-febbb40322b2" (UID: "53f2c30d-6bb2-4de5-90d1-febbb40322b2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.356467 4799 scope.go:117] "RemoveContainer" containerID="f2f1c66fdffa243236a35cb1f9ed509f7c48e894ed5b3fd4062269c298c2c055"
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.370774 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c155ce1-6bbc-48fd-bfad-6b19f5912e4a-config-data" (OuterVolumeSpecName: "config-data") pod "6c155ce1-6bbc-48fd-bfad-6b19f5912e4a" (UID: "6c155ce1-6bbc-48fd-bfad-6b19f5912e4a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.384469 4799 scope.go:117] "RemoveContainer" containerID="1869e8bcb00e717067ca7039184534e9c93fcbef5597ada401b92ab4a701c05f"
Oct 10 18:05:12 crc kubenswrapper[4799]: E1010 18:05:12.384826 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1869e8bcb00e717067ca7039184534e9c93fcbef5597ada401b92ab4a701c05f\": container with ID starting with 1869e8bcb00e717067ca7039184534e9c93fcbef5597ada401b92ab4a701c05f not found: ID does not exist" containerID="1869e8bcb00e717067ca7039184534e9c93fcbef5597ada401b92ab4a701c05f"
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.384856 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1869e8bcb00e717067ca7039184534e9c93fcbef5597ada401b92ab4a701c05f"} err="failed to get container status \"1869e8bcb00e717067ca7039184534e9c93fcbef5597ada401b92ab4a701c05f\": rpc error: code = NotFound desc = could not find container \"1869e8bcb00e717067ca7039184534e9c93fcbef5597ada401b92ab4a701c05f\": container with ID starting with 1869e8bcb00e717067ca7039184534e9c93fcbef5597ada401b92ab4a701c05f not found: ID does not exist"
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.384877 4799 scope.go:117] "RemoveContainer" containerID="f2f1c66fdffa243236a35cb1f9ed509f7c48e894ed5b3fd4062269c298c2c055"
Oct 10 18:05:12 crc kubenswrapper[4799]: E1010 18:05:12.385165 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2f1c66fdffa243236a35cb1f9ed509f7c48e894ed5b3fd4062269c298c2c055\": container with ID starting with f2f1c66fdffa243236a35cb1f9ed509f7c48e894ed5b3fd4062269c298c2c055 not found: ID does not exist" containerID="f2f1c66fdffa243236a35cb1f9ed509f7c48e894ed5b3fd4062269c298c2c055"
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.385183 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2f1c66fdffa243236a35cb1f9ed509f7c48e894ed5b3fd4062269c298c2c055"} err="failed to get container status \"f2f1c66fdffa243236a35cb1f9ed509f7c48e894ed5b3fd4062269c298c2c055\": rpc error: code = NotFound desc = could not find container \"f2f1c66fdffa243236a35cb1f9ed509f7c48e894ed5b3fd4062269c298c2c055\": container with ID starting with f2f1c66fdffa243236a35cb1f9ed509f7c48e894ed5b3fd4062269c298c2c055 not found: ID does not exist"
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.385196 4799 scope.go:117] "RemoveContainer" containerID="1869e8bcb00e717067ca7039184534e9c93fcbef5597ada401b92ab4a701c05f"
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.385334 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1869e8bcb00e717067ca7039184534e9c93fcbef5597ada401b92ab4a701c05f"} err="failed to get container status \"1869e8bcb00e717067ca7039184534e9c93fcbef5597ada401b92ab4a701c05f\": rpc error: code = NotFound desc = could not find container \"1869e8bcb00e717067ca7039184534e9c93fcbef5597ada401b92ab4a701c05f\": container with ID starting with 1869e8bcb00e717067ca7039184534e9c93fcbef5597ada401b92ab4a701c05f not found: ID does not exist"
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.385351 4799 scope.go:117] "RemoveContainer" containerID="f2f1c66fdffa243236a35cb1f9ed509f7c48e894ed5b3fd4062269c298c2c055"
Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.385497 4799
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2f1c66fdffa243236a35cb1f9ed509f7c48e894ed5b3fd4062269c298c2c055"} err="failed to get container status \"f2f1c66fdffa243236a35cb1f9ed509f7c48e894ed5b3fd4062269c298c2c055\": rpc error: code = NotFound desc = could not find container \"f2f1c66fdffa243236a35cb1f9ed509f7c48e894ed5b3fd4062269c298c2c055\": container with ID starting with f2f1c66fdffa243236a35cb1f9ed509f7c48e894ed5b3fd4062269c298c2c055 not found: ID does not exist" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.425749 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c155ce1-6bbc-48fd-bfad-6b19f5912e4a-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.426061 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53f2c30d-6bb2-4de5-90d1-febbb40322b2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.426080 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c155ce1-6bbc-48fd-bfad-6b19f5912e4a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.426089 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53f2c30d-6bb2-4de5-90d1-febbb40322b2-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.426099 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kq7b2\" (UniqueName: \"kubernetes.io/projected/53f2c30d-6bb2-4de5-90d1-febbb40322b2-kube-api-access-kq7b2\") on node \"crc\" DevicePath \"\"" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.426117 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fg956\" (UniqueName: \"kubernetes.io/projected/6c155ce1-6bbc-48fd-bfad-6b19f5912e4a-kube-api-access-fg956\") on node \"crc\" DevicePath \"\"" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.622150 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.638434 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.652527 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 10 18:05:12 crc kubenswrapper[4799]: E1010 18:05:12.653178 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d1fbe12-3328-4be8-a6bf-8a89c61bea63" containerName="nova-manage" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.653275 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d1fbe12-3328-4be8-a6bf-8a89c61bea63" containerName="nova-manage" Oct 10 18:05:12 crc kubenswrapper[4799]: E1010 18:05:12.653373 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c155ce1-6bbc-48fd-bfad-6b19f5912e4a" containerName="nova-api-api" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.653443 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c155ce1-6bbc-48fd-bfad-6b19f5912e4a" containerName="nova-api-api" Oct 10 18:05:12 crc kubenswrapper[4799]: E1010 18:05:12.653515 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53f2c30d-6bb2-4de5-90d1-febbb40322b2" 
containerName="nova-metadata-log" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.653603 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="53f2c30d-6bb2-4de5-90d1-febbb40322b2" containerName="nova-metadata-log" Oct 10 18:05:12 crc kubenswrapper[4799]: E1010 18:05:12.653674 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53f2c30d-6bb2-4de5-90d1-febbb40322b2" containerName="nova-metadata-metadata" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.653739 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="53f2c30d-6bb2-4de5-90d1-febbb40322b2" containerName="nova-metadata-metadata" Oct 10 18:05:12 crc kubenswrapper[4799]: E1010 18:05:12.653867 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c155ce1-6bbc-48fd-bfad-6b19f5912e4a" containerName="nova-api-log" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.653947 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c155ce1-6bbc-48fd-bfad-6b19f5912e4a" containerName="nova-api-log" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.654227 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="53f2c30d-6bb2-4de5-90d1-febbb40322b2" containerName="nova-metadata-log" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.654315 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d1fbe12-3328-4be8-a6bf-8a89c61bea63" containerName="nova-manage" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.654405 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="53f2c30d-6bb2-4de5-90d1-febbb40322b2" containerName="nova-metadata-metadata" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.654492 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c155ce1-6bbc-48fd-bfad-6b19f5912e4a" containerName="nova-api-log" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.654563 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c155ce1-6bbc-48fd-bfad-6b19f5912e4a" containerName="nova-api-api" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.655853 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.659257 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.664161 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.678549 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.686679 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.695238 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.696813 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.704241 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.711694 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.715939 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6f5fcdf7c9-pnv52" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.733961 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pht5p\" (UniqueName: \"kubernetes.io/projected/85831ffe-f336-4725-a8dd-a3d8313a9690-kube-api-access-pht5p\") pod \"nova-api-0\" (UID: \"85831ffe-f336-4725-a8dd-a3d8313a9690\") " pod="openstack/nova-api-0" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.733999 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cllwc\" (UniqueName: \"kubernetes.io/projected/8bda0000-9ef7-4a01-a8a1-c0a902923b01-kube-api-access-cllwc\") pod \"nova-metadata-0\" (UID: \"8bda0000-9ef7-4a01-a8a1-c0a902923b01\") " pod="openstack/nova-metadata-0" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.734029 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85831ffe-f336-4725-a8dd-a3d8313a9690-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"85831ffe-f336-4725-a8dd-a3d8313a9690\") " pod="openstack/nova-api-0" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.734079 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bda0000-9ef7-4a01-a8a1-c0a902923b01-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8bda0000-9ef7-4a01-a8a1-c0a902923b01\") " pod="openstack/nova-metadata-0" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.734102 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8bda0000-9ef7-4a01-a8a1-c0a902923b01-logs\") pod \"nova-metadata-0\" (UID: \"8bda0000-9ef7-4a01-a8a1-c0a902923b01\") " pod="openstack/nova-metadata-0" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.734145 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bda0000-9ef7-4a01-a8a1-c0a902923b01-config-data\") pod \"nova-metadata-0\" (UID: \"8bda0000-9ef7-4a01-a8a1-c0a902923b01\") " pod="openstack/nova-metadata-0" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.734175 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85831ffe-f336-4725-a8dd-a3d8313a9690-logs\") pod \"nova-api-0\" (UID: \"85831ffe-f336-4725-a8dd-a3d8313a9690\") " pod="openstack/nova-api-0" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.734197 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85831ffe-f336-4725-a8dd-a3d8313a9690-config-data\") pod \"nova-api-0\" (UID: \"85831ffe-f336-4725-a8dd-a3d8313a9690\") " 
pod="openstack/nova-api-0" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.745415 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.770246 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.780681 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-fdf47c55f-nkhwh"] Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.781004 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-fdf47c55f-nkhwh" podUID="a45728b4-650f-4ed5-bb39-118e04708f2b" containerName="dnsmasq-dns" containerID="cri-o://1c45ebbecf3f756ac75772ae3d242481cb7f4650a4ab85c9b14f261da87c52c1" gracePeriod=10 Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.835203 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8bda0000-9ef7-4a01-a8a1-c0a902923b01-logs\") pod \"nova-metadata-0\" (UID: \"8bda0000-9ef7-4a01-a8a1-c0a902923b01\") " pod="openstack/nova-metadata-0" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.835276 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bda0000-9ef7-4a01-a8a1-c0a902923b01-config-data\") pod \"nova-metadata-0\" (UID: \"8bda0000-9ef7-4a01-a8a1-c0a902923b01\") " pod="openstack/nova-metadata-0" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.835328 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85831ffe-f336-4725-a8dd-a3d8313a9690-logs\") pod \"nova-api-0\" (UID: \"85831ffe-f336-4725-a8dd-a3d8313a9690\") " pod="openstack/nova-api-0" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.835351 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85831ffe-f336-4725-a8dd-a3d8313a9690-config-data\") pod \"nova-api-0\" (UID: \"85831ffe-f336-4725-a8dd-a3d8313a9690\") " pod="openstack/nova-api-0" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.835413 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pht5p\" (UniqueName: \"kubernetes.io/projected/85831ffe-f336-4725-a8dd-a3d8313a9690-kube-api-access-pht5p\") pod \"nova-api-0\" (UID: \"85831ffe-f336-4725-a8dd-a3d8313a9690\") " pod="openstack/nova-api-0" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.835430 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cllwc\" (UniqueName: \"kubernetes.io/projected/8bda0000-9ef7-4a01-a8a1-c0a902923b01-kube-api-access-cllwc\") pod \"nova-metadata-0\" (UID: \"8bda0000-9ef7-4a01-a8a1-c0a902923b01\") " pod="openstack/nova-metadata-0" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.835468 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85831ffe-f336-4725-a8dd-a3d8313a9690-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"85831ffe-f336-4725-a8dd-a3d8313a9690\") " pod="openstack/nova-api-0" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.835501 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/8bda0000-9ef7-4a01-a8a1-c0a902923b01-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8bda0000-9ef7-4a01-a8a1-c0a902923b01\") " pod="openstack/nova-metadata-0" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.835605 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8bda0000-9ef7-4a01-a8a1-c0a902923b01-logs\") pod \"nova-metadata-0\" (UID: \"8bda0000-9ef7-4a01-a8a1-c0a902923b01\") " pod="openstack/nova-metadata-0" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.836265 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85831ffe-f336-4725-a8dd-a3d8313a9690-logs\") pod \"nova-api-0\" (UID: \"85831ffe-f336-4725-a8dd-a3d8313a9690\") " pod="openstack/nova-api-0" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.839092 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85831ffe-f336-4725-a8dd-a3d8313a9690-config-data\") pod \"nova-api-0\" (UID: \"85831ffe-f336-4725-a8dd-a3d8313a9690\") " pod="openstack/nova-api-0" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.840037 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85831ffe-f336-4725-a8dd-a3d8313a9690-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"85831ffe-f336-4725-a8dd-a3d8313a9690\") " pod="openstack/nova-api-0" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.840371 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bda0000-9ef7-4a01-a8a1-c0a902923b01-config-data\") pod \"nova-metadata-0\" (UID: \"8bda0000-9ef7-4a01-a8a1-c0a902923b01\") " pod="openstack/nova-metadata-0" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.841231 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bda0000-9ef7-4a01-a8a1-c0a902923b01-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8bda0000-9ef7-4a01-a8a1-c0a902923b01\") " pod="openstack/nova-metadata-0" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.853665 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cllwc\" (UniqueName: \"kubernetes.io/projected/8bda0000-9ef7-4a01-a8a1-c0a902923b01-kube-api-access-cllwc\") pod \"nova-metadata-0\" (UID: \"8bda0000-9ef7-4a01-a8a1-c0a902923b01\") " pod="openstack/nova-metadata-0" Oct 10 18:05:12 crc kubenswrapper[4799]: I1010 18:05:12.857150 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pht5p\" (UniqueName: \"kubernetes.io/projected/85831ffe-f336-4725-a8dd-a3d8313a9690-kube-api-access-pht5p\") pod \"nova-api-0\" (UID: \"85831ffe-f336-4725-a8dd-a3d8313a9690\") " pod="openstack/nova-api-0" Oct 10 18:05:13 crc kubenswrapper[4799]: I1010 18:05:13.045573 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 10 18:05:13 crc kubenswrapper[4799]: I1010 18:05:13.055713 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 10 18:05:13 crc kubenswrapper[4799]: I1010 18:05:13.318205 4799 generic.go:334] "Generic (PLEG): container finished" podID="a45728b4-650f-4ed5-bb39-118e04708f2b" containerID="1c45ebbecf3f756ac75772ae3d242481cb7f4650a4ab85c9b14f261da87c52c1" exitCode=0 Oct 10 18:05:13 crc kubenswrapper[4799]: I1010 18:05:13.319382 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fdf47c55f-nkhwh" event={"ID":"a45728b4-650f-4ed5-bb39-118e04708f2b","Type":"ContainerDied","Data":"1c45ebbecf3f756ac75772ae3d242481cb7f4650a4ab85c9b14f261da87c52c1"} Oct 10 18:05:13 crc kubenswrapper[4799]: I1010 18:05:13.326876 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Oct 10 18:05:13 crc kubenswrapper[4799]: I1010 18:05:13.334182 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fdf47c55f-nkhwh" Oct 10 18:05:13 crc kubenswrapper[4799]: I1010 18:05:13.360279 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a45728b4-650f-4ed5-bb39-118e04708f2b-config\") pod \"a45728b4-650f-4ed5-bb39-118e04708f2b\" (UID: \"a45728b4-650f-4ed5-bb39-118e04708f2b\") " Oct 10 18:05:13 crc kubenswrapper[4799]: I1010 18:05:13.360371 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a45728b4-650f-4ed5-bb39-118e04708f2b-dns-svc\") pod \"a45728b4-650f-4ed5-bb39-118e04708f2b\" (UID: \"a45728b4-650f-4ed5-bb39-118e04708f2b\") " Oct 10 18:05:13 crc kubenswrapper[4799]: I1010 18:05:13.360395 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a45728b4-650f-4ed5-bb39-118e04708f2b-ovsdbserver-nb\") pod \"a45728b4-650f-4ed5-bb39-118e04708f2b\" (UID: \"a45728b4-650f-4ed5-bb39-118e04708f2b\") " Oct 10 18:05:13 crc kubenswrapper[4799]: I1010 18:05:13.360480 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a45728b4-650f-4ed5-bb39-118e04708f2b-ovsdbserver-sb\") pod \"a45728b4-650f-4ed5-bb39-118e04708f2b\" (UID: \"a45728b4-650f-4ed5-bb39-118e04708f2b\") " Oct 10 18:05:13 crc kubenswrapper[4799]: I1010 18:05:13.360508 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-92x6q\" (UniqueName: \"kubernetes.io/projected/a45728b4-650f-4ed5-bb39-118e04708f2b-kube-api-access-92x6q\") pod \"a45728b4-650f-4ed5-bb39-118e04708f2b\" (UID: \"a45728b4-650f-4ed5-bb39-118e04708f2b\") " Oct 10 18:05:13 crc kubenswrapper[4799]: I1010 18:05:13.370251 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a45728b4-650f-4ed5-bb39-118e04708f2b-kube-api-access-92x6q" (OuterVolumeSpecName: "kube-api-access-92x6q") pod "a45728b4-650f-4ed5-bb39-118e04708f2b" (UID: "a45728b4-650f-4ed5-bb39-118e04708f2b"). InnerVolumeSpecName "kube-api-access-92x6q". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:05:13 crc kubenswrapper[4799]: I1010 18:05:13.423818 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53f2c30d-6bb2-4de5-90d1-febbb40322b2" path="/var/lib/kubelet/pods/53f2c30d-6bb2-4de5-90d1-febbb40322b2/volumes" Oct 10 18:05:13 crc kubenswrapper[4799]: I1010 18:05:13.424632 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c155ce1-6bbc-48fd-bfad-6b19f5912e4a" path="/var/lib/kubelet/pods/6c155ce1-6bbc-48fd-bfad-6b19f5912e4a/volumes" Oct 10 18:05:13 crc kubenswrapper[4799]: I1010 18:05:13.431362 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a45728b4-650f-4ed5-bb39-118e04708f2b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a45728b4-650f-4ed5-bb39-118e04708f2b" (UID: "a45728b4-650f-4ed5-bb39-118e04708f2b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:05:13 crc kubenswrapper[4799]: I1010 18:05:13.439458 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a45728b4-650f-4ed5-bb39-118e04708f2b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a45728b4-650f-4ed5-bb39-118e04708f2b" (UID: "a45728b4-650f-4ed5-bb39-118e04708f2b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:05:13 crc kubenswrapper[4799]: I1010 18:05:13.441019 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a45728b4-650f-4ed5-bb39-118e04708f2b-config" (OuterVolumeSpecName: "config") pod "a45728b4-650f-4ed5-bb39-118e04708f2b" (UID: "a45728b4-650f-4ed5-bb39-118e04708f2b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:05:13 crc kubenswrapper[4799]: I1010 18:05:13.442019 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a45728b4-650f-4ed5-bb39-118e04708f2b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a45728b4-650f-4ed5-bb39-118e04708f2b" (UID: "a45728b4-650f-4ed5-bb39-118e04708f2b"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:05:13 crc kubenswrapper[4799]: I1010 18:05:13.463352 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a45728b4-650f-4ed5-bb39-118e04708f2b-config\") on node \"crc\" DevicePath \"\"" Oct 10 18:05:13 crc kubenswrapper[4799]: I1010 18:05:13.463382 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a45728b4-650f-4ed5-bb39-118e04708f2b-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 10 18:05:13 crc kubenswrapper[4799]: I1010 18:05:13.463392 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a45728b4-650f-4ed5-bb39-118e04708f2b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 10 18:05:13 crc kubenswrapper[4799]: I1010 18:05:13.463402 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a45728b4-650f-4ed5-bb39-118e04708f2b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 10 18:05:13 crc kubenswrapper[4799]: I1010 18:05:13.463411 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-92x6q\" (UniqueName: \"kubernetes.io/projected/a45728b4-650f-4ed5-bb39-118e04708f2b-kube-api-access-92x6q\") on node \"crc\" DevicePath \"\"" Oct 10 18:05:13 crc kubenswrapper[4799]: I1010 18:05:13.593827 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 10 18:05:13 crc kubenswrapper[4799]: W1010 18:05:13.594900 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8bda0000_9ef7_4a01_a8a1_c0a902923b01.slice/crio-7699784d9775b231eda546a0f0db29a7a20e1d6e15f82f3389476edbb566ab63 WatchSource:0}: Error finding container 7699784d9775b231eda546a0f0db29a7a20e1d6e15f82f3389476edbb566ab63: Status 404 returned error can't find the container with id 7699784d9775b231eda546a0f0db29a7a20e1d6e15f82f3389476edbb566ab63 Oct 10 18:05:13 crc kubenswrapper[4799]: I1010 18:05:13.685885 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 10 18:05:13 crc kubenswrapper[4799]: W1010 18:05:13.692150 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod85831ffe_f336_4725_a8dd_a3d8313a9690.slice/crio-c82729c47bb78356685daffe675d5d04d6e67ea3edab691b37191724a4903bbe WatchSource:0}: Error finding container c82729c47bb78356685daffe675d5d04d6e67ea3edab691b37191724a4903bbe: Status 404 returned error can't find the container with id c82729c47bb78356685daffe675d5d04d6e67ea3edab691b37191724a4903bbe Oct 10 18:05:14 crc kubenswrapper[4799]: I1010 18:05:14.329254 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fdf47c55f-nkhwh" event={"ID":"a45728b4-650f-4ed5-bb39-118e04708f2b","Type":"ContainerDied","Data":"cd125e760601632b83474e9299c09ebbf97fbb09498ea2cef6fbb458e0ebfc04"} Oct 10 18:05:14 crc kubenswrapper[4799]: I1010 18:05:14.329350 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-fdf47c55f-nkhwh" Oct 10 18:05:14 crc kubenswrapper[4799]: I1010 18:05:14.329369 4799 scope.go:117] "RemoveContainer" containerID="1c45ebbecf3f756ac75772ae3d242481cb7f4650a4ab85c9b14f261da87c52c1" Oct 10 18:05:14 crc kubenswrapper[4799]: I1010 18:05:14.331512 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8bda0000-9ef7-4a01-a8a1-c0a902923b01","Type":"ContainerStarted","Data":"4d699fa41135ce219d0a6806566fa53e2d73b60ed2599772b459f1e37e546641"} Oct 10 18:05:14 crc kubenswrapper[4799]: I1010 18:05:14.331556 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8bda0000-9ef7-4a01-a8a1-c0a902923b01","Type":"ContainerStarted","Data":"7baeac385aa7ac9f5ddad49f359dcae1d19af30134dff019758bf641d9c27893"} Oct 10 18:05:14 crc kubenswrapper[4799]: I1010 18:05:14.331569 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8bda0000-9ef7-4a01-a8a1-c0a902923b01","Type":"ContainerStarted","Data":"7699784d9775b231eda546a0f0db29a7a20e1d6e15f82f3389476edbb566ab63"} Oct 10 18:05:14 crc kubenswrapper[4799]: I1010 18:05:14.338396 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"85831ffe-f336-4725-a8dd-a3d8313a9690","Type":"ContainerStarted","Data":"bbf9904fa5e8f113d244126039da8c23ed8976b1737411f2e07c41505d94165c"} Oct 10 18:05:14 crc kubenswrapper[4799]: I1010 18:05:14.338431 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"85831ffe-f336-4725-a8dd-a3d8313a9690","Type":"ContainerStarted","Data":"aca97d527e858e11dd47a1c4db54731813c37e6a73837139b1b825b8103d93c9"} Oct 10 18:05:14 crc kubenswrapper[4799]: I1010 18:05:14.338442 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"85831ffe-f336-4725-a8dd-a3d8313a9690","Type":"ContainerStarted","Data":"c82729c47bb78356685daffe675d5d04d6e67ea3edab691b37191724a4903bbe"} Oct 10 18:05:14 crc kubenswrapper[4799]: I1010 18:05:14.349382 4799 scope.go:117] "RemoveContainer" containerID="6c38fb25af6f322d183cc6e140efbf88e0923f4c5ac469ccb33ebe6d1a3408ed" Oct 10 18:05:14 crc kubenswrapper[4799]: I1010 18:05:14.358548 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.358527899 podStartE2EDuration="2.358527899s" podCreationTimestamp="2025-10-10 18:05:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:05:14.35485923 +0000 UTC m=+5607.863183375" watchObservedRunningTime="2025-10-10 18:05:14.358527899 +0000 UTC m=+5607.866852014" Oct 10 18:05:14 crc kubenswrapper[4799]: I1010 18:05:14.383158 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.383135959 podStartE2EDuration="2.383135959s" podCreationTimestamp="2025-10-10 18:05:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:05:14.381280974 +0000 UTC m=+5607.889605099" watchObservedRunningTime="2025-10-10 18:05:14.383135959 +0000 UTC m=+5607.891460084" Oct 10 18:05:14 crc kubenswrapper[4799]: I1010 18:05:14.416256 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-fdf47c55f-nkhwh"] Oct 10 18:05:14 crc kubenswrapper[4799]: I1010 
18:05:14.422434 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-fdf47c55f-nkhwh"] Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.201846 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.248792 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.248904 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.302870 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17b5efe0-880c-4841-b485-c58c950a3113-config-data\") pod \"17b5efe0-880c-4841-b485-c58c950a3113\" (UID: \"17b5efe0-880c-4841-b485-c58c950a3113\") " Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.302964 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17b5efe0-880c-4841-b485-c58c950a3113-combined-ca-bundle\") pod \"17b5efe0-880c-4841-b485-c58c950a3113\" (UID: \"17b5efe0-880c-4841-b485-c58c950a3113\") " Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.303110 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8ftwk\" (UniqueName: \"kubernetes.io/projected/17b5efe0-880c-4841-b485-c58c950a3113-kube-api-access-8ftwk\") pod \"17b5efe0-880c-4841-b485-c58c950a3113\" (UID: \"17b5efe0-880c-4841-b485-c58c950a3113\") " Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.309796 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17b5efe0-880c-4841-b485-c58c950a3113-kube-api-access-8ftwk" (OuterVolumeSpecName: "kube-api-access-8ftwk") pod "17b5efe0-880c-4841-b485-c58c950a3113" (UID: "17b5efe0-880c-4841-b485-c58c950a3113"). InnerVolumeSpecName "kube-api-access-8ftwk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.331506 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17b5efe0-880c-4841-b485-c58c950a3113-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "17b5efe0-880c-4841-b485-c58c950a3113" (UID: "17b5efe0-880c-4841-b485-c58c950a3113"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.351635 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17b5efe0-880c-4841-b485-c58c950a3113-config-data" (OuterVolumeSpecName: "config-data") pod "17b5efe0-880c-4841-b485-c58c950a3113" (UID: "17b5efe0-880c-4841-b485-c58c950a3113"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.357235 4799 generic.go:334] "Generic (PLEG): container finished" podID="17b5efe0-880c-4841-b485-c58c950a3113" containerID="b4fd51941e4e4416ae068b74365f15a6de1786075e0e1996eab0a797656da628" exitCode=0 Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.357448 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"17b5efe0-880c-4841-b485-c58c950a3113","Type":"ContainerDied","Data":"b4fd51941e4e4416ae068b74365f15a6de1786075e0e1996eab0a797656da628"} Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.357507 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"17b5efe0-880c-4841-b485-c58c950a3113","Type":"ContainerDied","Data":"4336649fbb80666a4e1490f7c559c39e7c725b43287bfcd1ae0f8e8c52f3f4ec"} Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.357541 4799 scope.go:117] "RemoveContainer" containerID="b4fd51941e4e4416ae068b74365f15a6de1786075e0e1996eab0a797656da628" Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.357933 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.381159 4799 scope.go:117] "RemoveContainer" containerID="b4fd51941e4e4416ae068b74365f15a6de1786075e0e1996eab0a797656da628" Oct 10 18:05:15 crc kubenswrapper[4799]: E1010 18:05:15.381774 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b4fd51941e4e4416ae068b74365f15a6de1786075e0e1996eab0a797656da628\": container with ID starting with b4fd51941e4e4416ae068b74365f15a6de1786075e0e1996eab0a797656da628 not found: ID does not exist" containerID="b4fd51941e4e4416ae068b74365f15a6de1786075e0e1996eab0a797656da628" Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.381819 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4fd51941e4e4416ae068b74365f15a6de1786075e0e1996eab0a797656da628"} err="failed to get container status \"b4fd51941e4e4416ae068b74365f15a6de1786075e0e1996eab0a797656da628\": rpc error: code = NotFound desc = could not find container \"b4fd51941e4e4416ae068b74365f15a6de1786075e0e1996eab0a797656da628\": container with ID starting with b4fd51941e4e4416ae068b74365f15a6de1786075e0e1996eab0a797656da628 not found: ID does not exist" Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.407168 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17b5efe0-880c-4841-b485-c58c950a3113-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.407200 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17b5efe0-880c-4841-b485-c58c950a3113-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.407210 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8ftwk\" (UniqueName: \"kubernetes.io/projected/17b5efe0-880c-4841-b485-c58c950a3113-kube-api-access-8ftwk\") on node \"crc\" DevicePath \"\"" Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.440772 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a45728b4-650f-4ed5-bb39-118e04708f2b" 
path="/var/lib/kubelet/pods/a45728b4-650f-4ed5-bb39-118e04708f2b/volumes" Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.456032 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.456082 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.456104 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 10 18:05:15 crc kubenswrapper[4799]: E1010 18:05:15.456509 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a45728b4-650f-4ed5-bb39-118e04708f2b" containerName="init" Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.456531 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a45728b4-650f-4ed5-bb39-118e04708f2b" containerName="init" Oct 10 18:05:15 crc kubenswrapper[4799]: E1010 18:05:15.456553 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a45728b4-650f-4ed5-bb39-118e04708f2b" containerName="dnsmasq-dns" Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.456562 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a45728b4-650f-4ed5-bb39-118e04708f2b" containerName="dnsmasq-dns" Oct 10 18:05:15 crc kubenswrapper[4799]: E1010 18:05:15.456585 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17b5efe0-880c-4841-b485-c58c950a3113" containerName="nova-scheduler-scheduler" Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.456593 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="17b5efe0-880c-4841-b485-c58c950a3113" containerName="nova-scheduler-scheduler" Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.458081 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="17b5efe0-880c-4841-b485-c58c950a3113" containerName="nova-scheduler-scheduler" Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.458113 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="a45728b4-650f-4ed5-bb39-118e04708f2b" containerName="dnsmasq-dns" Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.458676 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.458780 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.460860 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.508144 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8-config-data\") pod \"nova-scheduler-0\" (UID: \"cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8\") " pod="openstack/nova-scheduler-0" Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.508388 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8\") " pod="openstack/nova-scheduler-0" Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.509002 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfn9q\" (UniqueName: \"kubernetes.io/projected/cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8-kube-api-access-lfn9q\") pod \"nova-scheduler-0\" (UID: \"cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8\") " pod="openstack/nova-scheduler-0" Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.610435 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfn9q\" (UniqueName: \"kubernetes.io/projected/cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8-kube-api-access-lfn9q\") pod \"nova-scheduler-0\" (UID: \"cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8\") " pod="openstack/nova-scheduler-0" Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.610488 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8-config-data\") pod \"nova-scheduler-0\" (UID: \"cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8\") " pod="openstack/nova-scheduler-0" Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.610526 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8\") " pod="openstack/nova-scheduler-0" Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.617637 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8-config-data\") pod \"nova-scheduler-0\" (UID: \"cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8\") " pod="openstack/nova-scheduler-0" Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.617924 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8\") " pod="openstack/nova-scheduler-0" Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.627643 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfn9q\" (UniqueName: \"kubernetes.io/projected/cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8-kube-api-access-lfn9q\") pod \"nova-scheduler-0\" (UID: \"cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8\") " 
pod="openstack/nova-scheduler-0" Oct 10 18:05:15 crc kubenswrapper[4799]: I1010 18:05:15.784100 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 10 18:05:16 crc kubenswrapper[4799]: I1010 18:05:16.351429 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 10 18:05:16 crc kubenswrapper[4799]: I1010 18:05:16.370939 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8","Type":"ContainerStarted","Data":"5f0af88f3e9f073e7e0cf6cf20b17b75511bad364a4389176934362190a4324e"} Oct 10 18:05:17 crc kubenswrapper[4799]: I1010 18:05:17.390484 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8","Type":"ContainerStarted","Data":"d4b32c139fbc8b1fc450eb5d40640234591ae2d6d091df19dbe50bad0a9a1638"} Oct 10 18:05:17 crc kubenswrapper[4799]: I1010 18:05:17.419678 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.4196458 podStartE2EDuration="2.4196458s" podCreationTimestamp="2025-10-10 18:05:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:05:17.414026583 +0000 UTC m=+5610.922350708" watchObservedRunningTime="2025-10-10 18:05:17.4196458 +0000 UTC m=+5610.927969955" Oct 10 18:05:17 crc kubenswrapper[4799]: I1010 18:05:17.429266 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17b5efe0-880c-4841-b485-c58c950a3113" path="/var/lib/kubelet/pods/17b5efe0-880c-4841-b485-c58c950a3113/volumes" Oct 10 18:05:18 crc kubenswrapper[4799]: I1010 18:05:18.056741 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 10 18:05:18 crc kubenswrapper[4799]: I1010 18:05:18.057417 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 10 18:05:19 crc kubenswrapper[4799]: I1010 18:05:19.739269 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Oct 10 18:05:20 crc kubenswrapper[4799]: I1010 18:05:20.344331 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-hphzd"] Oct 10 18:05:20 crc kubenswrapper[4799]: I1010 18:05:20.346535 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-hphzd" Oct 10 18:05:20 crc kubenswrapper[4799]: I1010 18:05:20.354388 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Oct 10 18:05:20 crc kubenswrapper[4799]: I1010 18:05:20.354668 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Oct 10 18:05:20 crc kubenswrapper[4799]: I1010 18:05:20.364497 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-hphzd"] Oct 10 18:05:20 crc kubenswrapper[4799]: I1010 18:05:20.436888 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/af7d9738-f698-4bfa-9294-daf47a91c9ab-scripts\") pod \"nova-cell1-cell-mapping-hphzd\" (UID: \"af7d9738-f698-4bfa-9294-daf47a91c9ab\") " pod="openstack/nova-cell1-cell-mapping-hphzd" Oct 10 18:05:20 crc kubenswrapper[4799]: I1010 18:05:20.437272 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af7d9738-f698-4bfa-9294-daf47a91c9ab-config-data\") pod \"nova-cell1-cell-mapping-hphzd\" (UID: \"af7d9738-f698-4bfa-9294-daf47a91c9ab\") " pod="openstack/nova-cell1-cell-mapping-hphzd" Oct 10 18:05:20 crc kubenswrapper[4799]: I1010 18:05:20.438069 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drk9c\" (UniqueName: \"kubernetes.io/projected/af7d9738-f698-4bfa-9294-daf47a91c9ab-kube-api-access-drk9c\") pod \"nova-cell1-cell-mapping-hphzd\" (UID: \"af7d9738-f698-4bfa-9294-daf47a91c9ab\") " pod="openstack/nova-cell1-cell-mapping-hphzd" Oct 10 18:05:20 crc kubenswrapper[4799]: I1010 18:05:20.438664 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af7d9738-f698-4bfa-9294-daf47a91c9ab-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-hphzd\" (UID: \"af7d9738-f698-4bfa-9294-daf47a91c9ab\") " pod="openstack/nova-cell1-cell-mapping-hphzd" Oct 10 18:05:20 crc kubenswrapper[4799]: I1010 18:05:20.539655 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drk9c\" (UniqueName: \"kubernetes.io/projected/af7d9738-f698-4bfa-9294-daf47a91c9ab-kube-api-access-drk9c\") pod \"nova-cell1-cell-mapping-hphzd\" (UID: \"af7d9738-f698-4bfa-9294-daf47a91c9ab\") " pod="openstack/nova-cell1-cell-mapping-hphzd" Oct 10 18:05:20 crc kubenswrapper[4799]: I1010 18:05:20.539728 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af7d9738-f698-4bfa-9294-daf47a91c9ab-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-hphzd\" (UID: \"af7d9738-f698-4bfa-9294-daf47a91c9ab\") " pod="openstack/nova-cell1-cell-mapping-hphzd" Oct 10 18:05:20 crc kubenswrapper[4799]: I1010 18:05:20.539823 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/af7d9738-f698-4bfa-9294-daf47a91c9ab-scripts\") pod \"nova-cell1-cell-mapping-hphzd\" (UID: \"af7d9738-f698-4bfa-9294-daf47a91c9ab\") " pod="openstack/nova-cell1-cell-mapping-hphzd" Oct 10 18:05:20 crc kubenswrapper[4799]: I1010 18:05:20.539895 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/af7d9738-f698-4bfa-9294-daf47a91c9ab-config-data\") pod \"nova-cell1-cell-mapping-hphzd\" (UID: \"af7d9738-f698-4bfa-9294-daf47a91c9ab\") " pod="openstack/nova-cell1-cell-mapping-hphzd" Oct 10 18:05:20 crc kubenswrapper[4799]: I1010 18:05:20.547580 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/af7d9738-f698-4bfa-9294-daf47a91c9ab-scripts\") pod \"nova-cell1-cell-mapping-hphzd\" (UID: \"af7d9738-f698-4bfa-9294-daf47a91c9ab\") " pod="openstack/nova-cell1-cell-mapping-hphzd" Oct 10 18:05:20 crc kubenswrapper[4799]: I1010 18:05:20.551226 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af7d9738-f698-4bfa-9294-daf47a91c9ab-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-hphzd\" (UID: \"af7d9738-f698-4bfa-9294-daf47a91c9ab\") " pod="openstack/nova-cell1-cell-mapping-hphzd" Oct 10 18:05:20 crc kubenswrapper[4799]: I1010 18:05:20.557365 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af7d9738-f698-4bfa-9294-daf47a91c9ab-config-data\") pod \"nova-cell1-cell-mapping-hphzd\" (UID: \"af7d9738-f698-4bfa-9294-daf47a91c9ab\") " pod="openstack/nova-cell1-cell-mapping-hphzd" Oct 10 18:05:20 crc kubenswrapper[4799]: I1010 18:05:20.567437 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drk9c\" (UniqueName: \"kubernetes.io/projected/af7d9738-f698-4bfa-9294-daf47a91c9ab-kube-api-access-drk9c\") pod \"nova-cell1-cell-mapping-hphzd\" (UID: \"af7d9738-f698-4bfa-9294-daf47a91c9ab\") " pod="openstack/nova-cell1-cell-mapping-hphzd" Oct 10 18:05:20 crc kubenswrapper[4799]: I1010 18:05:20.675349 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-hphzd" Oct 10 18:05:20 crc kubenswrapper[4799]: I1010 18:05:20.785488 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 10 18:05:21 crc kubenswrapper[4799]: I1010 18:05:21.160039 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-hphzd"] Oct 10 18:05:21 crc kubenswrapper[4799]: I1010 18:05:21.432668 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-hphzd" event={"ID":"af7d9738-f698-4bfa-9294-daf47a91c9ab","Type":"ContainerStarted","Data":"5673edc64fbbc65660e1306ab5b2c3bfbcf9730efd6d9ce4894975150994e908"} Oct 10 18:05:21 crc kubenswrapper[4799]: I1010 18:05:21.433050 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-hphzd" event={"ID":"af7d9738-f698-4bfa-9294-daf47a91c9ab","Type":"ContainerStarted","Data":"56a81dbcce64f11f58f9e283b83f8596823ce22a917ceb8120c93cfe21947c32"} Oct 10 18:05:21 crc kubenswrapper[4799]: I1010 18:05:21.457474 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-hphzd" podStartSLOduration=1.457447026 podStartE2EDuration="1.457447026s" podCreationTimestamp="2025-10-10 18:05:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:05:21.452326551 +0000 UTC m=+5614.960650676" watchObservedRunningTime="2025-10-10 18:05:21.457447026 +0000 UTC m=+5614.965771181" Oct 10 18:05:23 crc kubenswrapper[4799]: I1010 18:05:23.046063 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 10 18:05:23 crc kubenswrapper[4799]: I1010 18:05:23.046410 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 10 18:05:23 crc kubenswrapper[4799]: I1010 18:05:23.057123 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 10 18:05:23 crc kubenswrapper[4799]: I1010 18:05:23.057178 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 10 18:05:24 crc kubenswrapper[4799]: I1010 18:05:24.213181 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="85831ffe-f336-4725-a8dd-a3d8313a9690" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.79:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 10 18:05:24 crc kubenswrapper[4799]: I1010 18:05:24.213734 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="85831ffe-f336-4725-a8dd-a3d8313a9690" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.79:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 10 18:05:24 crc kubenswrapper[4799]: I1010 18:05:24.213800 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="8bda0000-9ef7-4a01-a8a1-c0a902923b01" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.80:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 10 18:05:24 crc kubenswrapper[4799]: I1010 18:05:24.214683 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" 
podUID="8bda0000-9ef7-4a01-a8a1-c0a902923b01" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.80:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 10 18:05:25 crc kubenswrapper[4799]: I1010 18:05:25.784846 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 10 18:05:25 crc kubenswrapper[4799]: I1010 18:05:25.836351 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 10 18:05:26 crc kubenswrapper[4799]: I1010 18:05:26.488632 4799 generic.go:334] "Generic (PLEG): container finished" podID="af7d9738-f698-4bfa-9294-daf47a91c9ab" containerID="5673edc64fbbc65660e1306ab5b2c3bfbcf9730efd6d9ce4894975150994e908" exitCode=0 Oct 10 18:05:26 crc kubenswrapper[4799]: I1010 18:05:26.488750 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-hphzd" event={"ID":"af7d9738-f698-4bfa-9294-daf47a91c9ab","Type":"ContainerDied","Data":"5673edc64fbbc65660e1306ab5b2c3bfbcf9730efd6d9ce4894975150994e908"} Oct 10 18:05:26 crc kubenswrapper[4799]: I1010 18:05:26.520154 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 10 18:05:27 crc kubenswrapper[4799]: I1010 18:05:27.934483 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-hphzd" Oct 10 18:05:28 crc kubenswrapper[4799]: I1010 18:05:28.092797 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af7d9738-f698-4bfa-9294-daf47a91c9ab-config-data\") pod \"af7d9738-f698-4bfa-9294-daf47a91c9ab\" (UID: \"af7d9738-f698-4bfa-9294-daf47a91c9ab\") " Oct 10 18:05:28 crc kubenswrapper[4799]: I1010 18:05:28.092928 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af7d9738-f698-4bfa-9294-daf47a91c9ab-combined-ca-bundle\") pod \"af7d9738-f698-4bfa-9294-daf47a91c9ab\" (UID: \"af7d9738-f698-4bfa-9294-daf47a91c9ab\") " Oct 10 18:05:28 crc kubenswrapper[4799]: I1010 18:05:28.093081 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/af7d9738-f698-4bfa-9294-daf47a91c9ab-scripts\") pod \"af7d9738-f698-4bfa-9294-daf47a91c9ab\" (UID: \"af7d9738-f698-4bfa-9294-daf47a91c9ab\") " Oct 10 18:05:28 crc kubenswrapper[4799]: I1010 18:05:28.093189 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-drk9c\" (UniqueName: \"kubernetes.io/projected/af7d9738-f698-4bfa-9294-daf47a91c9ab-kube-api-access-drk9c\") pod \"af7d9738-f698-4bfa-9294-daf47a91c9ab\" (UID: \"af7d9738-f698-4bfa-9294-daf47a91c9ab\") " Oct 10 18:05:28 crc kubenswrapper[4799]: I1010 18:05:28.102226 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af7d9738-f698-4bfa-9294-daf47a91c9ab-scripts" (OuterVolumeSpecName: "scripts") pod "af7d9738-f698-4bfa-9294-daf47a91c9ab" (UID: "af7d9738-f698-4bfa-9294-daf47a91c9ab"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:05:28 crc kubenswrapper[4799]: I1010 18:05:28.103395 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af7d9738-f698-4bfa-9294-daf47a91c9ab-kube-api-access-drk9c" (OuterVolumeSpecName: "kube-api-access-drk9c") pod "af7d9738-f698-4bfa-9294-daf47a91c9ab" (UID: "af7d9738-f698-4bfa-9294-daf47a91c9ab"). InnerVolumeSpecName "kube-api-access-drk9c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:05:28 crc kubenswrapper[4799]: I1010 18:05:28.134152 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af7d9738-f698-4bfa-9294-daf47a91c9ab-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "af7d9738-f698-4bfa-9294-daf47a91c9ab" (UID: "af7d9738-f698-4bfa-9294-daf47a91c9ab"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:05:28 crc kubenswrapper[4799]: I1010 18:05:28.138488 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af7d9738-f698-4bfa-9294-daf47a91c9ab-config-data" (OuterVolumeSpecName: "config-data") pod "af7d9738-f698-4bfa-9294-daf47a91c9ab" (UID: "af7d9738-f698-4bfa-9294-daf47a91c9ab"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:05:28 crc kubenswrapper[4799]: I1010 18:05:28.195812 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af7d9738-f698-4bfa-9294-daf47a91c9ab-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 18:05:28 crc kubenswrapper[4799]: I1010 18:05:28.195882 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af7d9738-f698-4bfa-9294-daf47a91c9ab-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:05:28 crc kubenswrapper[4799]: I1010 18:05:28.195911 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/af7d9738-f698-4bfa-9294-daf47a91c9ab-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 18:05:28 crc kubenswrapper[4799]: I1010 18:05:28.195936 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-drk9c\" (UniqueName: \"kubernetes.io/projected/af7d9738-f698-4bfa-9294-daf47a91c9ab-kube-api-access-drk9c\") on node \"crc\" DevicePath \"\"" Oct 10 18:05:28 crc kubenswrapper[4799]: I1010 18:05:28.540023 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-hphzd" event={"ID":"af7d9738-f698-4bfa-9294-daf47a91c9ab","Type":"ContainerDied","Data":"56a81dbcce64f11f58f9e283b83f8596823ce22a917ceb8120c93cfe21947c32"} Oct 10 18:05:28 crc kubenswrapper[4799]: I1010 18:05:28.540564 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="56a81dbcce64f11f58f9e283b83f8596823ce22a917ceb8120c93cfe21947c32" Oct 10 18:05:28 crc kubenswrapper[4799]: I1010 18:05:28.540800 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-hphzd" Oct 10 18:05:28 crc kubenswrapper[4799]: I1010 18:05:28.718742 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 10 18:05:28 crc kubenswrapper[4799]: I1010 18:05:28.719108 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="85831ffe-f336-4725-a8dd-a3d8313a9690" containerName="nova-api-log" containerID="cri-o://aca97d527e858e11dd47a1c4db54731813c37e6a73837139b1b825b8103d93c9" gracePeriod=30 Oct 10 18:05:28 crc kubenswrapper[4799]: I1010 18:05:28.719621 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="85831ffe-f336-4725-a8dd-a3d8313a9690" containerName="nova-api-api" containerID="cri-o://bbf9904fa5e8f113d244126039da8c23ed8976b1737411f2e07c41505d94165c" gracePeriod=30 Oct 10 18:05:28 crc kubenswrapper[4799]: I1010 18:05:28.740164 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 10 18:05:28 crc kubenswrapper[4799]: I1010 18:05:28.740552 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8" containerName="nova-scheduler-scheduler" containerID="cri-o://d4b32c139fbc8b1fc450eb5d40640234591ae2d6d091df19dbe50bad0a9a1638" gracePeriod=30 Oct 10 18:05:28 crc kubenswrapper[4799]: I1010 18:05:28.747433 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 10 18:05:28 crc kubenswrapper[4799]: I1010 18:05:28.747693 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="8bda0000-9ef7-4a01-a8a1-c0a902923b01" containerName="nova-metadata-log" containerID="cri-o://7baeac385aa7ac9f5ddad49f359dcae1d19af30134dff019758bf641d9c27893" gracePeriod=30 Oct 10 18:05:28 crc kubenswrapper[4799]: I1010 18:05:28.747840 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="8bda0000-9ef7-4a01-a8a1-c0a902923b01" containerName="nova-metadata-metadata" containerID="cri-o://4d699fa41135ce219d0a6806566fa53e2d73b60ed2599772b459f1e37e546641" gracePeriod=30 Oct 10 18:05:29 crc kubenswrapper[4799]: I1010 18:05:29.553841 4799 generic.go:334] "Generic (PLEG): container finished" podID="8bda0000-9ef7-4a01-a8a1-c0a902923b01" containerID="7baeac385aa7ac9f5ddad49f359dcae1d19af30134dff019758bf641d9c27893" exitCode=143 Oct 10 18:05:29 crc kubenswrapper[4799]: I1010 18:05:29.553903 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8bda0000-9ef7-4a01-a8a1-c0a902923b01","Type":"ContainerDied","Data":"7baeac385aa7ac9f5ddad49f359dcae1d19af30134dff019758bf641d9c27893"} Oct 10 18:05:29 crc kubenswrapper[4799]: I1010 18:05:29.555140 4799 generic.go:334] "Generic (PLEG): container finished" podID="85831ffe-f336-4725-a8dd-a3d8313a9690" containerID="aca97d527e858e11dd47a1c4db54731813c37e6a73837139b1b825b8103d93c9" exitCode=143 Oct 10 18:05:29 crc kubenswrapper[4799]: I1010 18:05:29.555164 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"85831ffe-f336-4725-a8dd-a3d8313a9690","Type":"ContainerDied","Data":"aca97d527e858e11dd47a1c4db54731813c37e6a73837139b1b825b8103d93c9"} Oct 10 18:05:30 crc kubenswrapper[4799]: E1010 18:05:30.788663 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command 
error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d4b32c139fbc8b1fc450eb5d40640234591ae2d6d091df19dbe50bad0a9a1638" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 10 18:05:30 crc kubenswrapper[4799]: E1010 18:05:30.791968 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d4b32c139fbc8b1fc450eb5d40640234591ae2d6d091df19dbe50bad0a9a1638" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 10 18:05:30 crc kubenswrapper[4799]: E1010 18:05:30.794393 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d4b32c139fbc8b1fc450eb5d40640234591ae2d6d091df19dbe50bad0a9a1638" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 10 18:05:30 crc kubenswrapper[4799]: E1010 18:05:30.794453 4799 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8" containerName="nova-scheduler-scheduler" Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.531520 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.548053 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.600571 4799 generic.go:334] "Generic (PLEG): container finished" podID="8bda0000-9ef7-4a01-a8a1-c0a902923b01" containerID="4d699fa41135ce219d0a6806566fa53e2d73b60ed2599772b459f1e37e546641" exitCode=0 Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.600683 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.600959 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8bda0000-9ef7-4a01-a8a1-c0a902923b01","Type":"ContainerDied","Data":"4d699fa41135ce219d0a6806566fa53e2d73b60ed2599772b459f1e37e546641"} Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.601050 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8bda0000-9ef7-4a01-a8a1-c0a902923b01","Type":"ContainerDied","Data":"7699784d9775b231eda546a0f0db29a7a20e1d6e15f82f3389476edbb566ab63"} Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.601094 4799 scope.go:117] "RemoveContainer" containerID="4d699fa41135ce219d0a6806566fa53e2d73b60ed2599772b459f1e37e546641" Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.608646 4799 generic.go:334] "Generic (PLEG): container finished" podID="85831ffe-f336-4725-a8dd-a3d8313a9690" containerID="bbf9904fa5e8f113d244126039da8c23ed8976b1737411f2e07c41505d94165c" exitCode=0 Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.608691 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"85831ffe-f336-4725-a8dd-a3d8313a9690","Type":"ContainerDied","Data":"bbf9904fa5e8f113d244126039da8c23ed8976b1737411f2e07c41505d94165c"} Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.608719 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"85831ffe-f336-4725-a8dd-a3d8313a9690","Type":"ContainerDied","Data":"c82729c47bb78356685daffe675d5d04d6e67ea3edab691b37191724a4903bbe"} Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.608843 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.628018 4799 scope.go:117] "RemoveContainer" containerID="7baeac385aa7ac9f5ddad49f359dcae1d19af30134dff019758bf641d9c27893" Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.657893 4799 scope.go:117] "RemoveContainer" containerID="4d699fa41135ce219d0a6806566fa53e2d73b60ed2599772b459f1e37e546641" Oct 10 18:05:32 crc kubenswrapper[4799]: E1010 18:05:32.658424 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d699fa41135ce219d0a6806566fa53e2d73b60ed2599772b459f1e37e546641\": container with ID starting with 4d699fa41135ce219d0a6806566fa53e2d73b60ed2599772b459f1e37e546641 not found: ID does not exist" containerID="4d699fa41135ce219d0a6806566fa53e2d73b60ed2599772b459f1e37e546641" Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.658475 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d699fa41135ce219d0a6806566fa53e2d73b60ed2599772b459f1e37e546641"} err="failed to get container status \"4d699fa41135ce219d0a6806566fa53e2d73b60ed2599772b459f1e37e546641\": rpc error: code = NotFound desc = could not find container \"4d699fa41135ce219d0a6806566fa53e2d73b60ed2599772b459f1e37e546641\": container with ID starting with 4d699fa41135ce219d0a6806566fa53e2d73b60ed2599772b459f1e37e546641 not found: ID does not exist" Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.658508 4799 scope.go:117] "RemoveContainer" containerID="7baeac385aa7ac9f5ddad49f359dcae1d19af30134dff019758bf641d9c27893" Oct 10 18:05:32 crc kubenswrapper[4799]: E1010 18:05:32.659011 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7baeac385aa7ac9f5ddad49f359dcae1d19af30134dff019758bf641d9c27893\": container with ID starting with 7baeac385aa7ac9f5ddad49f359dcae1d19af30134dff019758bf641d9c27893 not found: ID does not exist" containerID="7baeac385aa7ac9f5ddad49f359dcae1d19af30134dff019758bf641d9c27893" Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.659065 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7baeac385aa7ac9f5ddad49f359dcae1d19af30134dff019758bf641d9c27893"} err="failed to get container status \"7baeac385aa7ac9f5ddad49f359dcae1d19af30134dff019758bf641d9c27893\": rpc error: code = NotFound desc = could not find container \"7baeac385aa7ac9f5ddad49f359dcae1d19af30134dff019758bf641d9c27893\": container with ID starting with 7baeac385aa7ac9f5ddad49f359dcae1d19af30134dff019758bf641d9c27893 not found: ID does not exist" Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.659112 4799 scope.go:117] "RemoveContainer" containerID="bbf9904fa5e8f113d244126039da8c23ed8976b1737411f2e07c41505d94165c" Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.684825 4799 scope.go:117] "RemoveContainer" containerID="aca97d527e858e11dd47a1c4db54731813c37e6a73837139b1b825b8103d93c9" Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.686419 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85831ffe-f336-4725-a8dd-a3d8313a9690-logs\") pod \"85831ffe-f336-4725-a8dd-a3d8313a9690\" (UID: \"85831ffe-f336-4725-a8dd-a3d8313a9690\") " Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.686507 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-pht5p\" (UniqueName: \"kubernetes.io/projected/85831ffe-f336-4725-a8dd-a3d8313a9690-kube-api-access-pht5p\") pod \"85831ffe-f336-4725-a8dd-a3d8313a9690\" (UID: \"85831ffe-f336-4725-a8dd-a3d8313a9690\") " Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.686640 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85831ffe-f336-4725-a8dd-a3d8313a9690-combined-ca-bundle\") pod \"85831ffe-f336-4725-a8dd-a3d8313a9690\" (UID: \"85831ffe-f336-4725-a8dd-a3d8313a9690\") " Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.686875 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85831ffe-f336-4725-a8dd-a3d8313a9690-logs" (OuterVolumeSpecName: "logs") pod "85831ffe-f336-4725-a8dd-a3d8313a9690" (UID: "85831ffe-f336-4725-a8dd-a3d8313a9690"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.686891 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cllwc\" (UniqueName: \"kubernetes.io/projected/8bda0000-9ef7-4a01-a8a1-c0a902923b01-kube-api-access-cllwc\") pod \"8bda0000-9ef7-4a01-a8a1-c0a902923b01\" (UID: \"8bda0000-9ef7-4a01-a8a1-c0a902923b01\") " Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.687002 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bda0000-9ef7-4a01-a8a1-c0a902923b01-config-data\") pod \"8bda0000-9ef7-4a01-a8a1-c0a902923b01\" (UID: \"8bda0000-9ef7-4a01-a8a1-c0a902923b01\") " Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.687078 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8bda0000-9ef7-4a01-a8a1-c0a902923b01-logs\") pod \"8bda0000-9ef7-4a01-a8a1-c0a902923b01\" (UID: \"8bda0000-9ef7-4a01-a8a1-c0a902923b01\") " Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.687244 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85831ffe-f336-4725-a8dd-a3d8313a9690-config-data\") pod \"85831ffe-f336-4725-a8dd-a3d8313a9690\" (UID: \"85831ffe-f336-4725-a8dd-a3d8313a9690\") " Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.687299 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bda0000-9ef7-4a01-a8a1-c0a902923b01-combined-ca-bundle\") pod \"8bda0000-9ef7-4a01-a8a1-c0a902923b01\" (UID: \"8bda0000-9ef7-4a01-a8a1-c0a902923b01\") " Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.688245 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85831ffe-f336-4725-a8dd-a3d8313a9690-logs\") on node \"crc\" DevicePath \"\"" Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.688795 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8bda0000-9ef7-4a01-a8a1-c0a902923b01-logs" (OuterVolumeSpecName: "logs") pod "8bda0000-9ef7-4a01-a8a1-c0a902923b01" (UID: "8bda0000-9ef7-4a01-a8a1-c0a902923b01"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.696293 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8bda0000-9ef7-4a01-a8a1-c0a902923b01-kube-api-access-cllwc" (OuterVolumeSpecName: "kube-api-access-cllwc") pod "8bda0000-9ef7-4a01-a8a1-c0a902923b01" (UID: "8bda0000-9ef7-4a01-a8a1-c0a902923b01"). InnerVolumeSpecName "kube-api-access-cllwc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.696450 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85831ffe-f336-4725-a8dd-a3d8313a9690-kube-api-access-pht5p" (OuterVolumeSpecName: "kube-api-access-pht5p") pod "85831ffe-f336-4725-a8dd-a3d8313a9690" (UID: "85831ffe-f336-4725-a8dd-a3d8313a9690"). InnerVolumeSpecName "kube-api-access-pht5p". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.712538 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85831ffe-f336-4725-a8dd-a3d8313a9690-config-data" (OuterVolumeSpecName: "config-data") pod "85831ffe-f336-4725-a8dd-a3d8313a9690" (UID: "85831ffe-f336-4725-a8dd-a3d8313a9690"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.714616 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bda0000-9ef7-4a01-a8a1-c0a902923b01-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8bda0000-9ef7-4a01-a8a1-c0a902923b01" (UID: "8bda0000-9ef7-4a01-a8a1-c0a902923b01"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.718085 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bda0000-9ef7-4a01-a8a1-c0a902923b01-config-data" (OuterVolumeSpecName: "config-data") pod "8bda0000-9ef7-4a01-a8a1-c0a902923b01" (UID: "8bda0000-9ef7-4a01-a8a1-c0a902923b01"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.726884 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85831ffe-f336-4725-a8dd-a3d8313a9690-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "85831ffe-f336-4725-a8dd-a3d8313a9690" (UID: "85831ffe-f336-4725-a8dd-a3d8313a9690"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.767236 4799 scope.go:117] "RemoveContainer" containerID="bbf9904fa5e8f113d244126039da8c23ed8976b1737411f2e07c41505d94165c" Oct 10 18:05:32 crc kubenswrapper[4799]: E1010 18:05:32.767688 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bbf9904fa5e8f113d244126039da8c23ed8976b1737411f2e07c41505d94165c\": container with ID starting with bbf9904fa5e8f113d244126039da8c23ed8976b1737411f2e07c41505d94165c not found: ID does not exist" containerID="bbf9904fa5e8f113d244126039da8c23ed8976b1737411f2e07c41505d94165c" Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.767738 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bbf9904fa5e8f113d244126039da8c23ed8976b1737411f2e07c41505d94165c"} err="failed to get container status \"bbf9904fa5e8f113d244126039da8c23ed8976b1737411f2e07c41505d94165c\": rpc error: code = NotFound desc = could not find container \"bbf9904fa5e8f113d244126039da8c23ed8976b1737411f2e07c41505d94165c\": container with ID starting with bbf9904fa5e8f113d244126039da8c23ed8976b1737411f2e07c41505d94165c not found: ID does not exist" Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.767826 4799 scope.go:117] "RemoveContainer" containerID="aca97d527e858e11dd47a1c4db54731813c37e6a73837139b1b825b8103d93c9" Oct 10 18:05:32 crc kubenswrapper[4799]: E1010 18:05:32.768219 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aca97d527e858e11dd47a1c4db54731813c37e6a73837139b1b825b8103d93c9\": container with ID starting with aca97d527e858e11dd47a1c4db54731813c37e6a73837139b1b825b8103d93c9 not found: ID does not exist" containerID="aca97d527e858e11dd47a1c4db54731813c37e6a73837139b1b825b8103d93c9" Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.768274 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aca97d527e858e11dd47a1c4db54731813c37e6a73837139b1b825b8103d93c9"} err="failed to get container status \"aca97d527e858e11dd47a1c4db54731813c37e6a73837139b1b825b8103d93c9\": rpc error: code = NotFound desc = could not find container \"aca97d527e858e11dd47a1c4db54731813c37e6a73837139b1b825b8103d93c9\": container with ID starting with aca97d527e858e11dd47a1c4db54731813c37e6a73837139b1b825b8103d93c9 not found: ID does not exist" Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.790743 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85831ffe-f336-4725-a8dd-a3d8313a9690-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.790838 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bda0000-9ef7-4a01-a8a1-c0a902923b01-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.790869 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pht5p\" (UniqueName: \"kubernetes.io/projected/85831ffe-f336-4725-a8dd-a3d8313a9690-kube-api-access-pht5p\") on node \"crc\" DevicePath \"\"" Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.790899 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/85831ffe-f336-4725-a8dd-a3d8313a9690-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.790923 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cllwc\" (UniqueName: \"kubernetes.io/projected/8bda0000-9ef7-4a01-a8a1-c0a902923b01-kube-api-access-cllwc\") on node \"crc\" DevicePath \"\"" Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.790946 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bda0000-9ef7-4a01-a8a1-c0a902923b01-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.790969 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8bda0000-9ef7-4a01-a8a1-c0a902923b01-logs\") on node \"crc\" DevicePath \"\"" Oct 10 18:05:32 crc kubenswrapper[4799]: I1010 18:05:32.992139 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.031691 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.061877 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.074607 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.080669 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 10 18:05:33 crc kubenswrapper[4799]: E1010 18:05:33.081187 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85831ffe-f336-4725-a8dd-a3d8313a9690" containerName="nova-api-log" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.081228 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="85831ffe-f336-4725-a8dd-a3d8313a9690" containerName="nova-api-log" Oct 10 18:05:33 crc kubenswrapper[4799]: E1010 18:05:33.081248 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85831ffe-f336-4725-a8dd-a3d8313a9690" containerName="nova-api-api" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.081258 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="85831ffe-f336-4725-a8dd-a3d8313a9690" containerName="nova-api-api" Oct 10 18:05:33 crc kubenswrapper[4799]: E1010 18:05:33.081284 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af7d9738-f698-4bfa-9294-daf47a91c9ab" containerName="nova-manage" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.081295 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="af7d9738-f698-4bfa-9294-daf47a91c9ab" containerName="nova-manage" Oct 10 18:05:33 crc kubenswrapper[4799]: E1010 18:05:33.081308 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bda0000-9ef7-4a01-a8a1-c0a902923b01" containerName="nova-metadata-metadata" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.081317 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bda0000-9ef7-4a01-a8a1-c0a902923b01" containerName="nova-metadata-metadata" Oct 10 18:05:33 crc kubenswrapper[4799]: E1010 18:05:33.081331 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bda0000-9ef7-4a01-a8a1-c0a902923b01" containerName="nova-metadata-log" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.081339 4799 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="8bda0000-9ef7-4a01-a8a1-c0a902923b01" containerName="nova-metadata-log" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.081585 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bda0000-9ef7-4a01-a8a1-c0a902923b01" containerName="nova-metadata-log" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.081598 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="af7d9738-f698-4bfa-9294-daf47a91c9ab" containerName="nova-manage" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.081612 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="85831ffe-f336-4725-a8dd-a3d8313a9690" containerName="nova-api-api" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.081630 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="85831ffe-f336-4725-a8dd-a3d8313a9690" containerName="nova-api-log" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.081643 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bda0000-9ef7-4a01-a8a1-c0a902923b01" containerName="nova-metadata-metadata" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.082912 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.086138 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.086849 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.093941 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.096146 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.100491 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.106905 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.118623 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.201000 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/272a89d6-8528-473f-8aed-1c3ef584fbd6-logs\") pod \"nova-metadata-0\" (UID: \"272a89d6-8528-473f-8aed-1c3ef584fbd6\") " pod="openstack/nova-metadata-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.201085 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/83dd6056-b85a-4884-8d59-9edc767e6558-logs\") pod \"nova-api-0\" (UID: \"83dd6056-b85a-4884-8d59-9edc767e6558\") " pod="openstack/nova-api-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.201112 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/272a89d6-8528-473f-8aed-1c3ef584fbd6-config-data\") pod \"nova-metadata-0\" (UID: \"272a89d6-8528-473f-8aed-1c3ef584fbd6\") " pod="openstack/nova-metadata-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.201202 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kc8ll\" (UniqueName: \"kubernetes.io/projected/83dd6056-b85a-4884-8d59-9edc767e6558-kube-api-access-kc8ll\") pod \"nova-api-0\" (UID: \"83dd6056-b85a-4884-8d59-9edc767e6558\") " pod="openstack/nova-api-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.201219 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9xxf\" (UniqueName: \"kubernetes.io/projected/272a89d6-8528-473f-8aed-1c3ef584fbd6-kube-api-access-j9xxf\") pod \"nova-metadata-0\" (UID: \"272a89d6-8528-473f-8aed-1c3ef584fbd6\") " pod="openstack/nova-metadata-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.201252 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83dd6056-b85a-4884-8d59-9edc767e6558-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"83dd6056-b85a-4884-8d59-9edc767e6558\") " pod="openstack/nova-api-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.201277 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83dd6056-b85a-4884-8d59-9edc767e6558-config-data\") pod \"nova-api-0\" (UID: \"83dd6056-b85a-4884-8d59-9edc767e6558\") " pod="openstack/nova-api-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.201667 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/272a89d6-8528-473f-8aed-1c3ef584fbd6-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"272a89d6-8528-473f-8aed-1c3ef584fbd6\") " pod="openstack/nova-metadata-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.303552 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8-combined-ca-bundle\") pod \"cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8\" (UID: \"cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8\") " Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.304284 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"kube-api-access-lfn9q\" (UniqueName: \"kubernetes.io/projected/cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8-kube-api-access-lfn9q\") pod \"cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8\" (UID: \"cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8\") " Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.304458 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8-config-data\") pod \"cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8\" (UID: \"cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8\") " Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.305689 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/83dd6056-b85a-4884-8d59-9edc767e6558-logs\") pod \"nova-api-0\" (UID: \"83dd6056-b85a-4884-8d59-9edc767e6558\") " pod="openstack/nova-api-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.306017 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/272a89d6-8528-473f-8aed-1c3ef584fbd6-config-data\") pod \"nova-metadata-0\" (UID: \"272a89d6-8528-473f-8aed-1c3ef584fbd6\") " pod="openstack/nova-metadata-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.306249 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kc8ll\" (UniqueName: \"kubernetes.io/projected/83dd6056-b85a-4884-8d59-9edc767e6558-kube-api-access-kc8ll\") pod \"nova-api-0\" (UID: \"83dd6056-b85a-4884-8d59-9edc767e6558\") " pod="openstack/nova-api-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.306411 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9xxf\" (UniqueName: \"kubernetes.io/projected/272a89d6-8528-473f-8aed-1c3ef584fbd6-kube-api-access-j9xxf\") pod \"nova-metadata-0\" (UID: \"272a89d6-8528-473f-8aed-1c3ef584fbd6\") " pod="openstack/nova-metadata-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.306656 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83dd6056-b85a-4884-8d59-9edc767e6558-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"83dd6056-b85a-4884-8d59-9edc767e6558\") " pod="openstack/nova-api-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.306938 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83dd6056-b85a-4884-8d59-9edc767e6558-config-data\") pod \"nova-api-0\" (UID: \"83dd6056-b85a-4884-8d59-9edc767e6558\") " pod="openstack/nova-api-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.307144 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/272a89d6-8528-473f-8aed-1c3ef584fbd6-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"272a89d6-8528-473f-8aed-1c3ef584fbd6\") " pod="openstack/nova-metadata-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.307332 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/272a89d6-8528-473f-8aed-1c3ef584fbd6-logs\") pod \"nova-metadata-0\" (UID: \"272a89d6-8528-473f-8aed-1c3ef584fbd6\") " pod="openstack/nova-metadata-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.308150 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"logs\" (UniqueName: \"kubernetes.io/empty-dir/272a89d6-8528-473f-8aed-1c3ef584fbd6-logs\") pod \"nova-metadata-0\" (UID: \"272a89d6-8528-473f-8aed-1c3ef584fbd6\") " pod="openstack/nova-metadata-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.306304 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/83dd6056-b85a-4884-8d59-9edc767e6558-logs\") pod \"nova-api-0\" (UID: \"83dd6056-b85a-4884-8d59-9edc767e6558\") " pod="openstack/nova-api-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.313012 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/272a89d6-8528-473f-8aed-1c3ef584fbd6-config-data\") pod \"nova-metadata-0\" (UID: \"272a89d6-8528-473f-8aed-1c3ef584fbd6\") " pod="openstack/nova-metadata-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.313734 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8-kube-api-access-lfn9q" (OuterVolumeSpecName: "kube-api-access-lfn9q") pod "cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8" (UID: "cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8"). InnerVolumeSpecName "kube-api-access-lfn9q". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.319534 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83dd6056-b85a-4884-8d59-9edc767e6558-config-data\") pod \"nova-api-0\" (UID: \"83dd6056-b85a-4884-8d59-9edc767e6558\") " pod="openstack/nova-api-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.330675 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83dd6056-b85a-4884-8d59-9edc767e6558-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"83dd6056-b85a-4884-8d59-9edc767e6558\") " pod="openstack/nova-api-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.333099 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/272a89d6-8528-473f-8aed-1c3ef584fbd6-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"272a89d6-8528-473f-8aed-1c3ef584fbd6\") " pod="openstack/nova-metadata-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.335442 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9xxf\" (UniqueName: \"kubernetes.io/projected/272a89d6-8528-473f-8aed-1c3ef584fbd6-kube-api-access-j9xxf\") pod \"nova-metadata-0\" (UID: \"272a89d6-8528-473f-8aed-1c3ef584fbd6\") " pod="openstack/nova-metadata-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.338660 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kc8ll\" (UniqueName: \"kubernetes.io/projected/83dd6056-b85a-4884-8d59-9edc767e6558-kube-api-access-kc8ll\") pod \"nova-api-0\" (UID: \"83dd6056-b85a-4884-8d59-9edc767e6558\") " pod="openstack/nova-api-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.342855 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8-config-data" (OuterVolumeSpecName: "config-data") pod "cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8" (UID: "cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.354458 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8" (UID: "cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.409150 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lfn9q\" (UniqueName: \"kubernetes.io/projected/cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8-kube-api-access-lfn9q\") on node \"crc\" DevicePath \"\"" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.409191 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.409205 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.422115 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85831ffe-f336-4725-a8dd-a3d8313a9690" path="/var/lib/kubelet/pods/85831ffe-f336-4725-a8dd-a3d8313a9690/volumes" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.423532 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8bda0000-9ef7-4a01-a8a1-c0a902923b01" path="/var/lib/kubelet/pods/8bda0000-9ef7-4a01-a8a1-c0a902923b01/volumes" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.436974 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.476721 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.627105 4799 generic.go:334] "Generic (PLEG): container finished" podID="cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8" containerID="d4b32c139fbc8b1fc450eb5d40640234591ae2d6d091df19dbe50bad0a9a1638" exitCode=0 Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.627227 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.627226 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8","Type":"ContainerDied","Data":"d4b32c139fbc8b1fc450eb5d40640234591ae2d6d091df19dbe50bad0a9a1638"} Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.627574 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8","Type":"ContainerDied","Data":"5f0af88f3e9f073e7e0cf6cf20b17b75511bad364a4389176934362190a4324e"} Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.627599 4799 scope.go:117] "RemoveContainer" containerID="d4b32c139fbc8b1fc450eb5d40640234591ae2d6d091df19dbe50bad0a9a1638" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.670735 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.686246 4799 scope.go:117] "RemoveContainer" containerID="d4b32c139fbc8b1fc450eb5d40640234591ae2d6d091df19dbe50bad0a9a1638" Oct 10 18:05:33 crc kubenswrapper[4799]: E1010 18:05:33.686691 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4b32c139fbc8b1fc450eb5d40640234591ae2d6d091df19dbe50bad0a9a1638\": container with ID starting with d4b32c139fbc8b1fc450eb5d40640234591ae2d6d091df19dbe50bad0a9a1638 not found: ID does not exist" containerID="d4b32c139fbc8b1fc450eb5d40640234591ae2d6d091df19dbe50bad0a9a1638" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.686721 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4b32c139fbc8b1fc450eb5d40640234591ae2d6d091df19dbe50bad0a9a1638"} err="failed to get container status \"d4b32c139fbc8b1fc450eb5d40640234591ae2d6d091df19dbe50bad0a9a1638\": rpc error: code = NotFound desc = could not find container \"d4b32c139fbc8b1fc450eb5d40640234591ae2d6d091df19dbe50bad0a9a1638\": container with ID starting with d4b32c139fbc8b1fc450eb5d40640234591ae2d6d091df19dbe50bad0a9a1638 not found: ID does not exist" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.693847 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.700563 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 10 18:05:33 crc kubenswrapper[4799]: E1010 18:05:33.701021 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8" containerName="nova-scheduler-scheduler" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.701038 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8" containerName="nova-scheduler-scheduler" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.701302 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8" containerName="nova-scheduler-scheduler" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.704301 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.711373 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.712626 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.818989 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5051fa5-4437-4de3-8638-4f7afaaaf938-config-data\") pod \"nova-scheduler-0\" (UID: \"a5051fa5-4437-4de3-8638-4f7afaaaf938\") " pod="openstack/nova-scheduler-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.819057 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5051fa5-4437-4de3-8638-4f7afaaaf938-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a5051fa5-4437-4de3-8638-4f7afaaaf938\") " pod="openstack/nova-scheduler-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.819111 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t2nrb\" (UniqueName: \"kubernetes.io/projected/a5051fa5-4437-4de3-8638-4f7afaaaf938-kube-api-access-t2nrb\") pod \"nova-scheduler-0\" (UID: \"a5051fa5-4437-4de3-8638-4f7afaaaf938\") " pod="openstack/nova-scheduler-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.920637 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5051fa5-4437-4de3-8638-4f7afaaaf938-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a5051fa5-4437-4de3-8638-4f7afaaaf938\") " pod="openstack/nova-scheduler-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.920954 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t2nrb\" (UniqueName: \"kubernetes.io/projected/a5051fa5-4437-4de3-8638-4f7afaaaf938-kube-api-access-t2nrb\") pod \"nova-scheduler-0\" (UID: \"a5051fa5-4437-4de3-8638-4f7afaaaf938\") " pod="openstack/nova-scheduler-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.921128 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5051fa5-4437-4de3-8638-4f7afaaaf938-config-data\") pod \"nova-scheduler-0\" (UID: \"a5051fa5-4437-4de3-8638-4f7afaaaf938\") " pod="openstack/nova-scheduler-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.929996 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5051fa5-4437-4de3-8638-4f7afaaaf938-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a5051fa5-4437-4de3-8638-4f7afaaaf938\") " pod="openstack/nova-scheduler-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.930429 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5051fa5-4437-4de3-8638-4f7afaaaf938-config-data\") pod \"nova-scheduler-0\" (UID: \"a5051fa5-4437-4de3-8638-4f7afaaaf938\") " pod="openstack/nova-scheduler-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.938427 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t2nrb\" (UniqueName: 
\"kubernetes.io/projected/a5051fa5-4437-4de3-8638-4f7afaaaf938-kube-api-access-t2nrb\") pod \"nova-scheduler-0\" (UID: \"a5051fa5-4437-4de3-8638-4f7afaaaf938\") " pod="openstack/nova-scheduler-0" Oct 10 18:05:33 crc kubenswrapper[4799]: I1010 18:05:33.988427 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 10 18:05:33 crc kubenswrapper[4799]: W1010 18:05:33.989825 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod83dd6056_b85a_4884_8d59_9edc767e6558.slice/crio-32a1e8afcf6826c0bab7528e2f7eebed31d448680c47a3fd8cf382521ce043a7 WatchSource:0}: Error finding container 32a1e8afcf6826c0bab7528e2f7eebed31d448680c47a3fd8cf382521ce043a7: Status 404 returned error can't find the container with id 32a1e8afcf6826c0bab7528e2f7eebed31d448680c47a3fd8cf382521ce043a7 Oct 10 18:05:34 crc kubenswrapper[4799]: I1010 18:05:34.033480 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 10 18:05:34 crc kubenswrapper[4799]: I1010 18:05:34.066597 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 10 18:05:34 crc kubenswrapper[4799]: W1010 18:05:34.071417 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod272a89d6_8528_473f_8aed_1c3ef584fbd6.slice/crio-2db52de36ae7852b1880c89c3c1a9421187e246e6f2bdd1441e1f43dc03ac2d7 WatchSource:0}: Error finding container 2db52de36ae7852b1880c89c3c1a9421187e246e6f2bdd1441e1f43dc03ac2d7: Status 404 returned error can't find the container with id 2db52de36ae7852b1880c89c3c1a9421187e246e6f2bdd1441e1f43dc03ac2d7 Oct 10 18:05:34 crc kubenswrapper[4799]: I1010 18:05:34.338369 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 10 18:05:34 crc kubenswrapper[4799]: W1010 18:05:34.350292 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda5051fa5_4437_4de3_8638_4f7afaaaf938.slice/crio-62b32f6f048bad1f2726a12f025e9a52f90c5fa43b9dfc536cd064487941941b WatchSource:0}: Error finding container 62b32f6f048bad1f2726a12f025e9a52f90c5fa43b9dfc536cd064487941941b: Status 404 returned error can't find the container with id 62b32f6f048bad1f2726a12f025e9a52f90c5fa43b9dfc536cd064487941941b Oct 10 18:05:34 crc kubenswrapper[4799]: I1010 18:05:34.639967 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a5051fa5-4437-4de3-8638-4f7afaaaf938","Type":"ContainerStarted","Data":"9446512914eec048268f955226e54544437e148c087533f01679b5ecaf7bb0aa"} Oct 10 18:05:34 crc kubenswrapper[4799]: I1010 18:05:34.640016 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a5051fa5-4437-4de3-8638-4f7afaaaf938","Type":"ContainerStarted","Data":"62b32f6f048bad1f2726a12f025e9a52f90c5fa43b9dfc536cd064487941941b"} Oct 10 18:05:34 crc kubenswrapper[4799]: I1010 18:05:34.643516 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"83dd6056-b85a-4884-8d59-9edc767e6558","Type":"ContainerStarted","Data":"e058e3e9ebaa77e73a73123b986ee8de170338b6c672981b6d67c9915f6cf154"} Oct 10 18:05:34 crc kubenswrapper[4799]: I1010 18:05:34.643565 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"83dd6056-b85a-4884-8d59-9edc767e6558","Type":"ContainerStarted","Data":"d7cb973096212d554815f18652c1539ace60989595bed637fee02e498244fffd"} Oct 10 18:05:34 crc kubenswrapper[4799]: I1010 18:05:34.644161 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"83dd6056-b85a-4884-8d59-9edc767e6558","Type":"ContainerStarted","Data":"32a1e8afcf6826c0bab7528e2f7eebed31d448680c47a3fd8cf382521ce043a7"} Oct 10 18:05:34 crc kubenswrapper[4799]: I1010 18:05:34.645557 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"272a89d6-8528-473f-8aed-1c3ef584fbd6","Type":"ContainerStarted","Data":"a1fea6e30e4b40ae24479ceb6f9f170a7ec6c26cd9ef632445d042d6a3ff0733"} Oct 10 18:05:34 crc kubenswrapper[4799]: I1010 18:05:34.645602 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"272a89d6-8528-473f-8aed-1c3ef584fbd6","Type":"ContainerStarted","Data":"ee802f0441aedc24b337be8bd108842c01a64df525467a4888330f248031054f"} Oct 10 18:05:34 crc kubenswrapper[4799]: I1010 18:05:34.645615 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"272a89d6-8528-473f-8aed-1c3ef584fbd6","Type":"ContainerStarted","Data":"2db52de36ae7852b1880c89c3c1a9421187e246e6f2bdd1441e1f43dc03ac2d7"} Oct 10 18:05:34 crc kubenswrapper[4799]: I1010 18:05:34.658567 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.658544705 podStartE2EDuration="1.658544705s" podCreationTimestamp="2025-10-10 18:05:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:05:34.657060229 +0000 UTC m=+5628.165384394" watchObservedRunningTime="2025-10-10 18:05:34.658544705 +0000 UTC m=+5628.166868830" Oct 10 18:05:34 crc kubenswrapper[4799]: I1010 18:05:34.678412 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.678391109 podStartE2EDuration="2.678391109s" podCreationTimestamp="2025-10-10 18:05:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:05:34.675626701 +0000 UTC m=+5628.183950806" watchObservedRunningTime="2025-10-10 18:05:34.678391109 +0000 UTC m=+5628.186715234" Oct 10 18:05:34 crc kubenswrapper[4799]: I1010 18:05:34.704060 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.704028123 podStartE2EDuration="2.704028123s" podCreationTimestamp="2025-10-10 18:05:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:05:34.70346574 +0000 UTC m=+5628.211789875" watchObservedRunningTime="2025-10-10 18:05:34.704028123 +0000 UTC m=+5628.212352278" Oct 10 18:05:35 crc kubenswrapper[4799]: I1010 18:05:35.422520 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8" path="/var/lib/kubelet/pods/cb6f58f7-24b1-4236-bb7e-20cdc16cc4c8/volumes" Oct 10 18:05:38 crc kubenswrapper[4799]: I1010 18:05:38.478444 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 10 18:05:38 crc kubenswrapper[4799]: I1010 18:05:38.478936 4799 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 10 18:05:39 crc kubenswrapper[4799]: I1010 18:05:39.034196 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 10 18:05:43 crc kubenswrapper[4799]: I1010 18:05:43.437831 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 10 18:05:43 crc kubenswrapper[4799]: I1010 18:05:43.438414 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 10 18:05:43 crc kubenswrapper[4799]: I1010 18:05:43.478668 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 10 18:05:43 crc kubenswrapper[4799]: I1010 18:05:43.478737 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 10 18:05:44 crc kubenswrapper[4799]: I1010 18:05:44.034542 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 10 18:05:44 crc kubenswrapper[4799]: I1010 18:05:44.091469 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 10 18:05:44 crc kubenswrapper[4799]: I1010 18:05:44.562041 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="272a89d6-8528-473f-8aed-1c3ef584fbd6" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.84:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 10 18:05:44 crc kubenswrapper[4799]: I1010 18:05:44.562180 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="83dd6056-b85a-4884-8d59-9edc767e6558" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.83:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 10 18:05:44 crc kubenswrapper[4799]: I1010 18:05:44.562227 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="272a89d6-8528-473f-8aed-1c3ef584fbd6" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.84:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 10 18:05:44 crc kubenswrapper[4799]: I1010 18:05:44.562272 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="83dd6056-b85a-4884-8d59-9edc767e6558" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.83:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 10 18:05:44 crc kubenswrapper[4799]: I1010 18:05:44.865041 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 10 18:05:45 crc kubenswrapper[4799]: I1010 18:05:45.249063 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 18:05:45 crc kubenswrapper[4799]: I1010 18:05:45.249170 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 18:05:45 crc kubenswrapper[4799]: I1010 18:05:45.249239 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 18:05:45 crc kubenswrapper[4799]: I1010 18:05:45.250318 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6ae067b7971fd6480cb0c3ccf44d4e22f837ba4674373b4b5903247a9af39cf1"} pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 10 18:05:45 crc kubenswrapper[4799]: I1010 18:05:45.250430 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" containerID="cri-o://6ae067b7971fd6480cb0c3ccf44d4e22f837ba4674373b4b5903247a9af39cf1" gracePeriod=600 Oct 10 18:05:45 crc kubenswrapper[4799]: E1010 18:05:45.395267 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:05:45 crc kubenswrapper[4799]: I1010 18:05:45.833395 4799 generic.go:334] "Generic (PLEG): container finished" podID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerID="6ae067b7971fd6480cb0c3ccf44d4e22f837ba4674373b4b5903247a9af39cf1" exitCode=0 Oct 10 18:05:45 crc kubenswrapper[4799]: I1010 18:05:45.833475 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerDied","Data":"6ae067b7971fd6480cb0c3ccf44d4e22f837ba4674373b4b5903247a9af39cf1"} Oct 10 18:05:45 crc kubenswrapper[4799]: I1010 18:05:45.833628 4799 scope.go:117] "RemoveContainer" containerID="7d93aa50210daf6f851b294fa12ece2a292bc34c273170b02b29c3a462fa4bf5" Oct 10 18:05:45 crc kubenswrapper[4799]: I1010 18:05:45.834705 4799 scope.go:117] "RemoveContainer" containerID="6ae067b7971fd6480cb0c3ccf44d4e22f837ba4674373b4b5903247a9af39cf1" Oct 10 18:05:45 crc kubenswrapper[4799]: E1010 18:05:45.835188 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:05:53 crc kubenswrapper[4799]: I1010 18:05:53.443826 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 10 18:05:53 crc kubenswrapper[4799]: I1010 18:05:53.445235 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 10 18:05:53 crc kubenswrapper[4799]: I1010 18:05:53.445287 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 10 
18:05:53 crc kubenswrapper[4799]: I1010 18:05:53.450303 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 10 18:05:53 crc kubenswrapper[4799]: I1010 18:05:53.482973 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 10 18:05:53 crc kubenswrapper[4799]: I1010 18:05:53.483110 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 10 18:05:53 crc kubenswrapper[4799]: I1010 18:05:53.495530 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 10 18:05:53 crc kubenswrapper[4799]: I1010 18:05:53.516182 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 10 18:05:53 crc kubenswrapper[4799]: I1010 18:05:53.929539 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 10 18:05:53 crc kubenswrapper[4799]: I1010 18:05:53.936287 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 10 18:05:54 crc kubenswrapper[4799]: I1010 18:05:54.144995 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6c8b9ddf8f-z54wf"] Oct 10 18:05:54 crc kubenswrapper[4799]: I1010 18:05:54.146625 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c8b9ddf8f-z54wf" Oct 10 18:05:54 crc kubenswrapper[4799]: I1010 18:05:54.170885 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c8b9ddf8f-z54wf"] Oct 10 18:05:54 crc kubenswrapper[4799]: I1010 18:05:54.294531 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/14c0281b-d3e8-40e4-8ecf-1d1a06c629e0-ovsdbserver-nb\") pod \"dnsmasq-dns-6c8b9ddf8f-z54wf\" (UID: \"14c0281b-d3e8-40e4-8ecf-1d1a06c629e0\") " pod="openstack/dnsmasq-dns-6c8b9ddf8f-z54wf" Oct 10 18:05:54 crc kubenswrapper[4799]: I1010 18:05:54.294665 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wklbs\" (UniqueName: \"kubernetes.io/projected/14c0281b-d3e8-40e4-8ecf-1d1a06c629e0-kube-api-access-wklbs\") pod \"dnsmasq-dns-6c8b9ddf8f-z54wf\" (UID: \"14c0281b-d3e8-40e4-8ecf-1d1a06c629e0\") " pod="openstack/dnsmasq-dns-6c8b9ddf8f-z54wf" Oct 10 18:05:54 crc kubenswrapper[4799]: I1010 18:05:54.294954 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/14c0281b-d3e8-40e4-8ecf-1d1a06c629e0-dns-svc\") pod \"dnsmasq-dns-6c8b9ddf8f-z54wf\" (UID: \"14c0281b-d3e8-40e4-8ecf-1d1a06c629e0\") " pod="openstack/dnsmasq-dns-6c8b9ddf8f-z54wf" Oct 10 18:05:54 crc kubenswrapper[4799]: I1010 18:05:54.295080 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/14c0281b-d3e8-40e4-8ecf-1d1a06c629e0-ovsdbserver-sb\") pod \"dnsmasq-dns-6c8b9ddf8f-z54wf\" (UID: \"14c0281b-d3e8-40e4-8ecf-1d1a06c629e0\") " pod="openstack/dnsmasq-dns-6c8b9ddf8f-z54wf" Oct 10 18:05:54 crc kubenswrapper[4799]: I1010 18:05:54.295147 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/14c0281b-d3e8-40e4-8ecf-1d1a06c629e0-config\") pod \"dnsmasq-dns-6c8b9ddf8f-z54wf\" (UID: \"14c0281b-d3e8-40e4-8ecf-1d1a06c629e0\") " pod="openstack/dnsmasq-dns-6c8b9ddf8f-z54wf" Oct 10 18:05:54 crc kubenswrapper[4799]: I1010 18:05:54.397130 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wklbs\" (UniqueName: \"kubernetes.io/projected/14c0281b-d3e8-40e4-8ecf-1d1a06c629e0-kube-api-access-wklbs\") pod \"dnsmasq-dns-6c8b9ddf8f-z54wf\" (UID: \"14c0281b-d3e8-40e4-8ecf-1d1a06c629e0\") " pod="openstack/dnsmasq-dns-6c8b9ddf8f-z54wf" Oct 10 18:05:54 crc kubenswrapper[4799]: I1010 18:05:54.397488 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/14c0281b-d3e8-40e4-8ecf-1d1a06c629e0-dns-svc\") pod \"dnsmasq-dns-6c8b9ddf8f-z54wf\" (UID: \"14c0281b-d3e8-40e4-8ecf-1d1a06c629e0\") " pod="openstack/dnsmasq-dns-6c8b9ddf8f-z54wf" Oct 10 18:05:54 crc kubenswrapper[4799]: I1010 18:05:54.397550 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/14c0281b-d3e8-40e4-8ecf-1d1a06c629e0-ovsdbserver-sb\") pod \"dnsmasq-dns-6c8b9ddf8f-z54wf\" (UID: \"14c0281b-d3e8-40e4-8ecf-1d1a06c629e0\") " pod="openstack/dnsmasq-dns-6c8b9ddf8f-z54wf" Oct 10 18:05:54 crc kubenswrapper[4799]: I1010 18:05:54.397599 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14c0281b-d3e8-40e4-8ecf-1d1a06c629e0-config\") pod \"dnsmasq-dns-6c8b9ddf8f-z54wf\" (UID: \"14c0281b-d3e8-40e4-8ecf-1d1a06c629e0\") " pod="openstack/dnsmasq-dns-6c8b9ddf8f-z54wf" Oct 10 18:05:54 crc kubenswrapper[4799]: I1010 18:05:54.397624 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/14c0281b-d3e8-40e4-8ecf-1d1a06c629e0-ovsdbserver-nb\") pod \"dnsmasq-dns-6c8b9ddf8f-z54wf\" (UID: \"14c0281b-d3e8-40e4-8ecf-1d1a06c629e0\") " pod="openstack/dnsmasq-dns-6c8b9ddf8f-z54wf" Oct 10 18:05:54 crc kubenswrapper[4799]: I1010 18:05:54.398467 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/14c0281b-d3e8-40e4-8ecf-1d1a06c629e0-dns-svc\") pod \"dnsmasq-dns-6c8b9ddf8f-z54wf\" (UID: \"14c0281b-d3e8-40e4-8ecf-1d1a06c629e0\") " pod="openstack/dnsmasq-dns-6c8b9ddf8f-z54wf" Oct 10 18:05:54 crc kubenswrapper[4799]: I1010 18:05:54.398858 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/14c0281b-d3e8-40e4-8ecf-1d1a06c629e0-ovsdbserver-nb\") pod \"dnsmasq-dns-6c8b9ddf8f-z54wf\" (UID: \"14c0281b-d3e8-40e4-8ecf-1d1a06c629e0\") " pod="openstack/dnsmasq-dns-6c8b9ddf8f-z54wf" Oct 10 18:05:54 crc kubenswrapper[4799]: I1010 18:05:54.398902 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14c0281b-d3e8-40e4-8ecf-1d1a06c629e0-config\") pod \"dnsmasq-dns-6c8b9ddf8f-z54wf\" (UID: \"14c0281b-d3e8-40e4-8ecf-1d1a06c629e0\") " pod="openstack/dnsmasq-dns-6c8b9ddf8f-z54wf" Oct 10 18:05:54 crc kubenswrapper[4799]: I1010 18:05:54.399148 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/14c0281b-d3e8-40e4-8ecf-1d1a06c629e0-ovsdbserver-sb\") pod \"dnsmasq-dns-6c8b9ddf8f-z54wf\" (UID: 
\"14c0281b-d3e8-40e4-8ecf-1d1a06c629e0\") " pod="openstack/dnsmasq-dns-6c8b9ddf8f-z54wf" Oct 10 18:05:54 crc kubenswrapper[4799]: I1010 18:05:54.429112 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wklbs\" (UniqueName: \"kubernetes.io/projected/14c0281b-d3e8-40e4-8ecf-1d1a06c629e0-kube-api-access-wklbs\") pod \"dnsmasq-dns-6c8b9ddf8f-z54wf\" (UID: \"14c0281b-d3e8-40e4-8ecf-1d1a06c629e0\") " pod="openstack/dnsmasq-dns-6c8b9ddf8f-z54wf" Oct 10 18:05:54 crc kubenswrapper[4799]: I1010 18:05:54.463921 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c8b9ddf8f-z54wf" Oct 10 18:05:54 crc kubenswrapper[4799]: I1010 18:05:54.962957 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c8b9ddf8f-z54wf"] Oct 10 18:05:54 crc kubenswrapper[4799]: W1010 18:05:54.968229 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod14c0281b_d3e8_40e4_8ecf_1d1a06c629e0.slice/crio-0d7d9c6b480099ad7d6cdd0feeb084c1f2d1df1bd48059e8e73358ba50bea1ca WatchSource:0}: Error finding container 0d7d9c6b480099ad7d6cdd0feeb084c1f2d1df1bd48059e8e73358ba50bea1ca: Status 404 returned error can't find the container with id 0d7d9c6b480099ad7d6cdd0feeb084c1f2d1df1bd48059e8e73358ba50bea1ca Oct 10 18:05:55 crc kubenswrapper[4799]: I1010 18:05:55.954317 4799 generic.go:334] "Generic (PLEG): container finished" podID="14c0281b-d3e8-40e4-8ecf-1d1a06c629e0" containerID="54d3c64236871e47647d2d0edcb9eb7ff7ae2f6fc6b4f7b1138e92ec72e9299a" exitCode=0 Oct 10 18:05:55 crc kubenswrapper[4799]: I1010 18:05:55.954378 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c8b9ddf8f-z54wf" event={"ID":"14c0281b-d3e8-40e4-8ecf-1d1a06c629e0","Type":"ContainerDied","Data":"54d3c64236871e47647d2d0edcb9eb7ff7ae2f6fc6b4f7b1138e92ec72e9299a"} Oct 10 18:05:55 crc kubenswrapper[4799]: I1010 18:05:55.954716 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c8b9ddf8f-z54wf" event={"ID":"14c0281b-d3e8-40e4-8ecf-1d1a06c629e0","Type":"ContainerStarted","Data":"0d7d9c6b480099ad7d6cdd0feeb084c1f2d1df1bd48059e8e73358ba50bea1ca"} Oct 10 18:05:56 crc kubenswrapper[4799]: I1010 18:05:56.967528 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c8b9ddf8f-z54wf" event={"ID":"14c0281b-d3e8-40e4-8ecf-1d1a06c629e0","Type":"ContainerStarted","Data":"d814cc7beef1f3ae81759f1e6a69b19208b623862c7bccf0636fcdc509d150c0"} Oct 10 18:05:56 crc kubenswrapper[4799]: I1010 18:05:56.967908 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6c8b9ddf8f-z54wf" Oct 10 18:05:57 crc kubenswrapper[4799]: I1010 18:05:57.007602 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6c8b9ddf8f-z54wf" podStartSLOduration=3.007578966 podStartE2EDuration="3.007578966s" podCreationTimestamp="2025-10-10 18:05:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:05:56.995300207 +0000 UTC m=+5650.503624402" watchObservedRunningTime="2025-10-10 18:05:57.007578966 +0000 UTC m=+5650.515903081" Oct 10 18:05:59 crc kubenswrapper[4799]: I1010 18:05:59.405142 4799 scope.go:117] "RemoveContainer" containerID="6ae067b7971fd6480cb0c3ccf44d4e22f837ba4674373b4b5903247a9af39cf1" Oct 10 18:05:59 crc kubenswrapper[4799]: E1010 
18:05:59.406068 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:06:04 crc kubenswrapper[4799]: I1010 18:06:04.467012 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6c8b9ddf8f-z54wf" Oct 10 18:06:04 crc kubenswrapper[4799]: I1010 18:06:04.578665 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6f5fcdf7c9-pnv52"] Oct 10 18:06:04 crc kubenswrapper[4799]: I1010 18:06:04.579029 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6f5fcdf7c9-pnv52" podUID="e28f9857-736d-4a60-833f-91faa9eaef2c" containerName="dnsmasq-dns" containerID="cri-o://a52044ad423ee32529e72e713a84176f092a7d23535e74c5d6c641fe4d5b550d" gracePeriod=10 Oct 10 18:06:05 crc kubenswrapper[4799]: I1010 18:06:05.076317 4799 generic.go:334] "Generic (PLEG): container finished" podID="e28f9857-736d-4a60-833f-91faa9eaef2c" containerID="a52044ad423ee32529e72e713a84176f092a7d23535e74c5d6c641fe4d5b550d" exitCode=0 Oct 10 18:06:05 crc kubenswrapper[4799]: I1010 18:06:05.076389 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f5fcdf7c9-pnv52" event={"ID":"e28f9857-736d-4a60-833f-91faa9eaef2c","Type":"ContainerDied","Data":"a52044ad423ee32529e72e713a84176f092a7d23535e74c5d6c641fe4d5b550d"} Oct 10 18:06:05 crc kubenswrapper[4799]: I1010 18:06:05.076659 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f5fcdf7c9-pnv52" event={"ID":"e28f9857-736d-4a60-833f-91faa9eaef2c","Type":"ContainerDied","Data":"2888f0d173a6ffc2819957563d2fe65777c691d04f28ad194a08535516befcd6"} Oct 10 18:06:05 crc kubenswrapper[4799]: I1010 18:06:05.076730 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2888f0d173a6ffc2819957563d2fe65777c691d04f28ad194a08535516befcd6" Oct 10 18:06:05 crc kubenswrapper[4799]: I1010 18:06:05.108190 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6f5fcdf7c9-pnv52" Oct 10 18:06:05 crc kubenswrapper[4799]: I1010 18:06:05.127439 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e28f9857-736d-4a60-833f-91faa9eaef2c-ovsdbserver-nb\") pod \"e28f9857-736d-4a60-833f-91faa9eaef2c\" (UID: \"e28f9857-736d-4a60-833f-91faa9eaef2c\") " Oct 10 18:06:05 crc kubenswrapper[4799]: I1010 18:06:05.127534 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e28f9857-736d-4a60-833f-91faa9eaef2c-config\") pod \"e28f9857-736d-4a60-833f-91faa9eaef2c\" (UID: \"e28f9857-736d-4a60-833f-91faa9eaef2c\") " Oct 10 18:06:05 crc kubenswrapper[4799]: I1010 18:06:05.127708 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hfz8g\" (UniqueName: \"kubernetes.io/projected/e28f9857-736d-4a60-833f-91faa9eaef2c-kube-api-access-hfz8g\") pod \"e28f9857-736d-4a60-833f-91faa9eaef2c\" (UID: \"e28f9857-736d-4a60-833f-91faa9eaef2c\") " Oct 10 18:06:05 crc kubenswrapper[4799]: I1010 18:06:05.127767 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e28f9857-736d-4a60-833f-91faa9eaef2c-dns-svc\") pod \"e28f9857-736d-4a60-833f-91faa9eaef2c\" (UID: \"e28f9857-736d-4a60-833f-91faa9eaef2c\") " Oct 10 18:06:05 crc kubenswrapper[4799]: I1010 18:06:05.127814 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e28f9857-736d-4a60-833f-91faa9eaef2c-ovsdbserver-sb\") pod \"e28f9857-736d-4a60-833f-91faa9eaef2c\" (UID: \"e28f9857-736d-4a60-833f-91faa9eaef2c\") " Oct 10 18:06:05 crc kubenswrapper[4799]: I1010 18:06:05.140954 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e28f9857-736d-4a60-833f-91faa9eaef2c-kube-api-access-hfz8g" (OuterVolumeSpecName: "kube-api-access-hfz8g") pod "e28f9857-736d-4a60-833f-91faa9eaef2c" (UID: "e28f9857-736d-4a60-833f-91faa9eaef2c"). InnerVolumeSpecName "kube-api-access-hfz8g". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:06:05 crc kubenswrapper[4799]: I1010 18:06:05.188115 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e28f9857-736d-4a60-833f-91faa9eaef2c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e28f9857-736d-4a60-833f-91faa9eaef2c" (UID: "e28f9857-736d-4a60-833f-91faa9eaef2c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:06:05 crc kubenswrapper[4799]: I1010 18:06:05.188178 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e28f9857-736d-4a60-833f-91faa9eaef2c-config" (OuterVolumeSpecName: "config") pod "e28f9857-736d-4a60-833f-91faa9eaef2c" (UID: "e28f9857-736d-4a60-833f-91faa9eaef2c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:06:05 crc kubenswrapper[4799]: I1010 18:06:05.190709 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e28f9857-736d-4a60-833f-91faa9eaef2c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e28f9857-736d-4a60-833f-91faa9eaef2c" (UID: "e28f9857-736d-4a60-833f-91faa9eaef2c"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:06:05 crc kubenswrapper[4799]: I1010 18:06:05.207011 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e28f9857-736d-4a60-833f-91faa9eaef2c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e28f9857-736d-4a60-833f-91faa9eaef2c" (UID: "e28f9857-736d-4a60-833f-91faa9eaef2c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:06:05 crc kubenswrapper[4799]: I1010 18:06:05.230369 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e28f9857-736d-4a60-833f-91faa9eaef2c-config\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:05 crc kubenswrapper[4799]: I1010 18:06:05.230419 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hfz8g\" (UniqueName: \"kubernetes.io/projected/e28f9857-736d-4a60-833f-91faa9eaef2c-kube-api-access-hfz8g\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:05 crc kubenswrapper[4799]: I1010 18:06:05.230438 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e28f9857-736d-4a60-833f-91faa9eaef2c-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:05 crc kubenswrapper[4799]: I1010 18:06:05.230453 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e28f9857-736d-4a60-833f-91faa9eaef2c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:05 crc kubenswrapper[4799]: I1010 18:06:05.230469 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e28f9857-736d-4a60-833f-91faa9eaef2c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:06 crc kubenswrapper[4799]: I1010 18:06:06.087878 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6f5fcdf7c9-pnv52" Oct 10 18:06:06 crc kubenswrapper[4799]: I1010 18:06:06.121959 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6f5fcdf7c9-pnv52"] Oct 10 18:06:06 crc kubenswrapper[4799]: I1010 18:06:06.129424 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6f5fcdf7c9-pnv52"] Oct 10 18:06:07 crc kubenswrapper[4799]: I1010 18:06:07.425191 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e28f9857-736d-4a60-833f-91faa9eaef2c" path="/var/lib/kubelet/pods/e28f9857-736d-4a60-833f-91faa9eaef2c/volumes" Oct 10 18:06:10 crc kubenswrapper[4799]: I1010 18:06:10.104357 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-nh82m"] Oct 10 18:06:10 crc kubenswrapper[4799]: E1010 18:06:10.105609 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e28f9857-736d-4a60-833f-91faa9eaef2c" containerName="dnsmasq-dns" Oct 10 18:06:10 crc kubenswrapper[4799]: I1010 18:06:10.105636 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="e28f9857-736d-4a60-833f-91faa9eaef2c" containerName="dnsmasq-dns" Oct 10 18:06:10 crc kubenswrapper[4799]: E1010 18:06:10.105703 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e28f9857-736d-4a60-833f-91faa9eaef2c" containerName="init" Oct 10 18:06:10 crc kubenswrapper[4799]: I1010 18:06:10.105718 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="e28f9857-736d-4a60-833f-91faa9eaef2c" containerName="init" Oct 10 18:06:10 crc kubenswrapper[4799]: I1010 18:06:10.106719 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="e28f9857-736d-4a60-833f-91faa9eaef2c" containerName="dnsmasq-dns" Oct 10 18:06:10 crc kubenswrapper[4799]: I1010 18:06:10.107947 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-nh82m" Oct 10 18:06:10 crc kubenswrapper[4799]: I1010 18:06:10.126865 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-nh82m"] Oct 10 18:06:10 crc kubenswrapper[4799]: I1010 18:06:10.147079 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thzwn\" (UniqueName: \"kubernetes.io/projected/89463cf0-a6a9-4a2a-834c-cdf8914a4410-kube-api-access-thzwn\") pod \"cinder-db-create-nh82m\" (UID: \"89463cf0-a6a9-4a2a-834c-cdf8914a4410\") " pod="openstack/cinder-db-create-nh82m" Oct 10 18:06:10 crc kubenswrapper[4799]: I1010 18:06:10.248936 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thzwn\" (UniqueName: \"kubernetes.io/projected/89463cf0-a6a9-4a2a-834c-cdf8914a4410-kube-api-access-thzwn\") pod \"cinder-db-create-nh82m\" (UID: \"89463cf0-a6a9-4a2a-834c-cdf8914a4410\") " pod="openstack/cinder-db-create-nh82m" Oct 10 18:06:10 crc kubenswrapper[4799]: I1010 18:06:10.269608 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thzwn\" (UniqueName: \"kubernetes.io/projected/89463cf0-a6a9-4a2a-834c-cdf8914a4410-kube-api-access-thzwn\") pod \"cinder-db-create-nh82m\" (UID: \"89463cf0-a6a9-4a2a-834c-cdf8914a4410\") " pod="openstack/cinder-db-create-nh82m" Oct 10 18:06:10 crc kubenswrapper[4799]: I1010 18:06:10.403357 4799 scope.go:117] "RemoveContainer" containerID="6ae067b7971fd6480cb0c3ccf44d4e22f837ba4674373b4b5903247a9af39cf1" Oct 10 18:06:10 crc kubenswrapper[4799]: E1010 18:06:10.403811 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:06:10 crc kubenswrapper[4799]: I1010 18:06:10.432477 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-nh82m" Oct 10 18:06:10 crc kubenswrapper[4799]: I1010 18:06:10.922314 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-nh82m"] Oct 10 18:06:11 crc kubenswrapper[4799]: I1010 18:06:11.149778 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-nh82m" event={"ID":"89463cf0-a6a9-4a2a-834c-cdf8914a4410","Type":"ContainerStarted","Data":"cd39580451f04b80f9732d680d7015623810441c35a8112d7987a850a9879b5a"} Oct 10 18:06:11 crc kubenswrapper[4799]: I1010 18:06:11.149828 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-nh82m" event={"ID":"89463cf0-a6a9-4a2a-834c-cdf8914a4410","Type":"ContainerStarted","Data":"26e899dbaed8373e9756ffba464b26e7b1199d823621a18ada0dda8decb6ff16"} Oct 10 18:06:11 crc kubenswrapper[4799]: I1010 18:06:11.172834 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-create-nh82m" podStartSLOduration=1.172814295 podStartE2EDuration="1.172814295s" podCreationTimestamp="2025-10-10 18:06:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:06:11.167324331 +0000 UTC m=+5664.675648466" watchObservedRunningTime="2025-10-10 18:06:11.172814295 +0000 UTC m=+5664.681138420" Oct 10 18:06:12 crc kubenswrapper[4799]: I1010 18:06:12.165644 4799 generic.go:334] "Generic (PLEG): container finished" podID="89463cf0-a6a9-4a2a-834c-cdf8914a4410" containerID="cd39580451f04b80f9732d680d7015623810441c35a8112d7987a850a9879b5a" exitCode=0 Oct 10 18:06:12 crc kubenswrapper[4799]: I1010 18:06:12.166089 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-nh82m" event={"ID":"89463cf0-a6a9-4a2a-834c-cdf8914a4410","Type":"ContainerDied","Data":"cd39580451f04b80f9732d680d7015623810441c35a8112d7987a850a9879b5a"} Oct 10 18:06:13 crc kubenswrapper[4799]: I1010 18:06:13.611196 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-nh82m" Oct 10 18:06:13 crc kubenswrapper[4799]: I1010 18:06:13.716237 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-thzwn\" (UniqueName: \"kubernetes.io/projected/89463cf0-a6a9-4a2a-834c-cdf8914a4410-kube-api-access-thzwn\") pod \"89463cf0-a6a9-4a2a-834c-cdf8914a4410\" (UID: \"89463cf0-a6a9-4a2a-834c-cdf8914a4410\") " Oct 10 18:06:13 crc kubenswrapper[4799]: I1010 18:06:13.723668 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89463cf0-a6a9-4a2a-834c-cdf8914a4410-kube-api-access-thzwn" (OuterVolumeSpecName: "kube-api-access-thzwn") pod "89463cf0-a6a9-4a2a-834c-cdf8914a4410" (UID: "89463cf0-a6a9-4a2a-834c-cdf8914a4410"). InnerVolumeSpecName "kube-api-access-thzwn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:06:13 crc kubenswrapper[4799]: I1010 18:06:13.818666 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-thzwn\" (UniqueName: \"kubernetes.io/projected/89463cf0-a6a9-4a2a-834c-cdf8914a4410-kube-api-access-thzwn\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:14 crc kubenswrapper[4799]: I1010 18:06:14.190880 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-nh82m" event={"ID":"89463cf0-a6a9-4a2a-834c-cdf8914a4410","Type":"ContainerDied","Data":"26e899dbaed8373e9756ffba464b26e7b1199d823621a18ada0dda8decb6ff16"} Oct 10 18:06:14 crc kubenswrapper[4799]: I1010 18:06:14.190952 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="26e899dbaed8373e9756ffba464b26e7b1199d823621a18ada0dda8decb6ff16" Oct 10 18:06:14 crc kubenswrapper[4799]: I1010 18:06:14.190969 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-nh82m" Oct 10 18:06:20 crc kubenswrapper[4799]: I1010 18:06:20.143340 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-d79c-account-create-pktxw"] Oct 10 18:06:20 crc kubenswrapper[4799]: E1010 18:06:20.146651 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89463cf0-a6a9-4a2a-834c-cdf8914a4410" containerName="mariadb-database-create" Oct 10 18:06:20 crc kubenswrapper[4799]: I1010 18:06:20.146708 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="89463cf0-a6a9-4a2a-834c-cdf8914a4410" containerName="mariadb-database-create" Oct 10 18:06:20 crc kubenswrapper[4799]: I1010 18:06:20.147255 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="89463cf0-a6a9-4a2a-834c-cdf8914a4410" containerName="mariadb-database-create" Oct 10 18:06:20 crc kubenswrapper[4799]: I1010 18:06:20.148648 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-d79c-account-create-pktxw" Oct 10 18:06:20 crc kubenswrapper[4799]: I1010 18:06:20.151058 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Oct 10 18:06:20 crc kubenswrapper[4799]: I1010 18:06:20.159969 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-d79c-account-create-pktxw"] Oct 10 18:06:20 crc kubenswrapper[4799]: I1010 18:06:20.248843 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tkz4j\" (UniqueName: \"kubernetes.io/projected/d6f6bcef-c635-424f-a956-eaf79c6df15f-kube-api-access-tkz4j\") pod \"cinder-d79c-account-create-pktxw\" (UID: \"d6f6bcef-c635-424f-a956-eaf79c6df15f\") " pod="openstack/cinder-d79c-account-create-pktxw" Oct 10 18:06:20 crc kubenswrapper[4799]: I1010 18:06:20.351528 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tkz4j\" (UniqueName: \"kubernetes.io/projected/d6f6bcef-c635-424f-a956-eaf79c6df15f-kube-api-access-tkz4j\") pod \"cinder-d79c-account-create-pktxw\" (UID: \"d6f6bcef-c635-424f-a956-eaf79c6df15f\") " pod="openstack/cinder-d79c-account-create-pktxw" Oct 10 18:06:20 crc kubenswrapper[4799]: I1010 18:06:20.380301 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tkz4j\" (UniqueName: \"kubernetes.io/projected/d6f6bcef-c635-424f-a956-eaf79c6df15f-kube-api-access-tkz4j\") pod \"cinder-d79c-account-create-pktxw\" (UID: \"d6f6bcef-c635-424f-a956-eaf79c6df15f\") " pod="openstack/cinder-d79c-account-create-pktxw" Oct 10 18:06:20 crc kubenswrapper[4799]: I1010 18:06:20.470872 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-d79c-account-create-pktxw" Oct 10 18:06:21 crc kubenswrapper[4799]: I1010 18:06:21.041820 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-d79c-account-create-pktxw"] Oct 10 18:06:21 crc kubenswrapper[4799]: I1010 18:06:21.276116 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-d79c-account-create-pktxw" event={"ID":"d6f6bcef-c635-424f-a956-eaf79c6df15f","Type":"ContainerStarted","Data":"b6362e8ac8b51ea62c6f9ab892205eec1ed2dc2deabb46ef18d06fd81276f22a"} Oct 10 18:06:22 crc kubenswrapper[4799]: I1010 18:06:22.292384 4799 generic.go:334] "Generic (PLEG): container finished" podID="d6f6bcef-c635-424f-a956-eaf79c6df15f" containerID="5c1235cfc25c047585184fe9857284ee44a564ceeac07e6fdda7f76b420bf0d3" exitCode=0 Oct 10 18:06:22 crc kubenswrapper[4799]: I1010 18:06:22.292451 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-d79c-account-create-pktxw" event={"ID":"d6f6bcef-c635-424f-a956-eaf79c6df15f","Type":"ContainerDied","Data":"5c1235cfc25c047585184fe9857284ee44a564ceeac07e6fdda7f76b420bf0d3"} Oct 10 18:06:23 crc kubenswrapper[4799]: I1010 18:06:23.749904 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-d79c-account-create-pktxw" Oct 10 18:06:23 crc kubenswrapper[4799]: I1010 18:06:23.858017 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tkz4j\" (UniqueName: \"kubernetes.io/projected/d6f6bcef-c635-424f-a956-eaf79c6df15f-kube-api-access-tkz4j\") pod \"d6f6bcef-c635-424f-a956-eaf79c6df15f\" (UID: \"d6f6bcef-c635-424f-a956-eaf79c6df15f\") " Oct 10 18:06:23 crc kubenswrapper[4799]: I1010 18:06:23.866160 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6f6bcef-c635-424f-a956-eaf79c6df15f-kube-api-access-tkz4j" (OuterVolumeSpecName: "kube-api-access-tkz4j") pod "d6f6bcef-c635-424f-a956-eaf79c6df15f" (UID: "d6f6bcef-c635-424f-a956-eaf79c6df15f"). InnerVolumeSpecName "kube-api-access-tkz4j". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:06:23 crc kubenswrapper[4799]: I1010 18:06:23.960821 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tkz4j\" (UniqueName: \"kubernetes.io/projected/d6f6bcef-c635-424f-a956-eaf79c6df15f-kube-api-access-tkz4j\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:24 crc kubenswrapper[4799]: I1010 18:06:24.321969 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-d79c-account-create-pktxw" event={"ID":"d6f6bcef-c635-424f-a956-eaf79c6df15f","Type":"ContainerDied","Data":"b6362e8ac8b51ea62c6f9ab892205eec1ed2dc2deabb46ef18d06fd81276f22a"} Oct 10 18:06:24 crc kubenswrapper[4799]: I1010 18:06:24.322033 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b6362e8ac8b51ea62c6f9ab892205eec1ed2dc2deabb46ef18d06fd81276f22a" Oct 10 18:06:24 crc kubenswrapper[4799]: I1010 18:06:24.322107 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-d79c-account-create-pktxw" Oct 10 18:06:25 crc kubenswrapper[4799]: I1010 18:06:25.404056 4799 scope.go:117] "RemoveContainer" containerID="6ae067b7971fd6480cb0c3ccf44d4e22f837ba4674373b4b5903247a9af39cf1" Oct 10 18:06:25 crc kubenswrapper[4799]: E1010 18:06:25.405099 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:06:25 crc kubenswrapper[4799]: I1010 18:06:25.433524 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-mdtk5"] Oct 10 18:06:25 crc kubenswrapper[4799]: E1010 18:06:25.434119 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6f6bcef-c635-424f-a956-eaf79c6df15f" containerName="mariadb-account-create" Oct 10 18:06:25 crc kubenswrapper[4799]: I1010 18:06:25.434150 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6f6bcef-c635-424f-a956-eaf79c6df15f" containerName="mariadb-account-create" Oct 10 18:06:25 crc kubenswrapper[4799]: I1010 18:06:25.434541 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6f6bcef-c635-424f-a956-eaf79c6df15f" containerName="mariadb-account-create" Oct 10 18:06:25 crc kubenswrapper[4799]: I1010 18:06:25.435936 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-mdtk5"] Oct 10 18:06:25 crc kubenswrapper[4799]: I1010 18:06:25.436105 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-mdtk5" Oct 10 18:06:25 crc kubenswrapper[4799]: I1010 18:06:25.446048 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 10 18:06:25 crc kubenswrapper[4799]: I1010 18:06:25.446364 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-5d94p" Oct 10 18:06:25 crc kubenswrapper[4799]: I1010 18:06:25.446371 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 10 18:06:25 crc kubenswrapper[4799]: I1010 18:06:25.600259 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-config-data\") pod \"cinder-db-sync-mdtk5\" (UID: \"a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a\") " pod="openstack/cinder-db-sync-mdtk5" Oct 10 18:06:25 crc kubenswrapper[4799]: I1010 18:06:25.600413 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-db-sync-config-data\") pod \"cinder-db-sync-mdtk5\" (UID: \"a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a\") " pod="openstack/cinder-db-sync-mdtk5" Oct 10 18:06:25 crc kubenswrapper[4799]: I1010 18:06:25.600455 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-etc-machine-id\") pod \"cinder-db-sync-mdtk5\" (UID: \"a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a\") " pod="openstack/cinder-db-sync-mdtk5" Oct 10 18:06:25 crc kubenswrapper[4799]: I1010 18:06:25.600497 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-combined-ca-bundle\") pod \"cinder-db-sync-mdtk5\" (UID: \"a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a\") " pod="openstack/cinder-db-sync-mdtk5" Oct 10 18:06:25 crc kubenswrapper[4799]: I1010 18:06:25.600555 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-scripts\") pod \"cinder-db-sync-mdtk5\" (UID: \"a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a\") " pod="openstack/cinder-db-sync-mdtk5" Oct 10 18:06:25 crc kubenswrapper[4799]: I1010 18:06:25.600650 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v52lf\" (UniqueName: \"kubernetes.io/projected/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-kube-api-access-v52lf\") pod \"cinder-db-sync-mdtk5\" (UID: \"a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a\") " pod="openstack/cinder-db-sync-mdtk5" Oct 10 18:06:25 crc kubenswrapper[4799]: I1010 18:06:25.702151 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-config-data\") pod \"cinder-db-sync-mdtk5\" (UID: \"a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a\") " pod="openstack/cinder-db-sync-mdtk5" Oct 10 18:06:25 crc kubenswrapper[4799]: I1010 18:06:25.702223 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-db-sync-config-data\") pod 
\"cinder-db-sync-mdtk5\" (UID: \"a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a\") " pod="openstack/cinder-db-sync-mdtk5" Oct 10 18:06:25 crc kubenswrapper[4799]: I1010 18:06:25.702245 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-etc-machine-id\") pod \"cinder-db-sync-mdtk5\" (UID: \"a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a\") " pod="openstack/cinder-db-sync-mdtk5" Oct 10 18:06:25 crc kubenswrapper[4799]: I1010 18:06:25.702263 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-combined-ca-bundle\") pod \"cinder-db-sync-mdtk5\" (UID: \"a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a\") " pod="openstack/cinder-db-sync-mdtk5" Oct 10 18:06:25 crc kubenswrapper[4799]: I1010 18:06:25.702324 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-etc-machine-id\") pod \"cinder-db-sync-mdtk5\" (UID: \"a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a\") " pod="openstack/cinder-db-sync-mdtk5" Oct 10 18:06:25 crc kubenswrapper[4799]: I1010 18:06:25.702360 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-scripts\") pod \"cinder-db-sync-mdtk5\" (UID: \"a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a\") " pod="openstack/cinder-db-sync-mdtk5" Oct 10 18:06:25 crc kubenswrapper[4799]: I1010 18:06:25.703119 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v52lf\" (UniqueName: \"kubernetes.io/projected/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-kube-api-access-v52lf\") pod \"cinder-db-sync-mdtk5\" (UID: \"a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a\") " pod="openstack/cinder-db-sync-mdtk5" Oct 10 18:06:25 crc kubenswrapper[4799]: I1010 18:06:25.707304 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-scripts\") pod \"cinder-db-sync-mdtk5\" (UID: \"a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a\") " pod="openstack/cinder-db-sync-mdtk5" Oct 10 18:06:25 crc kubenswrapper[4799]: I1010 18:06:25.707822 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-combined-ca-bundle\") pod \"cinder-db-sync-mdtk5\" (UID: \"a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a\") " pod="openstack/cinder-db-sync-mdtk5" Oct 10 18:06:25 crc kubenswrapper[4799]: I1010 18:06:25.708372 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-config-data\") pod \"cinder-db-sync-mdtk5\" (UID: \"a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a\") " pod="openstack/cinder-db-sync-mdtk5" Oct 10 18:06:25 crc kubenswrapper[4799]: I1010 18:06:25.708397 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-db-sync-config-data\") pod \"cinder-db-sync-mdtk5\" (UID: \"a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a\") " pod="openstack/cinder-db-sync-mdtk5" Oct 10 18:06:25 crc kubenswrapper[4799]: I1010 18:06:25.724634 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-v52lf\" (UniqueName: \"kubernetes.io/projected/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-kube-api-access-v52lf\") pod \"cinder-db-sync-mdtk5\" (UID: \"a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a\") " pod="openstack/cinder-db-sync-mdtk5" Oct 10 18:06:25 crc kubenswrapper[4799]: I1010 18:06:25.774187 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-mdtk5" Oct 10 18:06:26 crc kubenswrapper[4799]: I1010 18:06:26.301581 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-mdtk5"] Oct 10 18:06:26 crc kubenswrapper[4799]: I1010 18:06:26.355396 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-mdtk5" event={"ID":"a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a","Type":"ContainerStarted","Data":"f10d4b9de98c4f6e624716ade4dd34bce0135d5905222cc509f3e8c9161f25ce"} Oct 10 18:06:27 crc kubenswrapper[4799]: I1010 18:06:27.375798 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-mdtk5" event={"ID":"a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a","Type":"ContainerStarted","Data":"c5ac2f9cd280a52b31ae283a6199cecc76935c5dc55c79db22defd9b26528efc"} Oct 10 18:06:27 crc kubenswrapper[4799]: I1010 18:06:27.396563 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-mdtk5" podStartSLOduration=2.396523657 podStartE2EDuration="2.396523657s" podCreationTimestamp="2025-10-10 18:06:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:06:27.390447839 +0000 UTC m=+5680.898772014" watchObservedRunningTime="2025-10-10 18:06:27.396523657 +0000 UTC m=+5680.904847812" Oct 10 18:06:30 crc kubenswrapper[4799]: I1010 18:06:30.417529 4799 generic.go:334] "Generic (PLEG): container finished" podID="a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a" containerID="c5ac2f9cd280a52b31ae283a6199cecc76935c5dc55c79db22defd9b26528efc" exitCode=0 Oct 10 18:06:30 crc kubenswrapper[4799]: I1010 18:06:30.417663 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-mdtk5" event={"ID":"a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a","Type":"ContainerDied","Data":"c5ac2f9cd280a52b31ae283a6199cecc76935c5dc55c79db22defd9b26528efc"} Oct 10 18:06:31 crc kubenswrapper[4799]: I1010 18:06:31.851697 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-mdtk5" Oct 10 18:06:32 crc kubenswrapper[4799]: I1010 18:06:32.040451 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-config-data\") pod \"a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a\" (UID: \"a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a\") " Oct 10 18:06:32 crc kubenswrapper[4799]: I1010 18:06:32.040571 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-scripts\") pod \"a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a\" (UID: \"a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a\") " Oct 10 18:06:32 crc kubenswrapper[4799]: I1010 18:06:32.040604 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-etc-machine-id\") pod \"a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a\" (UID: \"a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a\") " Oct 10 18:06:32 crc kubenswrapper[4799]: I1010 18:06:32.040694 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-db-sync-config-data\") pod \"a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a\" (UID: \"a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a\") " Oct 10 18:06:32 crc kubenswrapper[4799]: I1010 18:06:32.040735 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v52lf\" (UniqueName: \"kubernetes.io/projected/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-kube-api-access-v52lf\") pod \"a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a\" (UID: \"a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a\") " Oct 10 18:06:32 crc kubenswrapper[4799]: I1010 18:06:32.040792 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-combined-ca-bundle\") pod \"a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a\" (UID: \"a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a\") " Oct 10 18:06:32 crc kubenswrapper[4799]: I1010 18:06:32.041791 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a" (UID: "a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 18:06:32 crc kubenswrapper[4799]: I1010 18:06:32.046172 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-kube-api-access-v52lf" (OuterVolumeSpecName: "kube-api-access-v52lf") pod "a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a" (UID: "a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a"). InnerVolumeSpecName "kube-api-access-v52lf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:06:32 crc kubenswrapper[4799]: I1010 18:06:32.046734 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-scripts" (OuterVolumeSpecName: "scripts") pod "a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a" (UID: "a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:06:32 crc kubenswrapper[4799]: I1010 18:06:32.048470 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a" (UID: "a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:06:32 crc kubenswrapper[4799]: I1010 18:06:32.107319 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a" (UID: "a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:06:32 crc kubenswrapper[4799]: I1010 18:06:32.125488 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-config-data" (OuterVolumeSpecName: "config-data") pod "a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a" (UID: "a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:06:32 crc kubenswrapper[4799]: I1010 18:06:32.143488 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:32 crc kubenswrapper[4799]: I1010 18:06:32.143553 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:32 crc kubenswrapper[4799]: I1010 18:06:32.143575 4799 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:32 crc kubenswrapper[4799]: I1010 18:06:32.143603 4799 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:32 crc kubenswrapper[4799]: I1010 18:06:32.143631 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v52lf\" (UniqueName: \"kubernetes.io/projected/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-kube-api-access-v52lf\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:32 crc kubenswrapper[4799]: I1010 18:06:32.143659 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:32 crc kubenswrapper[4799]: I1010 18:06:32.446828 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-mdtk5" event={"ID":"a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a","Type":"ContainerDied","Data":"f10d4b9de98c4f6e624716ade4dd34bce0135d5905222cc509f3e8c9161f25ce"} Oct 10 18:06:32 crc kubenswrapper[4799]: I1010 18:06:32.446907 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f10d4b9de98c4f6e624716ade4dd34bce0135d5905222cc509f3e8c9161f25ce" Oct 10 18:06:32 crc kubenswrapper[4799]: 
I1010 18:06:32.446995 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-mdtk5" Oct 10 18:06:32 crc kubenswrapper[4799]: I1010 18:06:32.932484 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78956d764c-x7sls"] Oct 10 18:06:32 crc kubenswrapper[4799]: E1010 18:06:32.933626 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a" containerName="cinder-db-sync" Oct 10 18:06:32 crc kubenswrapper[4799]: I1010 18:06:32.933704 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a" containerName="cinder-db-sync" Oct 10 18:06:32 crc kubenswrapper[4799]: I1010 18:06:32.933927 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a" containerName="cinder-db-sync" Oct 10 18:06:32 crc kubenswrapper[4799]: I1010 18:06:32.934992 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78956d764c-x7sls" Oct 10 18:06:32 crc kubenswrapper[4799]: I1010 18:06:32.943916 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78956d764c-x7sls"] Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.061899 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c899e0ee-7598-4182-8e81-a8d3c7681559-config\") pod \"dnsmasq-dns-78956d764c-x7sls\" (UID: \"c899e0ee-7598-4182-8e81-a8d3c7681559\") " pod="openstack/dnsmasq-dns-78956d764c-x7sls" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.062179 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c899e0ee-7598-4182-8e81-a8d3c7681559-dns-svc\") pod \"dnsmasq-dns-78956d764c-x7sls\" (UID: \"c899e0ee-7598-4182-8e81-a8d3c7681559\") " pod="openstack/dnsmasq-dns-78956d764c-x7sls" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.062208 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c899e0ee-7598-4182-8e81-a8d3c7681559-ovsdbserver-sb\") pod \"dnsmasq-dns-78956d764c-x7sls\" (UID: \"c899e0ee-7598-4182-8e81-a8d3c7681559\") " pod="openstack/dnsmasq-dns-78956d764c-x7sls" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.062305 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rb92v\" (UniqueName: \"kubernetes.io/projected/c899e0ee-7598-4182-8e81-a8d3c7681559-kube-api-access-rb92v\") pod \"dnsmasq-dns-78956d764c-x7sls\" (UID: \"c899e0ee-7598-4182-8e81-a8d3c7681559\") " pod="openstack/dnsmasq-dns-78956d764c-x7sls" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.062325 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c899e0ee-7598-4182-8e81-a8d3c7681559-ovsdbserver-nb\") pod \"dnsmasq-dns-78956d764c-x7sls\" (UID: \"c899e0ee-7598-4182-8e81-a8d3c7681559\") " pod="openstack/dnsmasq-dns-78956d764c-x7sls" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.118083 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.126744 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.129002 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-5d94p" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.129231 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.129368 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.130448 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.139874 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.179499 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c899e0ee-7598-4182-8e81-a8d3c7681559-ovsdbserver-nb\") pod \"dnsmasq-dns-78956d764c-x7sls\" (UID: \"c899e0ee-7598-4182-8e81-a8d3c7681559\") " pod="openstack/dnsmasq-dns-78956d764c-x7sls" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.179550 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c899e0ee-7598-4182-8e81-a8d3c7681559-config\") pod \"dnsmasq-dns-78956d764c-x7sls\" (UID: \"c899e0ee-7598-4182-8e81-a8d3c7681559\") " pod="openstack/dnsmasq-dns-78956d764c-x7sls" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.179601 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/48f454d9-4648-450f-a206-6859d9d191aa-logs\") pod \"cinder-api-0\" (UID: \"48f454d9-4648-450f-a206-6859d9d191aa\") " pod="openstack/cinder-api-0" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.179621 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c899e0ee-7598-4182-8e81-a8d3c7681559-dns-svc\") pod \"dnsmasq-dns-78956d764c-x7sls\" (UID: \"c899e0ee-7598-4182-8e81-a8d3c7681559\") " pod="openstack/dnsmasq-dns-78956d764c-x7sls" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.179641 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c899e0ee-7598-4182-8e81-a8d3c7681559-ovsdbserver-sb\") pod \"dnsmasq-dns-78956d764c-x7sls\" (UID: \"c899e0ee-7598-4182-8e81-a8d3c7681559\") " pod="openstack/dnsmasq-dns-78956d764c-x7sls" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.179668 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/48f454d9-4648-450f-a206-6859d9d191aa-etc-machine-id\") pod \"cinder-api-0\" (UID: \"48f454d9-4648-450f-a206-6859d9d191aa\") " pod="openstack/cinder-api-0" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.179682 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48f454d9-4648-450f-a206-6859d9d191aa-config-data\") pod \"cinder-api-0\" (UID: \"48f454d9-4648-450f-a206-6859d9d191aa\") " pod="openstack/cinder-api-0" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.179722 
4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hx6lj\" (UniqueName: \"kubernetes.io/projected/48f454d9-4648-450f-a206-6859d9d191aa-kube-api-access-hx6lj\") pod \"cinder-api-0\" (UID: \"48f454d9-4648-450f-a206-6859d9d191aa\") " pod="openstack/cinder-api-0" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.179744 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/48f454d9-4648-450f-a206-6859d9d191aa-config-data-custom\") pod \"cinder-api-0\" (UID: \"48f454d9-4648-450f-a206-6859d9d191aa\") " pod="openstack/cinder-api-0" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.179788 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48f454d9-4648-450f-a206-6859d9d191aa-scripts\") pod \"cinder-api-0\" (UID: \"48f454d9-4648-450f-a206-6859d9d191aa\") " pod="openstack/cinder-api-0" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.179813 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48f454d9-4648-450f-a206-6859d9d191aa-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"48f454d9-4648-450f-a206-6859d9d191aa\") " pod="openstack/cinder-api-0" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.179837 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rb92v\" (UniqueName: \"kubernetes.io/projected/c899e0ee-7598-4182-8e81-a8d3c7681559-kube-api-access-rb92v\") pod \"dnsmasq-dns-78956d764c-x7sls\" (UID: \"c899e0ee-7598-4182-8e81-a8d3c7681559\") " pod="openstack/dnsmasq-dns-78956d764c-x7sls" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.181599 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c899e0ee-7598-4182-8e81-a8d3c7681559-ovsdbserver-sb\") pod \"dnsmasq-dns-78956d764c-x7sls\" (UID: \"c899e0ee-7598-4182-8e81-a8d3c7681559\") " pod="openstack/dnsmasq-dns-78956d764c-x7sls" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.181604 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c899e0ee-7598-4182-8e81-a8d3c7681559-config\") pod \"dnsmasq-dns-78956d764c-x7sls\" (UID: \"c899e0ee-7598-4182-8e81-a8d3c7681559\") " pod="openstack/dnsmasq-dns-78956d764c-x7sls" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.182073 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c899e0ee-7598-4182-8e81-a8d3c7681559-dns-svc\") pod \"dnsmasq-dns-78956d764c-x7sls\" (UID: \"c899e0ee-7598-4182-8e81-a8d3c7681559\") " pod="openstack/dnsmasq-dns-78956d764c-x7sls" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.182389 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c899e0ee-7598-4182-8e81-a8d3c7681559-ovsdbserver-nb\") pod \"dnsmasq-dns-78956d764c-x7sls\" (UID: \"c899e0ee-7598-4182-8e81-a8d3c7681559\") " pod="openstack/dnsmasq-dns-78956d764c-x7sls" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.204704 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rb92v\" (UniqueName: 
\"kubernetes.io/projected/c899e0ee-7598-4182-8e81-a8d3c7681559-kube-api-access-rb92v\") pod \"dnsmasq-dns-78956d764c-x7sls\" (UID: \"c899e0ee-7598-4182-8e81-a8d3c7681559\") " pod="openstack/dnsmasq-dns-78956d764c-x7sls" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.259561 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78956d764c-x7sls" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.281224 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/48f454d9-4648-450f-a206-6859d9d191aa-logs\") pod \"cinder-api-0\" (UID: \"48f454d9-4648-450f-a206-6859d9d191aa\") " pod="openstack/cinder-api-0" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.281537 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/48f454d9-4648-450f-a206-6859d9d191aa-etc-machine-id\") pod \"cinder-api-0\" (UID: \"48f454d9-4648-450f-a206-6859d9d191aa\") " pod="openstack/cinder-api-0" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.281626 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48f454d9-4648-450f-a206-6859d9d191aa-config-data\") pod \"cinder-api-0\" (UID: \"48f454d9-4648-450f-a206-6859d9d191aa\") " pod="openstack/cinder-api-0" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.281744 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hx6lj\" (UniqueName: \"kubernetes.io/projected/48f454d9-4648-450f-a206-6859d9d191aa-kube-api-access-hx6lj\") pod \"cinder-api-0\" (UID: \"48f454d9-4648-450f-a206-6859d9d191aa\") " pod="openstack/cinder-api-0" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.281858 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/48f454d9-4648-450f-a206-6859d9d191aa-config-data-custom\") pod \"cinder-api-0\" (UID: \"48f454d9-4648-450f-a206-6859d9d191aa\") " pod="openstack/cinder-api-0" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.281945 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48f454d9-4648-450f-a206-6859d9d191aa-scripts\") pod \"cinder-api-0\" (UID: \"48f454d9-4648-450f-a206-6859d9d191aa\") " pod="openstack/cinder-api-0" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.281638 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/48f454d9-4648-450f-a206-6859d9d191aa-etc-machine-id\") pod \"cinder-api-0\" (UID: \"48f454d9-4648-450f-a206-6859d9d191aa\") " pod="openstack/cinder-api-0" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.282078 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/48f454d9-4648-450f-a206-6859d9d191aa-logs\") pod \"cinder-api-0\" (UID: \"48f454d9-4648-450f-a206-6859d9d191aa\") " pod="openstack/cinder-api-0" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.282087 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48f454d9-4648-450f-a206-6859d9d191aa-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"48f454d9-4648-450f-a206-6859d9d191aa\") " 
pod="openstack/cinder-api-0" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.284812 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/48f454d9-4648-450f-a206-6859d9d191aa-config-data-custom\") pod \"cinder-api-0\" (UID: \"48f454d9-4648-450f-a206-6859d9d191aa\") " pod="openstack/cinder-api-0" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.286075 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48f454d9-4648-450f-a206-6859d9d191aa-scripts\") pod \"cinder-api-0\" (UID: \"48f454d9-4648-450f-a206-6859d9d191aa\") " pod="openstack/cinder-api-0" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.286605 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48f454d9-4648-450f-a206-6859d9d191aa-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"48f454d9-4648-450f-a206-6859d9d191aa\") " pod="openstack/cinder-api-0" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.290547 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48f454d9-4648-450f-a206-6859d9d191aa-config-data\") pod \"cinder-api-0\" (UID: \"48f454d9-4648-450f-a206-6859d9d191aa\") " pod="openstack/cinder-api-0" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.298455 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hx6lj\" (UniqueName: \"kubernetes.io/projected/48f454d9-4648-450f-a206-6859d9d191aa-kube-api-access-hx6lj\") pod \"cinder-api-0\" (UID: \"48f454d9-4648-450f-a206-6859d9d191aa\") " pod="openstack/cinder-api-0" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.446236 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 10 18:06:33 crc kubenswrapper[4799]: I1010 18:06:33.707070 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78956d764c-x7sls"] Oct 10 18:06:34 crc kubenswrapper[4799]: I1010 18:06:34.009438 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 10 18:06:34 crc kubenswrapper[4799]: W1010 18:06:34.012975 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod48f454d9_4648_450f_a206_6859d9d191aa.slice/crio-01e86b178e981d5f7d9af6cf10df5e3b38d3476bc552111a5df0ed55f17677a9 WatchSource:0}: Error finding container 01e86b178e981d5f7d9af6cf10df5e3b38d3476bc552111a5df0ed55f17677a9: Status 404 returned error can't find the container with id 01e86b178e981d5f7d9af6cf10df5e3b38d3476bc552111a5df0ed55f17677a9 Oct 10 18:06:34 crc kubenswrapper[4799]: I1010 18:06:34.467164 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"48f454d9-4648-450f-a206-6859d9d191aa","Type":"ContainerStarted","Data":"01e86b178e981d5f7d9af6cf10df5e3b38d3476bc552111a5df0ed55f17677a9"} Oct 10 18:06:34 crc kubenswrapper[4799]: I1010 18:06:34.469127 4799 generic.go:334] "Generic (PLEG): container finished" podID="c899e0ee-7598-4182-8e81-a8d3c7681559" containerID="5b80f94f09964b8a24711764d9b6f73412dd7bcee480297585a51b32f732d564" exitCode=0 Oct 10 18:06:34 crc kubenswrapper[4799]: I1010 18:06:34.469208 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78956d764c-x7sls" event={"ID":"c899e0ee-7598-4182-8e81-a8d3c7681559","Type":"ContainerDied","Data":"5b80f94f09964b8a24711764d9b6f73412dd7bcee480297585a51b32f732d564"} Oct 10 18:06:34 crc kubenswrapper[4799]: I1010 18:06:34.469245 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78956d764c-x7sls" event={"ID":"c899e0ee-7598-4182-8e81-a8d3c7681559","Type":"ContainerStarted","Data":"9c240b82fd2e1d66913f227f11d6e0c5aa1d799c2ccff951b8e382640c05d927"} Oct 10 18:06:35 crc kubenswrapper[4799]: I1010 18:06:35.478516 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"48f454d9-4648-450f-a206-6859d9d191aa","Type":"ContainerStarted","Data":"ec6861bdba45ecca6ae0cca7a2b30b426dc2c5bc3c5b49fa419d33902aef48a9"} Oct 10 18:06:35 crc kubenswrapper[4799]: I1010 18:06:35.479014 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 10 18:06:35 crc kubenswrapper[4799]: I1010 18:06:35.479053 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"48f454d9-4648-450f-a206-6859d9d191aa","Type":"ContainerStarted","Data":"866f59c9ebbba8598713593c9e8d69c70c8dcf459233184deec9b651341ba256"} Oct 10 18:06:35 crc kubenswrapper[4799]: I1010 18:06:35.480693 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78956d764c-x7sls" event={"ID":"c899e0ee-7598-4182-8e81-a8d3c7681559","Type":"ContainerStarted","Data":"0d5343f8d899ee64151835d58b53690e618bf8c7e55c6167ad315047b2bc510c"} Oct 10 18:06:35 crc kubenswrapper[4799]: I1010 18:06:35.481181 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-78956d764c-x7sls" Oct 10 18:06:35 crc kubenswrapper[4799]: I1010 18:06:35.532617 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=2.532586562 
podStartE2EDuration="2.532586562s" podCreationTimestamp="2025-10-10 18:06:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:06:35.494437212 +0000 UTC m=+5689.002761337" watchObservedRunningTime="2025-10-10 18:06:35.532586562 +0000 UTC m=+5689.040910697" Oct 10 18:06:35 crc kubenswrapper[4799]: I1010 18:06:35.549581 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-78956d764c-x7sls" podStartSLOduration=3.549558855 podStartE2EDuration="3.549558855s" podCreationTimestamp="2025-10-10 18:06:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:06:35.526171455 +0000 UTC m=+5689.034495580" watchObservedRunningTime="2025-10-10 18:06:35.549558855 +0000 UTC m=+5689.057882960" Oct 10 18:06:37 crc kubenswrapper[4799]: I1010 18:06:37.414789 4799 scope.go:117] "RemoveContainer" containerID="6ae067b7971fd6480cb0c3ccf44d4e22f837ba4674373b4b5903247a9af39cf1" Oct 10 18:06:37 crc kubenswrapper[4799]: E1010 18:06:37.415735 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:06:43 crc kubenswrapper[4799]: I1010 18:06:43.262045 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-78956d764c-x7sls" Oct 10 18:06:43 crc kubenswrapper[4799]: I1010 18:06:43.382316 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c8b9ddf8f-z54wf"] Oct 10 18:06:43 crc kubenswrapper[4799]: I1010 18:06:43.382623 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6c8b9ddf8f-z54wf" podUID="14c0281b-d3e8-40e4-8ecf-1d1a06c629e0" containerName="dnsmasq-dns" containerID="cri-o://d814cc7beef1f3ae81759f1e6a69b19208b623862c7bccf0636fcdc509d150c0" gracePeriod=10 Oct 10 18:06:43 crc kubenswrapper[4799]: I1010 18:06:43.589257 4799 generic.go:334] "Generic (PLEG): container finished" podID="14c0281b-d3e8-40e4-8ecf-1d1a06c629e0" containerID="d814cc7beef1f3ae81759f1e6a69b19208b623862c7bccf0636fcdc509d150c0" exitCode=0 Oct 10 18:06:43 crc kubenswrapper[4799]: I1010 18:06:43.589428 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c8b9ddf8f-z54wf" event={"ID":"14c0281b-d3e8-40e4-8ecf-1d1a06c629e0","Type":"ContainerDied","Data":"d814cc7beef1f3ae81759f1e6a69b19208b623862c7bccf0636fcdc509d150c0"} Oct 10 18:06:43 crc kubenswrapper[4799]: I1010 18:06:43.894385 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c8b9ddf8f-z54wf" Oct 10 18:06:43 crc kubenswrapper[4799]: I1010 18:06:43.965628 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14c0281b-d3e8-40e4-8ecf-1d1a06c629e0-config\") pod \"14c0281b-d3e8-40e4-8ecf-1d1a06c629e0\" (UID: \"14c0281b-d3e8-40e4-8ecf-1d1a06c629e0\") " Oct 10 18:06:43 crc kubenswrapper[4799]: I1010 18:06:43.965679 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/14c0281b-d3e8-40e4-8ecf-1d1a06c629e0-ovsdbserver-nb\") pod \"14c0281b-d3e8-40e4-8ecf-1d1a06c629e0\" (UID: \"14c0281b-d3e8-40e4-8ecf-1d1a06c629e0\") " Oct 10 18:06:43 crc kubenswrapper[4799]: I1010 18:06:43.965741 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wklbs\" (UniqueName: \"kubernetes.io/projected/14c0281b-d3e8-40e4-8ecf-1d1a06c629e0-kube-api-access-wklbs\") pod \"14c0281b-d3e8-40e4-8ecf-1d1a06c629e0\" (UID: \"14c0281b-d3e8-40e4-8ecf-1d1a06c629e0\") " Oct 10 18:06:43 crc kubenswrapper[4799]: I1010 18:06:43.965785 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/14c0281b-d3e8-40e4-8ecf-1d1a06c629e0-dns-svc\") pod \"14c0281b-d3e8-40e4-8ecf-1d1a06c629e0\" (UID: \"14c0281b-d3e8-40e4-8ecf-1d1a06c629e0\") " Oct 10 18:06:43 crc kubenswrapper[4799]: I1010 18:06:43.965805 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/14c0281b-d3e8-40e4-8ecf-1d1a06c629e0-ovsdbserver-sb\") pod \"14c0281b-d3e8-40e4-8ecf-1d1a06c629e0\" (UID: \"14c0281b-d3e8-40e4-8ecf-1d1a06c629e0\") " Oct 10 18:06:43 crc kubenswrapper[4799]: I1010 18:06:43.976048 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14c0281b-d3e8-40e4-8ecf-1d1a06c629e0-kube-api-access-wklbs" (OuterVolumeSpecName: "kube-api-access-wklbs") pod "14c0281b-d3e8-40e4-8ecf-1d1a06c629e0" (UID: "14c0281b-d3e8-40e4-8ecf-1d1a06c629e0"). InnerVolumeSpecName "kube-api-access-wklbs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:06:44 crc kubenswrapper[4799]: I1010 18:06:44.021041 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14c0281b-d3e8-40e4-8ecf-1d1a06c629e0-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "14c0281b-d3e8-40e4-8ecf-1d1a06c629e0" (UID: "14c0281b-d3e8-40e4-8ecf-1d1a06c629e0"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:06:44 crc kubenswrapper[4799]: I1010 18:06:44.024540 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14c0281b-d3e8-40e4-8ecf-1d1a06c629e0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "14c0281b-d3e8-40e4-8ecf-1d1a06c629e0" (UID: "14c0281b-d3e8-40e4-8ecf-1d1a06c629e0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:06:44 crc kubenswrapper[4799]: I1010 18:06:44.041044 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14c0281b-d3e8-40e4-8ecf-1d1a06c629e0-config" (OuterVolumeSpecName: "config") pod "14c0281b-d3e8-40e4-8ecf-1d1a06c629e0" (UID: "14c0281b-d3e8-40e4-8ecf-1d1a06c629e0"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:06:44 crc kubenswrapper[4799]: I1010 18:06:44.055658 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14c0281b-d3e8-40e4-8ecf-1d1a06c629e0-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "14c0281b-d3e8-40e4-8ecf-1d1a06c629e0" (UID: "14c0281b-d3e8-40e4-8ecf-1d1a06c629e0"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:06:44 crc kubenswrapper[4799]: I1010 18:06:44.067884 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14c0281b-d3e8-40e4-8ecf-1d1a06c629e0-config\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:44 crc kubenswrapper[4799]: I1010 18:06:44.067915 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/14c0281b-d3e8-40e4-8ecf-1d1a06c629e0-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:44 crc kubenswrapper[4799]: I1010 18:06:44.067927 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wklbs\" (UniqueName: \"kubernetes.io/projected/14c0281b-d3e8-40e4-8ecf-1d1a06c629e0-kube-api-access-wklbs\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:44 crc kubenswrapper[4799]: I1010 18:06:44.067936 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/14c0281b-d3e8-40e4-8ecf-1d1a06c629e0-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:44 crc kubenswrapper[4799]: I1010 18:06:44.067944 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/14c0281b-d3e8-40e4-8ecf-1d1a06c629e0-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:44 crc kubenswrapper[4799]: I1010 18:06:44.600733 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c8b9ddf8f-z54wf" event={"ID":"14c0281b-d3e8-40e4-8ecf-1d1a06c629e0","Type":"ContainerDied","Data":"0d7d9c6b480099ad7d6cdd0feeb084c1f2d1df1bd48059e8e73358ba50bea1ca"} Oct 10 18:06:44 crc kubenswrapper[4799]: I1010 18:06:44.600806 4799 scope.go:117] "RemoveContainer" containerID="d814cc7beef1f3ae81759f1e6a69b19208b623862c7bccf0636fcdc509d150c0" Oct 10 18:06:44 crc kubenswrapper[4799]: I1010 18:06:44.600887 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c8b9ddf8f-z54wf" Oct 10 18:06:44 crc kubenswrapper[4799]: I1010 18:06:44.644522 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c8b9ddf8f-z54wf"] Oct 10 18:06:44 crc kubenswrapper[4799]: I1010 18:06:44.649525 4799 scope.go:117] "RemoveContainer" containerID="54d3c64236871e47647d2d0edcb9eb7ff7ae2f6fc6b4f7b1138e92ec72e9299a" Oct 10 18:06:44 crc kubenswrapper[4799]: I1010 18:06:44.653217 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6c8b9ddf8f-z54wf"] Oct 10 18:06:45 crc kubenswrapper[4799]: I1010 18:06:45.139561 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 10 18:06:45 crc kubenswrapper[4799]: I1010 18:06:45.140037 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="84cc2351-4774-443b-878a-91aedb81db3e" containerName="nova-cell0-conductor-conductor" containerID="cri-o://0248e07d40807e658db7c7b6a5d475ff49dbe6d01ce9c4367c6ecaaf20143521" gracePeriod=30 Oct 10 18:06:45 crc kubenswrapper[4799]: I1010 18:06:45.155897 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 10 18:06:45 crc kubenswrapper[4799]: I1010 18:06:45.156330 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="272a89d6-8528-473f-8aed-1c3ef584fbd6" containerName="nova-metadata-log" containerID="cri-o://ee802f0441aedc24b337be8bd108842c01a64df525467a4888330f248031054f" gracePeriod=30 Oct 10 18:06:45 crc kubenswrapper[4799]: I1010 18:06:45.156489 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="272a89d6-8528-473f-8aed-1c3ef584fbd6" containerName="nova-metadata-metadata" containerID="cri-o://a1fea6e30e4b40ae24479ceb6f9f170a7ec6c26cd9ef632445d042d6a3ff0733" gracePeriod=30 Oct 10 18:06:45 crc kubenswrapper[4799]: I1010 18:06:45.211500 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 10 18:06:45 crc kubenswrapper[4799]: I1010 18:06:45.211830 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="83dd6056-b85a-4884-8d59-9edc767e6558" containerName="nova-api-log" containerID="cri-o://d7cb973096212d554815f18652c1539ace60989595bed637fee02e498244fffd" gracePeriod=30 Oct 10 18:06:45 crc kubenswrapper[4799]: I1010 18:06:45.211925 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="83dd6056-b85a-4884-8d59-9edc767e6558" containerName="nova-api-api" containerID="cri-o://e058e3e9ebaa77e73a73123b986ee8de170338b6c672981b6d67c9915f6cf154" gracePeriod=30 Oct 10 18:06:45 crc kubenswrapper[4799]: I1010 18:06:45.228940 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 10 18:06:45 crc kubenswrapper[4799]: I1010 18:06:45.229192 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="476023db-893d-4a8c-86cc-34cfe375f5ef" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://049e54e12e774d232e9d10d9155e6bdc94ba9564bca7719210354ad27e2bc377" gracePeriod=30 Oct 10 18:06:45 crc kubenswrapper[4799]: I1010 18:06:45.237144 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Oct 10 18:06:45 crc kubenswrapper[4799]: I1010 
18:06:45.239194 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 10 18:06:45 crc kubenswrapper[4799]: I1010 18:06:45.239500 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="a5051fa5-4437-4de3-8638-4f7afaaaf938" containerName="nova-scheduler-scheduler" containerID="cri-o://9446512914eec048268f955226e54544437e148c087533f01679b5ecaf7bb0aa" gracePeriod=30 Oct 10 18:06:45 crc kubenswrapper[4799]: I1010 18:06:45.446074 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14c0281b-d3e8-40e4-8ecf-1d1a06c629e0" path="/var/lib/kubelet/pods/14c0281b-d3e8-40e4-8ecf-1d1a06c629e0/volumes" Oct 10 18:06:45 crc kubenswrapper[4799]: I1010 18:06:45.609553 4799 generic.go:334] "Generic (PLEG): container finished" podID="272a89d6-8528-473f-8aed-1c3ef584fbd6" containerID="ee802f0441aedc24b337be8bd108842c01a64df525467a4888330f248031054f" exitCode=143 Oct 10 18:06:45 crc kubenswrapper[4799]: I1010 18:06:45.609640 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"272a89d6-8528-473f-8aed-1c3ef584fbd6","Type":"ContainerDied","Data":"ee802f0441aedc24b337be8bd108842c01a64df525467a4888330f248031054f"} Oct 10 18:06:45 crc kubenswrapper[4799]: I1010 18:06:45.612668 4799 generic.go:334] "Generic (PLEG): container finished" podID="83dd6056-b85a-4884-8d59-9edc767e6558" containerID="d7cb973096212d554815f18652c1539ace60989595bed637fee02e498244fffd" exitCode=143 Oct 10 18:06:45 crc kubenswrapper[4799]: I1010 18:06:45.612711 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"83dd6056-b85a-4884-8d59-9edc767e6558","Type":"ContainerDied","Data":"d7cb973096212d554815f18652c1539ace60989595bed637fee02e498244fffd"} Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.031749 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.108450 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dgv6t\" (UniqueName: \"kubernetes.io/projected/476023db-893d-4a8c-86cc-34cfe375f5ef-kube-api-access-dgv6t\") pod \"476023db-893d-4a8c-86cc-34cfe375f5ef\" (UID: \"476023db-893d-4a8c-86cc-34cfe375f5ef\") " Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.108519 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/476023db-893d-4a8c-86cc-34cfe375f5ef-combined-ca-bundle\") pod \"476023db-893d-4a8c-86cc-34cfe375f5ef\" (UID: \"476023db-893d-4a8c-86cc-34cfe375f5ef\") " Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.108622 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/476023db-893d-4a8c-86cc-34cfe375f5ef-config-data\") pod \"476023db-893d-4a8c-86cc-34cfe375f5ef\" (UID: \"476023db-893d-4a8c-86cc-34cfe375f5ef\") " Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.119656 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/476023db-893d-4a8c-86cc-34cfe375f5ef-kube-api-access-dgv6t" (OuterVolumeSpecName: "kube-api-access-dgv6t") pod "476023db-893d-4a8c-86cc-34cfe375f5ef" (UID: "476023db-893d-4a8c-86cc-34cfe375f5ef"). InnerVolumeSpecName "kube-api-access-dgv6t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.137988 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/476023db-893d-4a8c-86cc-34cfe375f5ef-config-data" (OuterVolumeSpecName: "config-data") pod "476023db-893d-4a8c-86cc-34cfe375f5ef" (UID: "476023db-893d-4a8c-86cc-34cfe375f5ef"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.141324 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/476023db-893d-4a8c-86cc-34cfe375f5ef-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "476023db-893d-4a8c-86cc-34cfe375f5ef" (UID: "476023db-893d-4a8c-86cc-34cfe375f5ef"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.209957 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dgv6t\" (UniqueName: \"kubernetes.io/projected/476023db-893d-4a8c-86cc-34cfe375f5ef-kube-api-access-dgv6t\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.209984 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/476023db-893d-4a8c-86cc-34cfe375f5ef-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.210021 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/476023db-893d-4a8c-86cc-34cfe375f5ef-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:46 crc kubenswrapper[4799]: E1010 18:06:46.336500 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0248e07d40807e658db7c7b6a5d475ff49dbe6d01ce9c4367c6ecaaf20143521" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 10 18:06:46 crc kubenswrapper[4799]: E1010 18:06:46.339809 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0248e07d40807e658db7c7b6a5d475ff49dbe6d01ce9c4367c6ecaaf20143521" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 10 18:06:46 crc kubenswrapper[4799]: E1010 18:06:46.341601 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0248e07d40807e658db7c7b6a5d475ff49dbe6d01ce9c4367c6ecaaf20143521" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 10 18:06:46 crc kubenswrapper[4799]: E1010 18:06:46.341689 4799 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="84cc2351-4774-443b-878a-91aedb81db3e" containerName="nova-cell0-conductor-conductor" Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.624713 4799 generic.go:334] "Generic (PLEG): container finished" podID="476023db-893d-4a8c-86cc-34cfe375f5ef" 
containerID="049e54e12e774d232e9d10d9155e6bdc94ba9564bca7719210354ad27e2bc377" exitCode=0 Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.624806 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"476023db-893d-4a8c-86cc-34cfe375f5ef","Type":"ContainerDied","Data":"049e54e12e774d232e9d10d9155e6bdc94ba9564bca7719210354ad27e2bc377"} Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.624839 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.626480 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"476023db-893d-4a8c-86cc-34cfe375f5ef","Type":"ContainerDied","Data":"72e47b85b9b2432e680a8c5aef08b6346dd7e00c00d6539dc272dd78a000748c"} Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.626605 4799 scope.go:117] "RemoveContainer" containerID="049e54e12e774d232e9d10d9155e6bdc94ba9564bca7719210354ad27e2bc377" Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.662181 4799 scope.go:117] "RemoveContainer" containerID="049e54e12e774d232e9d10d9155e6bdc94ba9564bca7719210354ad27e2bc377" Oct 10 18:06:46 crc kubenswrapper[4799]: E1010 18:06:46.662782 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"049e54e12e774d232e9d10d9155e6bdc94ba9564bca7719210354ad27e2bc377\": container with ID starting with 049e54e12e774d232e9d10d9155e6bdc94ba9564bca7719210354ad27e2bc377 not found: ID does not exist" containerID="049e54e12e774d232e9d10d9155e6bdc94ba9564bca7719210354ad27e2bc377" Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.662916 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"049e54e12e774d232e9d10d9155e6bdc94ba9564bca7719210354ad27e2bc377"} err="failed to get container status \"049e54e12e774d232e9d10d9155e6bdc94ba9564bca7719210354ad27e2bc377\": rpc error: code = NotFound desc = could not find container \"049e54e12e774d232e9d10d9155e6bdc94ba9564bca7719210354ad27e2bc377\": container with ID starting with 049e54e12e774d232e9d10d9155e6bdc94ba9564bca7719210354ad27e2bc377 not found: ID does not exist" Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.689828 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.703851 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.720479 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 10 18:06:46 crc kubenswrapper[4799]: E1010 18:06:46.732041 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="476023db-893d-4a8c-86cc-34cfe375f5ef" containerName="nova-cell1-novncproxy-novncproxy" Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.732109 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="476023db-893d-4a8c-86cc-34cfe375f5ef" containerName="nova-cell1-novncproxy-novncproxy" Oct 10 18:06:46 crc kubenswrapper[4799]: E1010 18:06:46.732169 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14c0281b-d3e8-40e4-8ecf-1d1a06c629e0" containerName="init" Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.732205 4799 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="14c0281b-d3e8-40e4-8ecf-1d1a06c629e0" containerName="init" Oct 10 18:06:46 crc kubenswrapper[4799]: E1010 18:06:46.732269 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14c0281b-d3e8-40e4-8ecf-1d1a06c629e0" containerName="dnsmasq-dns" Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.732310 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="14c0281b-d3e8-40e4-8ecf-1d1a06c629e0" containerName="dnsmasq-dns" Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.732551 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="14c0281b-d3e8-40e4-8ecf-1d1a06c629e0" containerName="dnsmasq-dns" Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.732580 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="476023db-893d-4a8c-86cc-34cfe375f5ef" containerName="nova-cell1-novncproxy-novncproxy" Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.737144 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.739485 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.753709 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.823679 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rlnd9\" (UniqueName: \"kubernetes.io/projected/22259f82-d023-44bb-95c6-1def08492d5b-kube-api-access-rlnd9\") pod \"nova-cell1-novncproxy-0\" (UID: \"22259f82-d023-44bb-95c6-1def08492d5b\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.824113 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22259f82-d023-44bb-95c6-1def08492d5b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"22259f82-d023-44bb-95c6-1def08492d5b\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.824255 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22259f82-d023-44bb-95c6-1def08492d5b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"22259f82-d023-44bb-95c6-1def08492d5b\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.926441 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22259f82-d023-44bb-95c6-1def08492d5b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"22259f82-d023-44bb-95c6-1def08492d5b\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.926586 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22259f82-d023-44bb-95c6-1def08492d5b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"22259f82-d023-44bb-95c6-1def08492d5b\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.927503 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rlnd9\" (UniqueName: 
\"kubernetes.io/projected/22259f82-d023-44bb-95c6-1def08492d5b-kube-api-access-rlnd9\") pod \"nova-cell1-novncproxy-0\" (UID: \"22259f82-d023-44bb-95c6-1def08492d5b\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.931690 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22259f82-d023-44bb-95c6-1def08492d5b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"22259f82-d023-44bb-95c6-1def08492d5b\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.939117 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22259f82-d023-44bb-95c6-1def08492d5b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"22259f82-d023-44bb-95c6-1def08492d5b\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 18:06:46 crc kubenswrapper[4799]: I1010 18:06:46.953530 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rlnd9\" (UniqueName: \"kubernetes.io/projected/22259f82-d023-44bb-95c6-1def08492d5b-kube-api-access-rlnd9\") pod \"nova-cell1-novncproxy-0\" (UID: \"22259f82-d023-44bb-95c6-1def08492d5b\") " pod="openstack/nova-cell1-novncproxy-0" Oct 10 18:06:47 crc kubenswrapper[4799]: I1010 18:06:47.059681 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 10 18:06:47 crc kubenswrapper[4799]: I1010 18:06:47.426173 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="476023db-893d-4a8c-86cc-34cfe375f5ef" path="/var/lib/kubelet/pods/476023db-893d-4a8c-86cc-34cfe375f5ef/volumes" Oct 10 18:06:47 crc kubenswrapper[4799]: I1010 18:06:47.538479 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 10 18:06:47 crc kubenswrapper[4799]: I1010 18:06:47.601930 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 10 18:06:47 crc kubenswrapper[4799]: I1010 18:06:47.634695 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"22259f82-d023-44bb-95c6-1def08492d5b","Type":"ContainerStarted","Data":"67f8c7397b1d8671833eff48c99fdc7bee879f81d10b83b962a932ea76d14c1f"} Oct 10 18:06:47 crc kubenswrapper[4799]: I1010 18:06:47.635859 4799 generic.go:334] "Generic (PLEG): container finished" podID="a5051fa5-4437-4de3-8638-4f7afaaaf938" containerID="9446512914eec048268f955226e54544437e148c087533f01679b5ecaf7bb0aa" exitCode=0 Oct 10 18:06:47 crc kubenswrapper[4799]: I1010 18:06:47.635902 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a5051fa5-4437-4de3-8638-4f7afaaaf938","Type":"ContainerDied","Data":"9446512914eec048268f955226e54544437e148c087533f01679b5ecaf7bb0aa"} Oct 10 18:06:47 crc kubenswrapper[4799]: I1010 18:06:47.635919 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a5051fa5-4437-4de3-8638-4f7afaaaf938","Type":"ContainerDied","Data":"62b32f6f048bad1f2726a12f025e9a52f90c5fa43b9dfc536cd064487941941b"} Oct 10 18:06:47 crc kubenswrapper[4799]: I1010 18:06:47.635937 4799 scope.go:117] "RemoveContainer" containerID="9446512914eec048268f955226e54544437e148c087533f01679b5ecaf7bb0aa" Oct 10 18:06:47 crc kubenswrapper[4799]: I1010 18:06:47.636052 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 10 18:06:47 crc kubenswrapper[4799]: I1010 18:06:47.638035 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t2nrb\" (UniqueName: \"kubernetes.io/projected/a5051fa5-4437-4de3-8638-4f7afaaaf938-kube-api-access-t2nrb\") pod \"a5051fa5-4437-4de3-8638-4f7afaaaf938\" (UID: \"a5051fa5-4437-4de3-8638-4f7afaaaf938\") " Oct 10 18:06:47 crc kubenswrapper[4799]: I1010 18:06:47.638166 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5051fa5-4437-4de3-8638-4f7afaaaf938-combined-ca-bundle\") pod \"a5051fa5-4437-4de3-8638-4f7afaaaf938\" (UID: \"a5051fa5-4437-4de3-8638-4f7afaaaf938\") " Oct 10 18:06:47 crc kubenswrapper[4799]: I1010 18:06:47.638206 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5051fa5-4437-4de3-8638-4f7afaaaf938-config-data\") pod \"a5051fa5-4437-4de3-8638-4f7afaaaf938\" (UID: \"a5051fa5-4437-4de3-8638-4f7afaaaf938\") " Oct 10 18:06:47 crc kubenswrapper[4799]: I1010 18:06:47.647118 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5051fa5-4437-4de3-8638-4f7afaaaf938-kube-api-access-t2nrb" (OuterVolumeSpecName: "kube-api-access-t2nrb") pod "a5051fa5-4437-4de3-8638-4f7afaaaf938" (UID: "a5051fa5-4437-4de3-8638-4f7afaaaf938"). InnerVolumeSpecName "kube-api-access-t2nrb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:06:47 crc kubenswrapper[4799]: I1010 18:06:47.660255 4799 scope.go:117] "RemoveContainer" containerID="9446512914eec048268f955226e54544437e148c087533f01679b5ecaf7bb0aa" Oct 10 18:06:47 crc kubenswrapper[4799]: E1010 18:06:47.660721 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9446512914eec048268f955226e54544437e148c087533f01679b5ecaf7bb0aa\": container with ID starting with 9446512914eec048268f955226e54544437e148c087533f01679b5ecaf7bb0aa not found: ID does not exist" containerID="9446512914eec048268f955226e54544437e148c087533f01679b5ecaf7bb0aa" Oct 10 18:06:47 crc kubenswrapper[4799]: I1010 18:06:47.660837 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9446512914eec048268f955226e54544437e148c087533f01679b5ecaf7bb0aa"} err="failed to get container status \"9446512914eec048268f955226e54544437e148c087533f01679b5ecaf7bb0aa\": rpc error: code = NotFound desc = could not find container \"9446512914eec048268f955226e54544437e148c087533f01679b5ecaf7bb0aa\": container with ID starting with 9446512914eec048268f955226e54544437e148c087533f01679b5ecaf7bb0aa not found: ID does not exist" Oct 10 18:06:47 crc kubenswrapper[4799]: I1010 18:06:47.665922 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5051fa5-4437-4de3-8638-4f7afaaaf938-config-data" (OuterVolumeSpecName: "config-data") pod "a5051fa5-4437-4de3-8638-4f7afaaaf938" (UID: "a5051fa5-4437-4de3-8638-4f7afaaaf938"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:06:47 crc kubenswrapper[4799]: I1010 18:06:47.682118 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5051fa5-4437-4de3-8638-4f7afaaaf938-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a5051fa5-4437-4de3-8638-4f7afaaaf938" (UID: "a5051fa5-4437-4de3-8638-4f7afaaaf938"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:06:47 crc kubenswrapper[4799]: I1010 18:06:47.740511 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t2nrb\" (UniqueName: \"kubernetes.io/projected/a5051fa5-4437-4de3-8638-4f7afaaaf938-kube-api-access-t2nrb\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:47 crc kubenswrapper[4799]: I1010 18:06:47.740540 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5051fa5-4437-4de3-8638-4f7afaaaf938-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:47 crc kubenswrapper[4799]: I1010 18:06:47.740551 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5051fa5-4437-4de3-8638-4f7afaaaf938-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:48 crc kubenswrapper[4799]: I1010 18:06:48.050194 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 10 18:06:48 crc kubenswrapper[4799]: I1010 18:06:48.058471 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 10 18:06:48 crc kubenswrapper[4799]: I1010 18:06:48.067541 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 10 18:06:48 crc kubenswrapper[4799]: E1010 18:06:48.068179 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5051fa5-4437-4de3-8638-4f7afaaaf938" containerName="nova-scheduler-scheduler" Oct 10 18:06:48 crc kubenswrapper[4799]: I1010 18:06:48.068212 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5051fa5-4437-4de3-8638-4f7afaaaf938" containerName="nova-scheduler-scheduler" Oct 10 18:06:48 crc kubenswrapper[4799]: I1010 18:06:48.068569 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5051fa5-4437-4de3-8638-4f7afaaaf938" containerName="nova-scheduler-scheduler" Oct 10 18:06:48 crc kubenswrapper[4799]: I1010 18:06:48.069673 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 10 18:06:48 crc kubenswrapper[4799]: I1010 18:06:48.073729 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 10 18:06:48 crc kubenswrapper[4799]: I1010 18:06:48.090904 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 10 18:06:48 crc kubenswrapper[4799]: I1010 18:06:48.249617 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btbgv\" (UniqueName: \"kubernetes.io/projected/5938ba89-d944-4772-834f-67074d54da34-kube-api-access-btbgv\") pod \"nova-scheduler-0\" (UID: \"5938ba89-d944-4772-834f-67074d54da34\") " pod="openstack/nova-scheduler-0" Oct 10 18:06:48 crc kubenswrapper[4799]: I1010 18:06:48.249779 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5938ba89-d944-4772-834f-67074d54da34-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5938ba89-d944-4772-834f-67074d54da34\") " pod="openstack/nova-scheduler-0" Oct 10 18:06:48 crc kubenswrapper[4799]: I1010 18:06:48.249805 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5938ba89-d944-4772-834f-67074d54da34-config-data\") pod \"nova-scheduler-0\" (UID: \"5938ba89-d944-4772-834f-67074d54da34\") " pod="openstack/nova-scheduler-0" Oct 10 18:06:48 crc kubenswrapper[4799]: I1010 18:06:48.351571 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5938ba89-d944-4772-834f-67074d54da34-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5938ba89-d944-4772-834f-67074d54da34\") " pod="openstack/nova-scheduler-0" Oct 10 18:06:48 crc kubenswrapper[4799]: I1010 18:06:48.351723 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5938ba89-d944-4772-834f-67074d54da34-config-data\") pod \"nova-scheduler-0\" (UID: \"5938ba89-d944-4772-834f-67074d54da34\") " pod="openstack/nova-scheduler-0" Oct 10 18:06:48 crc kubenswrapper[4799]: I1010 18:06:48.352618 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btbgv\" (UniqueName: \"kubernetes.io/projected/5938ba89-d944-4772-834f-67074d54da34-kube-api-access-btbgv\") pod \"nova-scheduler-0\" (UID: \"5938ba89-d944-4772-834f-67074d54da34\") " pod="openstack/nova-scheduler-0" Oct 10 18:06:48 crc kubenswrapper[4799]: I1010 18:06:48.356748 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5938ba89-d944-4772-834f-67074d54da34-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5938ba89-d944-4772-834f-67074d54da34\") " pod="openstack/nova-scheduler-0" Oct 10 18:06:48 crc kubenswrapper[4799]: I1010 18:06:48.356986 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5938ba89-d944-4772-834f-67074d54da34-config-data\") pod \"nova-scheduler-0\" (UID: \"5938ba89-d944-4772-834f-67074d54da34\") " pod="openstack/nova-scheduler-0" Oct 10 18:06:48 crc kubenswrapper[4799]: I1010 18:06:48.372634 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btbgv\" (UniqueName: 
\"kubernetes.io/projected/5938ba89-d944-4772-834f-67074d54da34-kube-api-access-btbgv\") pod \"nova-scheduler-0\" (UID: \"5938ba89-d944-4772-834f-67074d54da34\") " pod="openstack/nova-scheduler-0" Oct 10 18:06:48 crc kubenswrapper[4799]: I1010 18:06:48.400233 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 10 18:06:48 crc kubenswrapper[4799]: I1010 18:06:48.457716 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 10 18:06:48 crc kubenswrapper[4799]: I1010 18:06:48.468131 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="6c1c1a59-308c-408a-9368-9c3be24cc383" containerName="nova-cell1-conductor-conductor" containerID="cri-o://6f3c40b2e02fe491d7a8cb2cd3cd4e974eb3094ffd182dc796407938e39fc38c" gracePeriod=30 Oct 10 18:06:48 crc kubenswrapper[4799]: I1010 18:06:48.479364 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="272a89d6-8528-473f-8aed-1c3ef584fbd6" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.84:8775/\": dial tcp 10.217.1.84:8775: connect: connection refused" Oct 10 18:06:48 crc kubenswrapper[4799]: I1010 18:06:48.479631 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="272a89d6-8528-473f-8aed-1c3ef584fbd6" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.84:8775/\": dial tcp 10.217.1.84:8775: connect: connection refused" Oct 10 18:06:48 crc kubenswrapper[4799]: E1010 18:06:48.599395 4799 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod272a89d6_8528_473f_8aed_1c3ef584fbd6.slice/crio-a1fea6e30e4b40ae24479ceb6f9f170a7ec6c26cd9ef632445d042d6a3ff0733.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod272a89d6_8528_473f_8aed_1c3ef584fbd6.slice/crio-conmon-a1fea6e30e4b40ae24479ceb6f9f170a7ec6c26cd9ef632445d042d6a3ff0733.scope\": RecentStats: unable to find data in memory cache]" Oct 10 18:06:48 crc kubenswrapper[4799]: I1010 18:06:48.653716 4799 generic.go:334] "Generic (PLEG): container finished" podID="272a89d6-8528-473f-8aed-1c3ef584fbd6" containerID="a1fea6e30e4b40ae24479ceb6f9f170a7ec6c26cd9ef632445d042d6a3ff0733" exitCode=0 Oct 10 18:06:48 crc kubenswrapper[4799]: I1010 18:06:48.653795 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"272a89d6-8528-473f-8aed-1c3ef584fbd6","Type":"ContainerDied","Data":"a1fea6e30e4b40ae24479ceb6f9f170a7ec6c26cd9ef632445d042d6a3ff0733"} Oct 10 18:06:48 crc kubenswrapper[4799]: I1010 18:06:48.657724 4799 generic.go:334] "Generic (PLEG): container finished" podID="83dd6056-b85a-4884-8d59-9edc767e6558" containerID="e058e3e9ebaa77e73a73123b986ee8de170338b6c672981b6d67c9915f6cf154" exitCode=0 Oct 10 18:06:48 crc kubenswrapper[4799]: I1010 18:06:48.657779 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"83dd6056-b85a-4884-8d59-9edc767e6558","Type":"ContainerDied","Data":"e058e3e9ebaa77e73a73123b986ee8de170338b6c672981b6d67c9915f6cf154"} Oct 10 18:06:48 crc kubenswrapper[4799]: I1010 18:06:48.659765 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" 
event={"ID":"22259f82-d023-44bb-95c6-1def08492d5b","Type":"ContainerStarted","Data":"5cc4402b1f1d6d82d8b5e616473c78885ecba7b8979ad4795fab2c94f6ee04dd"} Oct 10 18:06:48 crc kubenswrapper[4799]: I1010 18:06:48.853202 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 10 18:06:48 crc kubenswrapper[4799]: I1010 18:06:48.882496 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.882469289 podStartE2EDuration="2.882469289s" podCreationTimestamp="2025-10-10 18:06:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:06:48.685334534 +0000 UTC m=+5702.193658649" watchObservedRunningTime="2025-10-10 18:06:48.882469289 +0000 UTC m=+5702.390793414" Oct 10 18:06:48 crc kubenswrapper[4799]: I1010 18:06:48.963696 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/272a89d6-8528-473f-8aed-1c3ef584fbd6-config-data\") pod \"272a89d6-8528-473f-8aed-1c3ef584fbd6\" (UID: \"272a89d6-8528-473f-8aed-1c3ef584fbd6\") " Oct 10 18:06:48 crc kubenswrapper[4799]: I1010 18:06:48.964141 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/272a89d6-8528-473f-8aed-1c3ef584fbd6-logs\") pod \"272a89d6-8528-473f-8aed-1c3ef584fbd6\" (UID: \"272a89d6-8528-473f-8aed-1c3ef584fbd6\") " Oct 10 18:06:48 crc kubenswrapper[4799]: I1010 18:06:48.964300 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/272a89d6-8528-473f-8aed-1c3ef584fbd6-combined-ca-bundle\") pod \"272a89d6-8528-473f-8aed-1c3ef584fbd6\" (UID: \"272a89d6-8528-473f-8aed-1c3ef584fbd6\") " Oct 10 18:06:48 crc kubenswrapper[4799]: I1010 18:06:48.964371 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j9xxf\" (UniqueName: \"kubernetes.io/projected/272a89d6-8528-473f-8aed-1c3ef584fbd6-kube-api-access-j9xxf\") pod \"272a89d6-8528-473f-8aed-1c3ef584fbd6\" (UID: \"272a89d6-8528-473f-8aed-1c3ef584fbd6\") " Oct 10 18:06:48 crc kubenswrapper[4799]: I1010 18:06:48.968537 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/272a89d6-8528-473f-8aed-1c3ef584fbd6-logs" (OuterVolumeSpecName: "logs") pod "272a89d6-8528-473f-8aed-1c3ef584fbd6" (UID: "272a89d6-8528-473f-8aed-1c3ef584fbd6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:06:48 crc kubenswrapper[4799]: I1010 18:06:48.971869 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/272a89d6-8528-473f-8aed-1c3ef584fbd6-kube-api-access-j9xxf" (OuterVolumeSpecName: "kube-api-access-j9xxf") pod "272a89d6-8528-473f-8aed-1c3ef584fbd6" (UID: "272a89d6-8528-473f-8aed-1c3ef584fbd6"). InnerVolumeSpecName "kube-api-access-j9xxf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.021963 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/272a89d6-8528-473f-8aed-1c3ef584fbd6-config-data" (OuterVolumeSpecName: "config-data") pod "272a89d6-8528-473f-8aed-1c3ef584fbd6" (UID: "272a89d6-8528-473f-8aed-1c3ef584fbd6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.026345 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/272a89d6-8528-473f-8aed-1c3ef584fbd6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "272a89d6-8528-473f-8aed-1c3ef584fbd6" (UID: "272a89d6-8528-473f-8aed-1c3ef584fbd6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.066290 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/272a89d6-8528-473f-8aed-1c3ef584fbd6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.066320 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j9xxf\" (UniqueName: \"kubernetes.io/projected/272a89d6-8528-473f-8aed-1c3ef584fbd6-kube-api-access-j9xxf\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.066332 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/272a89d6-8528-473f-8aed-1c3ef584fbd6-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.066339 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/272a89d6-8528-473f-8aed-1c3ef584fbd6-logs\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.071399 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.139777 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.167017 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/83dd6056-b85a-4884-8d59-9edc767e6558-logs\") pod \"83dd6056-b85a-4884-8d59-9edc767e6558\" (UID: \"83dd6056-b85a-4884-8d59-9edc767e6558\") " Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.167095 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83dd6056-b85a-4884-8d59-9edc767e6558-config-data\") pod \"83dd6056-b85a-4884-8d59-9edc767e6558\" (UID: \"83dd6056-b85a-4884-8d59-9edc767e6558\") " Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.167213 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83dd6056-b85a-4884-8d59-9edc767e6558-combined-ca-bundle\") pod \"83dd6056-b85a-4884-8d59-9edc767e6558\" (UID: \"83dd6056-b85a-4884-8d59-9edc767e6558\") " Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.168246 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83dd6056-b85a-4884-8d59-9edc767e6558-logs" (OuterVolumeSpecName: "logs") pod "83dd6056-b85a-4884-8d59-9edc767e6558" (UID: "83dd6056-b85a-4884-8d59-9edc767e6558"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.168316 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kc8ll\" (UniqueName: \"kubernetes.io/projected/83dd6056-b85a-4884-8d59-9edc767e6558-kube-api-access-kc8ll\") pod \"83dd6056-b85a-4884-8d59-9edc767e6558\" (UID: \"83dd6056-b85a-4884-8d59-9edc767e6558\") " Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.168858 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/83dd6056-b85a-4884-8d59-9edc767e6558-logs\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.171280 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83dd6056-b85a-4884-8d59-9edc767e6558-kube-api-access-kc8ll" (OuterVolumeSpecName: "kube-api-access-kc8ll") pod "83dd6056-b85a-4884-8d59-9edc767e6558" (UID: "83dd6056-b85a-4884-8d59-9edc767e6558"). InnerVolumeSpecName "kube-api-access-kc8ll". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.212973 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83dd6056-b85a-4884-8d59-9edc767e6558-config-data" (OuterVolumeSpecName: "config-data") pod "83dd6056-b85a-4884-8d59-9edc767e6558" (UID: "83dd6056-b85a-4884-8d59-9edc767e6558"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.218263 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83dd6056-b85a-4884-8d59-9edc767e6558-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "83dd6056-b85a-4884-8d59-9edc767e6558" (UID: "83dd6056-b85a-4884-8d59-9edc767e6558"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.270475 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83dd6056-b85a-4884-8d59-9edc767e6558-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.270501 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83dd6056-b85a-4884-8d59-9edc767e6558-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.270512 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kc8ll\" (UniqueName: \"kubernetes.io/projected/83dd6056-b85a-4884-8d59-9edc767e6558-kube-api-access-kc8ll\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.418278 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5051fa5-4437-4de3-8638-4f7afaaaf938" path="/var/lib/kubelet/pods/a5051fa5-4437-4de3-8638-4f7afaaaf938/volumes" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.671129 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5938ba89-d944-4772-834f-67074d54da34","Type":"ContainerStarted","Data":"08e3bad4a85582426737268e738b80039fecbcaaa296a24c98b087818d85da91"} Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.671181 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5938ba89-d944-4772-834f-67074d54da34","Type":"ContainerStarted","Data":"8e3cba8437bd76085794176c8cd61e13b106167f28d165dc138b0f888d2ed87c"} Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.675704 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.675703 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"272a89d6-8528-473f-8aed-1c3ef584fbd6","Type":"ContainerDied","Data":"2db52de36ae7852b1880c89c3c1a9421187e246e6f2bdd1441e1f43dc03ac2d7"} Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.675842 4799 scope.go:117] "RemoveContainer" containerID="a1fea6e30e4b40ae24479ceb6f9f170a7ec6c26cd9ef632445d042d6a3ff0733" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.679415 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.679934 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"83dd6056-b85a-4884-8d59-9edc767e6558","Type":"ContainerDied","Data":"32a1e8afcf6826c0bab7528e2f7eebed31d448680c47a3fd8cf382521ce043a7"} Oct 10 18:06:49 crc kubenswrapper[4799]: E1010 18:06:49.703151 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6f3c40b2e02fe491d7a8cb2cd3cd4e974eb3094ffd182dc796407938e39fc38c" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 10 18:06:49 crc kubenswrapper[4799]: E1010 18:06:49.704891 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6f3c40b2e02fe491d7a8cb2cd3cd4e974eb3094ffd182dc796407938e39fc38c" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 10 18:06:49 crc kubenswrapper[4799]: E1010 18:06:49.706208 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6f3c40b2e02fe491d7a8cb2cd3cd4e974eb3094ffd182dc796407938e39fc38c" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 10 18:06:49 crc kubenswrapper[4799]: E1010 18:06:49.706256 4799 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="6c1c1a59-308c-408a-9368-9c3be24cc383" containerName="nova-cell1-conductor-conductor" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.713926 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.713905593 podStartE2EDuration="1.713905593s" podCreationTimestamp="2025-10-10 18:06:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:06:49.698500458 +0000 UTC m=+5703.206824603" watchObservedRunningTime="2025-10-10 18:06:49.713905593 +0000 UTC m=+5703.222229708" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.714026 4799 scope.go:117] "RemoveContainer" containerID="ee802f0441aedc24b337be8bd108842c01a64df525467a4888330f248031054f" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.733807 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.740134 4799 scope.go:117] "RemoveContainer" containerID="e058e3e9ebaa77e73a73123b986ee8de170338b6c672981b6d67c9915f6cf154" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.747744 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.772891 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.792831 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.818854 4799 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/nova-metadata-0"] Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.818875 4799 scope.go:117] "RemoveContainer" containerID="d7cb973096212d554815f18652c1539ace60989595bed637fee02e498244fffd" Oct 10 18:06:49 crc kubenswrapper[4799]: E1010 18:06:49.819291 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83dd6056-b85a-4884-8d59-9edc767e6558" containerName="nova-api-log" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.819310 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="83dd6056-b85a-4884-8d59-9edc767e6558" containerName="nova-api-log" Oct 10 18:06:49 crc kubenswrapper[4799]: E1010 18:06:49.819331 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83dd6056-b85a-4884-8d59-9edc767e6558" containerName="nova-api-api" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.819338 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="83dd6056-b85a-4884-8d59-9edc767e6558" containerName="nova-api-api" Oct 10 18:06:49 crc kubenswrapper[4799]: E1010 18:06:49.819369 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="272a89d6-8528-473f-8aed-1c3ef584fbd6" containerName="nova-metadata-metadata" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.819375 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="272a89d6-8528-473f-8aed-1c3ef584fbd6" containerName="nova-metadata-metadata" Oct 10 18:06:49 crc kubenswrapper[4799]: E1010 18:06:49.819388 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="272a89d6-8528-473f-8aed-1c3ef584fbd6" containerName="nova-metadata-log" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.819393 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="272a89d6-8528-473f-8aed-1c3ef584fbd6" containerName="nova-metadata-log" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.819545 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="272a89d6-8528-473f-8aed-1c3ef584fbd6" containerName="nova-metadata-log" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.819562 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="83dd6056-b85a-4884-8d59-9edc767e6558" containerName="nova-api-api" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.819570 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="272a89d6-8528-473f-8aed-1c3ef584fbd6" containerName="nova-metadata-metadata" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.819582 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="83dd6056-b85a-4884-8d59-9edc767e6558" containerName="nova-api-log" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.821436 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.824462 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.827805 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.836438 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.838029 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.840560 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.849207 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.985876 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6v5d5\" (UniqueName: \"kubernetes.io/projected/3f04144b-1b73-4aa0-8525-53f1a68da6ee-kube-api-access-6v5d5\") pod \"nova-api-0\" (UID: \"3f04144b-1b73-4aa0-8525-53f1a68da6ee\") " pod="openstack/nova-api-0" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.985998 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3ae1096-1bb5-408b-84a1-58b8cd21bad7-logs\") pod \"nova-metadata-0\" (UID: \"d3ae1096-1bb5-408b-84a1-58b8cd21bad7\") " pod="openstack/nova-metadata-0" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.986041 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f04144b-1b73-4aa0-8525-53f1a68da6ee-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3f04144b-1b73-4aa0-8525-53f1a68da6ee\") " pod="openstack/nova-api-0" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.986082 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3ae1096-1bb5-408b-84a1-58b8cd21bad7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d3ae1096-1bb5-408b-84a1-58b8cd21bad7\") " pod="openstack/nova-metadata-0" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.986103 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3ae1096-1bb5-408b-84a1-58b8cd21bad7-config-data\") pod \"nova-metadata-0\" (UID: \"d3ae1096-1bb5-408b-84a1-58b8cd21bad7\") " pod="openstack/nova-metadata-0" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.986252 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3f04144b-1b73-4aa0-8525-53f1a68da6ee-logs\") pod \"nova-api-0\" (UID: \"3f04144b-1b73-4aa0-8525-53f1a68da6ee\") " pod="openstack/nova-api-0" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.986317 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4p2pj\" (UniqueName: \"kubernetes.io/projected/d3ae1096-1bb5-408b-84a1-58b8cd21bad7-kube-api-access-4p2pj\") pod \"nova-metadata-0\" (UID: \"d3ae1096-1bb5-408b-84a1-58b8cd21bad7\") " pod="openstack/nova-metadata-0" Oct 10 18:06:49 crc kubenswrapper[4799]: I1010 18:06:49.986464 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f04144b-1b73-4aa0-8525-53f1a68da6ee-config-data\") pod \"nova-api-0\" (UID: \"3f04144b-1b73-4aa0-8525-53f1a68da6ee\") " pod="openstack/nova-api-0" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.087887 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/3f04144b-1b73-4aa0-8525-53f1a68da6ee-config-data\") pod \"nova-api-0\" (UID: \"3f04144b-1b73-4aa0-8525-53f1a68da6ee\") " pod="openstack/nova-api-0" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.087950 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6v5d5\" (UniqueName: \"kubernetes.io/projected/3f04144b-1b73-4aa0-8525-53f1a68da6ee-kube-api-access-6v5d5\") pod \"nova-api-0\" (UID: \"3f04144b-1b73-4aa0-8525-53f1a68da6ee\") " pod="openstack/nova-api-0" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.088007 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3ae1096-1bb5-408b-84a1-58b8cd21bad7-logs\") pod \"nova-metadata-0\" (UID: \"d3ae1096-1bb5-408b-84a1-58b8cd21bad7\") " pod="openstack/nova-metadata-0" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.088032 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f04144b-1b73-4aa0-8525-53f1a68da6ee-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3f04144b-1b73-4aa0-8525-53f1a68da6ee\") " pod="openstack/nova-api-0" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.088072 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3ae1096-1bb5-408b-84a1-58b8cd21bad7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d3ae1096-1bb5-408b-84a1-58b8cd21bad7\") " pod="openstack/nova-metadata-0" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.088092 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3ae1096-1bb5-408b-84a1-58b8cd21bad7-config-data\") pod \"nova-metadata-0\" (UID: \"d3ae1096-1bb5-408b-84a1-58b8cd21bad7\") " pod="openstack/nova-metadata-0" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.088113 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3f04144b-1b73-4aa0-8525-53f1a68da6ee-logs\") pod \"nova-api-0\" (UID: \"3f04144b-1b73-4aa0-8525-53f1a68da6ee\") " pod="openstack/nova-api-0" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.088132 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4p2pj\" (UniqueName: \"kubernetes.io/projected/d3ae1096-1bb5-408b-84a1-58b8cd21bad7-kube-api-access-4p2pj\") pod \"nova-metadata-0\" (UID: \"d3ae1096-1bb5-408b-84a1-58b8cd21bad7\") " pod="openstack/nova-metadata-0" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.088592 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3ae1096-1bb5-408b-84a1-58b8cd21bad7-logs\") pod \"nova-metadata-0\" (UID: \"d3ae1096-1bb5-408b-84a1-58b8cd21bad7\") " pod="openstack/nova-metadata-0" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.089124 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3f04144b-1b73-4aa0-8525-53f1a68da6ee-logs\") pod \"nova-api-0\" (UID: \"3f04144b-1b73-4aa0-8525-53f1a68da6ee\") " pod="openstack/nova-api-0" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.093135 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/d3ae1096-1bb5-408b-84a1-58b8cd21bad7-config-data\") pod \"nova-metadata-0\" (UID: \"d3ae1096-1bb5-408b-84a1-58b8cd21bad7\") " pod="openstack/nova-metadata-0" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.093233 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f04144b-1b73-4aa0-8525-53f1a68da6ee-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3f04144b-1b73-4aa0-8525-53f1a68da6ee\") " pod="openstack/nova-api-0" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.095673 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f04144b-1b73-4aa0-8525-53f1a68da6ee-config-data\") pod \"nova-api-0\" (UID: \"3f04144b-1b73-4aa0-8525-53f1a68da6ee\") " pod="openstack/nova-api-0" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.097242 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3ae1096-1bb5-408b-84a1-58b8cd21bad7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d3ae1096-1bb5-408b-84a1-58b8cd21bad7\") " pod="openstack/nova-metadata-0" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.109059 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4p2pj\" (UniqueName: \"kubernetes.io/projected/d3ae1096-1bb5-408b-84a1-58b8cd21bad7-kube-api-access-4p2pj\") pod \"nova-metadata-0\" (UID: \"d3ae1096-1bb5-408b-84a1-58b8cd21bad7\") " pod="openstack/nova-metadata-0" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.112340 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6v5d5\" (UniqueName: \"kubernetes.io/projected/3f04144b-1b73-4aa0-8525-53f1a68da6ee-kube-api-access-6v5d5\") pod \"nova-api-0\" (UID: \"3f04144b-1b73-4aa0-8525-53f1a68da6ee\") " pod="openstack/nova-api-0" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.140161 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.163214 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.526272 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.596697 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84cc2351-4774-443b-878a-91aedb81db3e-config-data\") pod \"84cc2351-4774-443b-878a-91aedb81db3e\" (UID: \"84cc2351-4774-443b-878a-91aedb81db3e\") " Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.596744 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bx9fv\" (UniqueName: \"kubernetes.io/projected/84cc2351-4774-443b-878a-91aedb81db3e-kube-api-access-bx9fv\") pod \"84cc2351-4774-443b-878a-91aedb81db3e\" (UID: \"84cc2351-4774-443b-878a-91aedb81db3e\") " Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.596838 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84cc2351-4774-443b-878a-91aedb81db3e-combined-ca-bundle\") pod \"84cc2351-4774-443b-878a-91aedb81db3e\" (UID: \"84cc2351-4774-443b-878a-91aedb81db3e\") " Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.603401 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84cc2351-4774-443b-878a-91aedb81db3e-kube-api-access-bx9fv" (OuterVolumeSpecName: "kube-api-access-bx9fv") pod "84cc2351-4774-443b-878a-91aedb81db3e" (UID: "84cc2351-4774-443b-878a-91aedb81db3e"). InnerVolumeSpecName "kube-api-access-bx9fv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.633919 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84cc2351-4774-443b-878a-91aedb81db3e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "84cc2351-4774-443b-878a-91aedb81db3e" (UID: "84cc2351-4774-443b-878a-91aedb81db3e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.634466 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84cc2351-4774-443b-878a-91aedb81db3e-config-data" (OuterVolumeSpecName: "config-data") pod "84cc2351-4774-443b-878a-91aedb81db3e" (UID: "84cc2351-4774-443b-878a-91aedb81db3e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.692871 4799 generic.go:334] "Generic (PLEG): container finished" podID="84cc2351-4774-443b-878a-91aedb81db3e" containerID="0248e07d40807e658db7c7b6a5d475ff49dbe6d01ce9c4367c6ecaaf20143521" exitCode=0 Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.692920 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.692995 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"84cc2351-4774-443b-878a-91aedb81db3e","Type":"ContainerDied","Data":"0248e07d40807e658db7c7b6a5d475ff49dbe6d01ce9c4367c6ecaaf20143521"} Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.693036 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"84cc2351-4774-443b-878a-91aedb81db3e","Type":"ContainerDied","Data":"4bb9b386c88b7d90198659b64f1b91fae1857145b0d1b459c77ab2320b788c3f"} Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.693063 4799 scope.go:117] "RemoveContainer" containerID="0248e07d40807e658db7c7b6a5d475ff49dbe6d01ce9c4367c6ecaaf20143521" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.698258 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84cc2351-4774-443b-878a-91aedb81db3e-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.698285 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bx9fv\" (UniqueName: \"kubernetes.io/projected/84cc2351-4774-443b-878a-91aedb81db3e-kube-api-access-bx9fv\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.698296 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84cc2351-4774-443b-878a-91aedb81db3e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.713854 4799 scope.go:117] "RemoveContainer" containerID="0248e07d40807e658db7c7b6a5d475ff49dbe6d01ce9c4367c6ecaaf20143521" Oct 10 18:06:50 crc kubenswrapper[4799]: E1010 18:06:50.714253 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0248e07d40807e658db7c7b6a5d475ff49dbe6d01ce9c4367c6ecaaf20143521\": container with ID starting with 0248e07d40807e658db7c7b6a5d475ff49dbe6d01ce9c4367c6ecaaf20143521 not found: ID does not exist" containerID="0248e07d40807e658db7c7b6a5d475ff49dbe6d01ce9c4367c6ecaaf20143521" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.714288 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0248e07d40807e658db7c7b6a5d475ff49dbe6d01ce9c4367c6ecaaf20143521"} err="failed to get container status \"0248e07d40807e658db7c7b6a5d475ff49dbe6d01ce9c4367c6ecaaf20143521\": rpc error: code = NotFound desc = could not find container \"0248e07d40807e658db7c7b6a5d475ff49dbe6d01ce9c4367c6ecaaf20143521\": container with ID starting with 0248e07d40807e658db7c7b6a5d475ff49dbe6d01ce9c4367c6ecaaf20143521 not found: ID does not exist" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.725875 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.739819 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.751198 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.760894 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 10 18:06:50 crc 
kubenswrapper[4799]: W1010 18:06:50.760965 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd3ae1096_1bb5_408b_84a1_58b8cd21bad7.slice/crio-bf2278442e31a64f005650faca74f800a4d40ec5bfcee20c0f8f433ea36251a0 WatchSource:0}: Error finding container bf2278442e31a64f005650faca74f800a4d40ec5bfcee20c0f8f433ea36251a0: Status 404 returned error can't find the container with id bf2278442e31a64f005650faca74f800a4d40ec5bfcee20c0f8f433ea36251a0 Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.771514 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 10 18:06:50 crc kubenswrapper[4799]: E1010 18:06:50.771990 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84cc2351-4774-443b-878a-91aedb81db3e" containerName="nova-cell0-conductor-conductor" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.772008 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="84cc2351-4774-443b-878a-91aedb81db3e" containerName="nova-cell0-conductor-conductor" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.772177 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="84cc2351-4774-443b-878a-91aedb81db3e" containerName="nova-cell0-conductor-conductor" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.772885 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.775112 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.796778 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.902417 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44c716fd-97e8-41e2-a350-99ec283d47d7-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"44c716fd-97e8-41e2-a350-99ec283d47d7\") " pod="openstack/nova-cell0-conductor-0" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.902563 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44c716fd-97e8-41e2-a350-99ec283d47d7-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"44c716fd-97e8-41e2-a350-99ec283d47d7\") " pod="openstack/nova-cell0-conductor-0" Oct 10 18:06:50 crc kubenswrapper[4799]: I1010 18:06:50.902596 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7cqxb\" (UniqueName: \"kubernetes.io/projected/44c716fd-97e8-41e2-a350-99ec283d47d7-kube-api-access-7cqxb\") pod \"nova-cell0-conductor-0\" (UID: \"44c716fd-97e8-41e2-a350-99ec283d47d7\") " pod="openstack/nova-cell0-conductor-0" Oct 10 18:06:51 crc kubenswrapper[4799]: I1010 18:06:51.004660 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44c716fd-97e8-41e2-a350-99ec283d47d7-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"44c716fd-97e8-41e2-a350-99ec283d47d7\") " pod="openstack/nova-cell0-conductor-0" Oct 10 18:06:51 crc kubenswrapper[4799]: I1010 18:06:51.005140 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44c716fd-97e8-41e2-a350-99ec283d47d7-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"44c716fd-97e8-41e2-a350-99ec283d47d7\") " pod="openstack/nova-cell0-conductor-0" Oct 10 18:06:51 crc kubenswrapper[4799]: I1010 18:06:51.005180 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7cqxb\" (UniqueName: \"kubernetes.io/projected/44c716fd-97e8-41e2-a350-99ec283d47d7-kube-api-access-7cqxb\") pod \"nova-cell0-conductor-0\" (UID: \"44c716fd-97e8-41e2-a350-99ec283d47d7\") " pod="openstack/nova-cell0-conductor-0" Oct 10 18:06:51 crc kubenswrapper[4799]: I1010 18:06:51.010566 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44c716fd-97e8-41e2-a350-99ec283d47d7-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"44c716fd-97e8-41e2-a350-99ec283d47d7\") " pod="openstack/nova-cell0-conductor-0" Oct 10 18:06:51 crc kubenswrapper[4799]: I1010 18:06:51.012321 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44c716fd-97e8-41e2-a350-99ec283d47d7-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"44c716fd-97e8-41e2-a350-99ec283d47d7\") " pod="openstack/nova-cell0-conductor-0" Oct 10 18:06:51 crc kubenswrapper[4799]: I1010 18:06:51.026261 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7cqxb\" (UniqueName: \"kubernetes.io/projected/44c716fd-97e8-41e2-a350-99ec283d47d7-kube-api-access-7cqxb\") pod \"nova-cell0-conductor-0\" (UID: \"44c716fd-97e8-41e2-a350-99ec283d47d7\") " pod="openstack/nova-cell0-conductor-0" Oct 10 18:06:51 crc kubenswrapper[4799]: I1010 18:06:51.091635 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 10 18:06:51 crc kubenswrapper[4799]: I1010 18:06:51.405726 4799 scope.go:117] "RemoveContainer" containerID="6ae067b7971fd6480cb0c3ccf44d4e22f837ba4674373b4b5903247a9af39cf1" Oct 10 18:06:51 crc kubenswrapper[4799]: E1010 18:06:51.406354 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:06:51 crc kubenswrapper[4799]: I1010 18:06:51.419198 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="272a89d6-8528-473f-8aed-1c3ef584fbd6" path="/var/lib/kubelet/pods/272a89d6-8528-473f-8aed-1c3ef584fbd6/volumes" Oct 10 18:06:51 crc kubenswrapper[4799]: I1010 18:06:51.419962 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83dd6056-b85a-4884-8d59-9edc767e6558" path="/var/lib/kubelet/pods/83dd6056-b85a-4884-8d59-9edc767e6558/volumes" Oct 10 18:06:51 crc kubenswrapper[4799]: I1010 18:06:51.420484 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84cc2351-4774-443b-878a-91aedb81db3e" path="/var/lib/kubelet/pods/84cc2351-4774-443b-878a-91aedb81db3e/volumes" Oct 10 18:06:51 crc kubenswrapper[4799]: I1010 18:06:51.563806 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 10 18:06:51 crc kubenswrapper[4799]: I1010 18:06:51.708218 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d3ae1096-1bb5-408b-84a1-58b8cd21bad7","Type":"ContainerStarted","Data":"8a162cb9ff7707a7d12da2bd28d025e2ff34657d53cdd924e8311302a0804bf3"} Oct 10 18:06:51 crc kubenswrapper[4799]: I1010 18:06:51.708279 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d3ae1096-1bb5-408b-84a1-58b8cd21bad7","Type":"ContainerStarted","Data":"59ecd2d4a250236551316a6d1607810dac5f688a05127dcdf83bdcdfdc7a7f02"} Oct 10 18:06:51 crc kubenswrapper[4799]: I1010 18:06:51.708299 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d3ae1096-1bb5-408b-84a1-58b8cd21bad7","Type":"ContainerStarted","Data":"bf2278442e31a64f005650faca74f800a4d40ec5bfcee20c0f8f433ea36251a0"} Oct 10 18:06:51 crc kubenswrapper[4799]: I1010 18:06:51.711643 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"44c716fd-97e8-41e2-a350-99ec283d47d7","Type":"ContainerStarted","Data":"7bf9facc25356684a55466b8a34f2d517a6f321573b4d7f658c2a16d57b2c2f4"} Oct 10 18:06:51 crc kubenswrapper[4799]: I1010 18:06:51.714915 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3f04144b-1b73-4aa0-8525-53f1a68da6ee","Type":"ContainerStarted","Data":"20359d41f42d593db627882ec635cdb4e247abcdb9e6bcf45e80cc035d84cdfb"} Oct 10 18:06:51 crc kubenswrapper[4799]: I1010 18:06:51.714954 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3f04144b-1b73-4aa0-8525-53f1a68da6ee","Type":"ContainerStarted","Data":"edc89c18d9c9a9d77cbeac4da2357579278c2841d39602040e0d43b3dc9839ec"} Oct 10 18:06:51 crc kubenswrapper[4799]: I1010 18:06:51.714994 4799 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3f04144b-1b73-4aa0-8525-53f1a68da6ee","Type":"ContainerStarted","Data":"a5eed2577162c8e66eeb4eda9d6decb9db911d9b46e4884414317f20579f4f95"} Oct 10 18:06:51 crc kubenswrapper[4799]: I1010 18:06:51.748143 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.748119395 podStartE2EDuration="2.748119395s" podCreationTimestamp="2025-10-10 18:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:06:51.724451418 +0000 UTC m=+5705.232775543" watchObservedRunningTime="2025-10-10 18:06:51.748119395 +0000 UTC m=+5705.256443520" Oct 10 18:06:51 crc kubenswrapper[4799]: I1010 18:06:51.765736 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.765713264 podStartE2EDuration="2.765713264s" podCreationTimestamp="2025-10-10 18:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:06:51.754171683 +0000 UTC m=+5705.262495818" watchObservedRunningTime="2025-10-10 18:06:51.765713264 +0000 UTC m=+5705.274037389" Oct 10 18:06:52 crc kubenswrapper[4799]: I1010 18:06:52.060431 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 10 18:06:52 crc kubenswrapper[4799]: I1010 18:06:52.731099 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"44c716fd-97e8-41e2-a350-99ec283d47d7","Type":"ContainerStarted","Data":"b5286df2abe730c9b0086c71d5f1c6b73d94403b6f477b779d598d862320347e"} Oct 10 18:06:52 crc kubenswrapper[4799]: I1010 18:06:52.732304 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Oct 10 18:06:52 crc kubenswrapper[4799]: I1010 18:06:52.755030 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.755004227 podStartE2EDuration="2.755004227s" podCreationTimestamp="2025-10-10 18:06:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:06:52.753963001 +0000 UTC m=+5706.262287156" watchObservedRunningTime="2025-10-10 18:06:52.755004227 +0000 UTC m=+5706.263328382" Oct 10 18:06:53 crc kubenswrapper[4799]: I1010 18:06:53.401022 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 10 18:06:53 crc kubenswrapper[4799]: I1010 18:06:53.751045 4799 generic.go:334] "Generic (PLEG): container finished" podID="6c1c1a59-308c-408a-9368-9c3be24cc383" containerID="6f3c40b2e02fe491d7a8cb2cd3cd4e974eb3094ffd182dc796407938e39fc38c" exitCode=0 Oct 10 18:06:53 crc kubenswrapper[4799]: I1010 18:06:53.752933 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"6c1c1a59-308c-408a-9368-9c3be24cc383","Type":"ContainerDied","Data":"6f3c40b2e02fe491d7a8cb2cd3cd4e974eb3094ffd182dc796407938e39fc38c"} Oct 10 18:06:53 crc kubenswrapper[4799]: I1010 18:06:53.753295 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" 
event={"ID":"6c1c1a59-308c-408a-9368-9c3be24cc383","Type":"ContainerDied","Data":"ecf0964db17339919591402743946b0da56a897eb1ffbe450ee7311cf9f361b7"} Oct 10 18:06:53 crc kubenswrapper[4799]: I1010 18:06:53.753317 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ecf0964db17339919591402743946b0da56a897eb1ffbe450ee7311cf9f361b7" Oct 10 18:06:53 crc kubenswrapper[4799]: I1010 18:06:53.816356 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 10 18:06:53 crc kubenswrapper[4799]: I1010 18:06:53.963649 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c1c1a59-308c-408a-9368-9c3be24cc383-config-data\") pod \"6c1c1a59-308c-408a-9368-9c3be24cc383\" (UID: \"6c1c1a59-308c-408a-9368-9c3be24cc383\") " Oct 10 18:06:53 crc kubenswrapper[4799]: I1010 18:06:53.963731 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6r2d4\" (UniqueName: \"kubernetes.io/projected/6c1c1a59-308c-408a-9368-9c3be24cc383-kube-api-access-6r2d4\") pod \"6c1c1a59-308c-408a-9368-9c3be24cc383\" (UID: \"6c1c1a59-308c-408a-9368-9c3be24cc383\") " Oct 10 18:06:53 crc kubenswrapper[4799]: I1010 18:06:53.963866 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c1c1a59-308c-408a-9368-9c3be24cc383-combined-ca-bundle\") pod \"6c1c1a59-308c-408a-9368-9c3be24cc383\" (UID: \"6c1c1a59-308c-408a-9368-9c3be24cc383\") " Oct 10 18:06:53 crc kubenswrapper[4799]: I1010 18:06:53.975545 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c1c1a59-308c-408a-9368-9c3be24cc383-kube-api-access-6r2d4" (OuterVolumeSpecName: "kube-api-access-6r2d4") pod "6c1c1a59-308c-408a-9368-9c3be24cc383" (UID: "6c1c1a59-308c-408a-9368-9c3be24cc383"). InnerVolumeSpecName "kube-api-access-6r2d4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:06:54 crc kubenswrapper[4799]: I1010 18:06:54.000072 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c1c1a59-308c-408a-9368-9c3be24cc383-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6c1c1a59-308c-408a-9368-9c3be24cc383" (UID: "6c1c1a59-308c-408a-9368-9c3be24cc383"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:06:54 crc kubenswrapper[4799]: I1010 18:06:54.015811 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c1c1a59-308c-408a-9368-9c3be24cc383-config-data" (OuterVolumeSpecName: "config-data") pod "6c1c1a59-308c-408a-9368-9c3be24cc383" (UID: "6c1c1a59-308c-408a-9368-9c3be24cc383"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:06:54 crc kubenswrapper[4799]: I1010 18:06:54.066098 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6r2d4\" (UniqueName: \"kubernetes.io/projected/6c1c1a59-308c-408a-9368-9c3be24cc383-kube-api-access-6r2d4\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:54 crc kubenswrapper[4799]: I1010 18:06:54.066147 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c1c1a59-308c-408a-9368-9c3be24cc383-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:54 crc kubenswrapper[4799]: I1010 18:06:54.066165 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c1c1a59-308c-408a-9368-9c3be24cc383-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 18:06:54 crc kubenswrapper[4799]: I1010 18:06:54.763722 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 10 18:06:54 crc kubenswrapper[4799]: I1010 18:06:54.820960 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 10 18:06:54 crc kubenswrapper[4799]: I1010 18:06:54.839767 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 10 18:06:54 crc kubenswrapper[4799]: I1010 18:06:54.855257 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 10 18:06:54 crc kubenswrapper[4799]: E1010 18:06:54.855781 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c1c1a59-308c-408a-9368-9c3be24cc383" containerName="nova-cell1-conductor-conductor" Oct 10 18:06:54 crc kubenswrapper[4799]: I1010 18:06:54.855805 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c1c1a59-308c-408a-9368-9c3be24cc383" containerName="nova-cell1-conductor-conductor" Oct 10 18:06:54 crc kubenswrapper[4799]: I1010 18:06:54.856059 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c1c1a59-308c-408a-9368-9c3be24cc383" containerName="nova-cell1-conductor-conductor" Oct 10 18:06:54 crc kubenswrapper[4799]: I1010 18:06:54.856896 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 10 18:06:54 crc kubenswrapper[4799]: I1010 18:06:54.860527 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 10 18:06:54 crc kubenswrapper[4799]: I1010 18:06:54.867913 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 10 18:06:54 crc kubenswrapper[4799]: I1010 18:06:54.985875 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2a9c57f-e6e4-4792-b89b-ab5f9724bf36-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"c2a9c57f-e6e4-4792-b89b-ab5f9724bf36\") " pod="openstack/nova-cell1-conductor-0" Oct 10 18:06:54 crc kubenswrapper[4799]: I1010 18:06:54.985965 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmqbq\" (UniqueName: \"kubernetes.io/projected/c2a9c57f-e6e4-4792-b89b-ab5f9724bf36-kube-api-access-qmqbq\") pod \"nova-cell1-conductor-0\" (UID: \"c2a9c57f-e6e4-4792-b89b-ab5f9724bf36\") " pod="openstack/nova-cell1-conductor-0" Oct 10 18:06:54 crc kubenswrapper[4799]: I1010 18:06:54.986005 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2a9c57f-e6e4-4792-b89b-ab5f9724bf36-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"c2a9c57f-e6e4-4792-b89b-ab5f9724bf36\") " pod="openstack/nova-cell1-conductor-0" Oct 10 18:06:55 crc kubenswrapper[4799]: I1010 18:06:55.087844 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2a9c57f-e6e4-4792-b89b-ab5f9724bf36-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"c2a9c57f-e6e4-4792-b89b-ab5f9724bf36\") " pod="openstack/nova-cell1-conductor-0" Oct 10 18:06:55 crc kubenswrapper[4799]: I1010 18:06:55.087969 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2a9c57f-e6e4-4792-b89b-ab5f9724bf36-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"c2a9c57f-e6e4-4792-b89b-ab5f9724bf36\") " pod="openstack/nova-cell1-conductor-0" Oct 10 18:06:55 crc kubenswrapper[4799]: I1010 18:06:55.088022 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmqbq\" (UniqueName: \"kubernetes.io/projected/c2a9c57f-e6e4-4792-b89b-ab5f9724bf36-kube-api-access-qmqbq\") pod \"nova-cell1-conductor-0\" (UID: \"c2a9c57f-e6e4-4792-b89b-ab5f9724bf36\") " pod="openstack/nova-cell1-conductor-0" Oct 10 18:06:55 crc kubenswrapper[4799]: I1010 18:06:55.096626 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2a9c57f-e6e4-4792-b89b-ab5f9724bf36-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"c2a9c57f-e6e4-4792-b89b-ab5f9724bf36\") " pod="openstack/nova-cell1-conductor-0" Oct 10 18:06:55 crc kubenswrapper[4799]: I1010 18:06:55.105666 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2a9c57f-e6e4-4792-b89b-ab5f9724bf36-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"c2a9c57f-e6e4-4792-b89b-ab5f9724bf36\") " pod="openstack/nova-cell1-conductor-0" Oct 10 18:06:55 crc kubenswrapper[4799]: I1010 18:06:55.106276 4799 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmqbq\" (UniqueName: \"kubernetes.io/projected/c2a9c57f-e6e4-4792-b89b-ab5f9724bf36-kube-api-access-qmqbq\") pod \"nova-cell1-conductor-0\" (UID: \"c2a9c57f-e6e4-4792-b89b-ab5f9724bf36\") " pod="openstack/nova-cell1-conductor-0" Oct 10 18:06:55 crc kubenswrapper[4799]: I1010 18:06:55.141327 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 10 18:06:55 crc kubenswrapper[4799]: I1010 18:06:55.141392 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 10 18:06:55 crc kubenswrapper[4799]: I1010 18:06:55.191699 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 10 18:06:55 crc kubenswrapper[4799]: I1010 18:06:55.425027 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c1c1a59-308c-408a-9368-9c3be24cc383" path="/var/lib/kubelet/pods/6c1c1a59-308c-408a-9368-9c3be24cc383/volumes" Oct 10 18:06:55 crc kubenswrapper[4799]: I1010 18:06:55.739692 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 10 18:06:55 crc kubenswrapper[4799]: W1010 18:06:55.746980 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc2a9c57f_e6e4_4792_b89b_ab5f9724bf36.slice/crio-2c9c633543b7304b6d7b2ca0a844d1d17abd89b0528206001b1e1e1124985c05 WatchSource:0}: Error finding container 2c9c633543b7304b6d7b2ca0a844d1d17abd89b0528206001b1e1e1124985c05: Status 404 returned error can't find the container with id 2c9c633543b7304b6d7b2ca0a844d1d17abd89b0528206001b1e1e1124985c05 Oct 10 18:06:55 crc kubenswrapper[4799]: I1010 18:06:55.774282 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"c2a9c57f-e6e4-4792-b89b-ab5f9724bf36","Type":"ContainerStarted","Data":"2c9c633543b7304b6d7b2ca0a844d1d17abd89b0528206001b1e1e1124985c05"} Oct 10 18:06:56 crc kubenswrapper[4799]: I1010 18:06:56.117791 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Oct 10 18:06:56 crc kubenswrapper[4799]: I1010 18:06:56.785688 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"c2a9c57f-e6e4-4792-b89b-ab5f9724bf36","Type":"ContainerStarted","Data":"64382c304beacff299cd8fe303ce2e24e2978c781d0ee3c6055e62035ff81435"} Oct 10 18:06:56 crc kubenswrapper[4799]: I1010 18:06:56.786970 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Oct 10 18:06:57 crc kubenswrapper[4799]: I1010 18:06:57.060055 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Oct 10 18:06:57 crc kubenswrapper[4799]: I1010 18:06:57.075284 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Oct 10 18:06:57 crc kubenswrapper[4799]: I1010 18:06:57.108493 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=3.108471196 podStartE2EDuration="3.108471196s" podCreationTimestamp="2025-10-10 18:06:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:06:56.813100057 +0000 UTC 
m=+5710.321424202" watchObservedRunningTime="2025-10-10 18:06:57.108471196 +0000 UTC m=+5710.616795321" Oct 10 18:06:57 crc kubenswrapper[4799]: I1010 18:06:57.807459 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Oct 10 18:06:58 crc kubenswrapper[4799]: I1010 18:06:58.401074 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 10 18:06:58 crc kubenswrapper[4799]: I1010 18:06:58.442176 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 10 18:06:58 crc kubenswrapper[4799]: I1010 18:06:58.850497 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 10 18:07:00 crc kubenswrapper[4799]: I1010 18:07:00.141567 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 10 18:07:00 crc kubenswrapper[4799]: I1010 18:07:00.141620 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 10 18:07:00 crc kubenswrapper[4799]: I1010 18:07:00.163580 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 10 18:07:00 crc kubenswrapper[4799]: I1010 18:07:00.163920 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 10 18:07:00 crc kubenswrapper[4799]: I1010 18:07:00.243243 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Oct 10 18:07:01 crc kubenswrapper[4799]: I1010 18:07:01.306996 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="d3ae1096-1bb5-408b-84a1-58b8cd21bad7" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.94:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 10 18:07:01 crc kubenswrapper[4799]: I1010 18:07:01.307094 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3f04144b-1b73-4aa0-8525-53f1a68da6ee" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.95:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 10 18:07:01 crc kubenswrapper[4799]: I1010 18:07:01.307664 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="d3ae1096-1bb5-408b-84a1-58b8cd21bad7" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.94:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 10 18:07:01 crc kubenswrapper[4799]: I1010 18:07:01.307700 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3f04144b-1b73-4aa0-8525-53f1a68da6ee" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.95:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 10 18:07:04 crc kubenswrapper[4799]: I1010 18:07:04.403031 4799 scope.go:117] "RemoveContainer" containerID="6ae067b7971fd6480cb0c3ccf44d4e22f837ba4674373b4b5903247a9af39cf1" Oct 10 18:07:04 crc kubenswrapper[4799]: E1010 18:07:04.403408 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:07:04 crc kubenswrapper[4799]: I1010 18:07:04.579157 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 10 18:07:04 crc kubenswrapper[4799]: I1010 18:07:04.581182 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 10 18:07:04 crc kubenswrapper[4799]: I1010 18:07:04.583325 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 10 18:07:04 crc kubenswrapper[4799]: I1010 18:07:04.596475 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 10 18:07:04 crc kubenswrapper[4799]: I1010 18:07:04.691925 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc1e302d-b598-46d7-94a3-6fe3c85e1922-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"bc1e302d-b598-46d7-94a3-6fe3c85e1922\") " pod="openstack/cinder-scheduler-0" Oct 10 18:07:04 crc kubenswrapper[4799]: I1010 18:07:04.691981 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bc1e302d-b598-46d7-94a3-6fe3c85e1922-scripts\") pod \"cinder-scheduler-0\" (UID: \"bc1e302d-b598-46d7-94a3-6fe3c85e1922\") " pod="openstack/cinder-scheduler-0" Oct 10 18:07:04 crc kubenswrapper[4799]: I1010 18:07:04.692200 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mb545\" (UniqueName: \"kubernetes.io/projected/bc1e302d-b598-46d7-94a3-6fe3c85e1922-kube-api-access-mb545\") pod \"cinder-scheduler-0\" (UID: \"bc1e302d-b598-46d7-94a3-6fe3c85e1922\") " pod="openstack/cinder-scheduler-0" Oct 10 18:07:04 crc kubenswrapper[4799]: I1010 18:07:04.692305 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc1e302d-b598-46d7-94a3-6fe3c85e1922-config-data\") pod \"cinder-scheduler-0\" (UID: \"bc1e302d-b598-46d7-94a3-6fe3c85e1922\") " pod="openstack/cinder-scheduler-0" Oct 10 18:07:04 crc kubenswrapper[4799]: I1010 18:07:04.692336 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bc1e302d-b598-46d7-94a3-6fe3c85e1922-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"bc1e302d-b598-46d7-94a3-6fe3c85e1922\") " pod="openstack/cinder-scheduler-0" Oct 10 18:07:04 crc kubenswrapper[4799]: I1010 18:07:04.692452 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bc1e302d-b598-46d7-94a3-6fe3c85e1922-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"bc1e302d-b598-46d7-94a3-6fe3c85e1922\") " pod="openstack/cinder-scheduler-0" Oct 10 18:07:04 crc kubenswrapper[4799]: I1010 18:07:04.794314 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc1e302d-b598-46d7-94a3-6fe3c85e1922-config-data\") pod \"cinder-scheduler-0\" (UID: \"bc1e302d-b598-46d7-94a3-6fe3c85e1922\") 
" pod="openstack/cinder-scheduler-0" Oct 10 18:07:04 crc kubenswrapper[4799]: I1010 18:07:04.794358 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bc1e302d-b598-46d7-94a3-6fe3c85e1922-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"bc1e302d-b598-46d7-94a3-6fe3c85e1922\") " pod="openstack/cinder-scheduler-0" Oct 10 18:07:04 crc kubenswrapper[4799]: I1010 18:07:04.794415 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bc1e302d-b598-46d7-94a3-6fe3c85e1922-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"bc1e302d-b598-46d7-94a3-6fe3c85e1922\") " pod="openstack/cinder-scheduler-0" Oct 10 18:07:04 crc kubenswrapper[4799]: I1010 18:07:04.794494 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc1e302d-b598-46d7-94a3-6fe3c85e1922-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"bc1e302d-b598-46d7-94a3-6fe3c85e1922\") " pod="openstack/cinder-scheduler-0" Oct 10 18:07:04 crc kubenswrapper[4799]: I1010 18:07:04.794534 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bc1e302d-b598-46d7-94a3-6fe3c85e1922-scripts\") pod \"cinder-scheduler-0\" (UID: \"bc1e302d-b598-46d7-94a3-6fe3c85e1922\") " pod="openstack/cinder-scheduler-0" Oct 10 18:07:04 crc kubenswrapper[4799]: I1010 18:07:04.794604 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mb545\" (UniqueName: \"kubernetes.io/projected/bc1e302d-b598-46d7-94a3-6fe3c85e1922-kube-api-access-mb545\") pod \"cinder-scheduler-0\" (UID: \"bc1e302d-b598-46d7-94a3-6fe3c85e1922\") " pod="openstack/cinder-scheduler-0" Oct 10 18:07:04 crc kubenswrapper[4799]: I1010 18:07:04.795018 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bc1e302d-b598-46d7-94a3-6fe3c85e1922-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"bc1e302d-b598-46d7-94a3-6fe3c85e1922\") " pod="openstack/cinder-scheduler-0" Oct 10 18:07:04 crc kubenswrapper[4799]: I1010 18:07:04.801365 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc1e302d-b598-46d7-94a3-6fe3c85e1922-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"bc1e302d-b598-46d7-94a3-6fe3c85e1922\") " pod="openstack/cinder-scheduler-0" Oct 10 18:07:04 crc kubenswrapper[4799]: I1010 18:07:04.801843 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bc1e302d-b598-46d7-94a3-6fe3c85e1922-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"bc1e302d-b598-46d7-94a3-6fe3c85e1922\") " pod="openstack/cinder-scheduler-0" Oct 10 18:07:04 crc kubenswrapper[4799]: I1010 18:07:04.802095 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc1e302d-b598-46d7-94a3-6fe3c85e1922-config-data\") pod \"cinder-scheduler-0\" (UID: \"bc1e302d-b598-46d7-94a3-6fe3c85e1922\") " pod="openstack/cinder-scheduler-0" Oct 10 18:07:04 crc kubenswrapper[4799]: I1010 18:07:04.820939 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/bc1e302d-b598-46d7-94a3-6fe3c85e1922-scripts\") pod \"cinder-scheduler-0\" (UID: \"bc1e302d-b598-46d7-94a3-6fe3c85e1922\") " pod="openstack/cinder-scheduler-0" Oct 10 18:07:04 crc kubenswrapper[4799]: I1010 18:07:04.826354 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mb545\" (UniqueName: \"kubernetes.io/projected/bc1e302d-b598-46d7-94a3-6fe3c85e1922-kube-api-access-mb545\") pod \"cinder-scheduler-0\" (UID: \"bc1e302d-b598-46d7-94a3-6fe3c85e1922\") " pod="openstack/cinder-scheduler-0" Oct 10 18:07:04 crc kubenswrapper[4799]: I1010 18:07:04.908157 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 10 18:07:05 crc kubenswrapper[4799]: I1010 18:07:05.393065 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 10 18:07:05 crc kubenswrapper[4799]: W1010 18:07:05.394069 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbc1e302d_b598_46d7_94a3_6fe3c85e1922.slice/crio-7095a7876c26e38b4570312f7e5a80834b384c12ecf6474a6e4a0bb6c1d97ef0 WatchSource:0}: Error finding container 7095a7876c26e38b4570312f7e5a80834b384c12ecf6474a6e4a0bb6c1d97ef0: Status 404 returned error can't find the container with id 7095a7876c26e38b4570312f7e5a80834b384c12ecf6474a6e4a0bb6c1d97ef0 Oct 10 18:07:05 crc kubenswrapper[4799]: I1010 18:07:05.890032 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"bc1e302d-b598-46d7-94a3-6fe3c85e1922","Type":"ContainerStarted","Data":"7095a7876c26e38b4570312f7e5a80834b384c12ecf6474a6e4a0bb6c1d97ef0"} Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.134568 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.135037 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="48f454d9-4648-450f-a206-6859d9d191aa" containerName="cinder-api" containerID="cri-o://ec6861bdba45ecca6ae0cca7a2b30b426dc2c5bc3c5b49fa419d33902aef48a9" gracePeriod=30 Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.134893 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="48f454d9-4648-450f-a206-6859d9d191aa" containerName="cinder-api-log" containerID="cri-o://866f59c9ebbba8598713593c9e8d69c70c8dcf459233184deec9b651341ba256" gracePeriod=30 Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.666687 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-volume1-0"] Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.672552 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.675400 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-volume1-config-data" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.685281 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.734642 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/e0c788e1-2358-4b32-b410-bb615bed3971-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.734999 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/e0c788e1-2358-4b32-b410-bb615bed3971-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.735037 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8lv6\" (UniqueName: \"kubernetes.io/projected/e0c788e1-2358-4b32-b410-bb615bed3971-kube-api-access-l8lv6\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.735069 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e0c788e1-2358-4b32-b410-bb615bed3971-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.735088 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0c788e1-2358-4b32-b410-bb615bed3971-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.735186 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/e0c788e1-2358-4b32-b410-bb615bed3971-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.735212 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/e0c788e1-2358-4b32-b410-bb615bed3971-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.735240 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/e0c788e1-2358-4b32-b410-bb615bed3971-sys\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " 
pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.735274 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0c788e1-2358-4b32-b410-bb615bed3971-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.735314 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/e0c788e1-2358-4b32-b410-bb615bed3971-run\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.735352 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/e0c788e1-2358-4b32-b410-bb615bed3971-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.735379 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/e0c788e1-2358-4b32-b410-bb615bed3971-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.735411 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0c788e1-2358-4b32-b410-bb615bed3971-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.735432 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/e0c788e1-2358-4b32-b410-bb615bed3971-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.735459 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e0c788e1-2358-4b32-b410-bb615bed3971-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.735483 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/e0c788e1-2358-4b32-b410-bb615bed3971-dev\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.840657 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/e0c788e1-2358-4b32-b410-bb615bed3971-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 
18:07:06.840825 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/e0c788e1-2358-4b32-b410-bb615bed3971-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.840886 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8lv6\" (UniqueName: \"kubernetes.io/projected/e0c788e1-2358-4b32-b410-bb615bed3971-kube-api-access-l8lv6\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.840943 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e0c788e1-2358-4b32-b410-bb615bed3971-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.840988 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0c788e1-2358-4b32-b410-bb615bed3971-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.841069 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/e0c788e1-2358-4b32-b410-bb615bed3971-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.841159 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/e0c788e1-2358-4b32-b410-bb615bed3971-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.841257 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/e0c788e1-2358-4b32-b410-bb615bed3971-sys\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.841359 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0c788e1-2358-4b32-b410-bb615bed3971-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.841474 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/e0c788e1-2358-4b32-b410-bb615bed3971-run\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.841589 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: 
\"kubernetes.io/host-path/e0c788e1-2358-4b32-b410-bb615bed3971-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.841660 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/e0c788e1-2358-4b32-b410-bb615bed3971-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.841725 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0c788e1-2358-4b32-b410-bb615bed3971-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.843315 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/e0c788e1-2358-4b32-b410-bb615bed3971-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.843395 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/e0c788e1-2358-4b32-b410-bb615bed3971-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.843646 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/e0c788e1-2358-4b32-b410-bb615bed3971-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.856538 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e0c788e1-2358-4b32-b410-bb615bed3971-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.856730 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/e0c788e1-2358-4b32-b410-bb615bed3971-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.856806 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/e0c788e1-2358-4b32-b410-bb615bed3971-run\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.856860 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/e0c788e1-2358-4b32-b410-bb615bed3971-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 
18:07:06.856990 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/e0c788e1-2358-4b32-b410-bb615bed3971-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.857188 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/e0c788e1-2358-4b32-b410-bb615bed3971-sys\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.857358 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0c788e1-2358-4b32-b410-bb615bed3971-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.857572 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/e0c788e1-2358-4b32-b410-bb615bed3971-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.858715 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0c788e1-2358-4b32-b410-bb615bed3971-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.866329 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8lv6\" (UniqueName: \"kubernetes.io/projected/e0c788e1-2358-4b32-b410-bb615bed3971-kube-api-access-l8lv6\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.879780 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e0c788e1-2358-4b32-b410-bb615bed3971-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.879899 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/e0c788e1-2358-4b32-b410-bb615bed3971-dev\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.880191 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/e0c788e1-2358-4b32-b410-bb615bed3971-dev\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.880628 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e0c788e1-2358-4b32-b410-bb615bed3971-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: 
\"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.883361 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0c788e1-2358-4b32-b410-bb615bed3971-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.900214 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/e0c788e1-2358-4b32-b410-bb615bed3971-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"e0c788e1-2358-4b32-b410-bb615bed3971\") " pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.909321 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"bc1e302d-b598-46d7-94a3-6fe3c85e1922","Type":"ContainerStarted","Data":"e36f9cd5c46d5f9ccc0cdf810198f3aed38dd9c010a824a14d994b9ae23a8ff6"} Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.909371 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"bc1e302d-b598-46d7-94a3-6fe3c85e1922","Type":"ContainerStarted","Data":"8f82bfa550e60a432b183ed452c8bd9a5f46610677e72eaf328b1589375d5ea6"} Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.912426 4799 generic.go:334] "Generic (PLEG): container finished" podID="48f454d9-4648-450f-a206-6859d9d191aa" containerID="866f59c9ebbba8598713593c9e8d69c70c8dcf459233184deec9b651341ba256" exitCode=143 Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.912478 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"48f454d9-4648-450f-a206-6859d9d191aa","Type":"ContainerDied","Data":"866f59c9ebbba8598713593c9e8d69c70c8dcf459233184deec9b651341ba256"} Oct 10 18:07:06 crc kubenswrapper[4799]: I1010 18:07:06.932630 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=2.932613907 podStartE2EDuration="2.932613907s" podCreationTimestamp="2025-10-10 18:07:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:07:06.926542679 +0000 UTC m=+5720.434866794" watchObservedRunningTime="2025-10-10 18:07:06.932613907 +0000 UTC m=+5720.440938022" Oct 10 18:07:07 crc kubenswrapper[4799]: I1010 18:07:07.004714 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:07 crc kubenswrapper[4799]: I1010 18:07:07.531298 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Oct 10 18:07:07 crc kubenswrapper[4799]: I1010 18:07:07.929100 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"e0c788e1-2358-4b32-b410-bb615bed3971","Type":"ContainerStarted","Data":"7d039a0645432319a3c2690fde39de44b2bfebc807316c5e9585c1e40bbef40a"} Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.046295 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-backup-0"] Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.047771 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.054461 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-backup-config-data" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.063306 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.111995 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d1df32c-4647-4ce9-9a5a-c7d32f297332-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.112032 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/8d1df32c-4647-4ce9-9a5a-c7d32f297332-sys\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.112071 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8d1df32c-4647-4ce9-9a5a-c7d32f297332-config-data-custom\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.112094 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d1df32c-4647-4ce9-9a5a-c7d32f297332-config-data\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.112119 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/8d1df32c-4647-4ce9-9a5a-c7d32f297332-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.112139 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/8d1df32c-4647-4ce9-9a5a-c7d32f297332-etc-nvme\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.112202 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/8d1df32c-4647-4ce9-9a5a-c7d32f297332-run\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.112219 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/8d1df32c-4647-4ce9-9a5a-c7d32f297332-dev\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.112244 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/8d1df32c-4647-4ce9-9a5a-c7d32f297332-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.112262 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/8d1df32c-4647-4ce9-9a5a-c7d32f297332-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.112289 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d1df32c-4647-4ce9-9a5a-c7d32f297332-scripts\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.112306 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/8d1df32c-4647-4ce9-9a5a-c7d32f297332-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.112321 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/8d1df32c-4647-4ce9-9a5a-c7d32f297332-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.112340 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwnv7\" (UniqueName: \"kubernetes.io/projected/8d1df32c-4647-4ce9-9a5a-c7d32f297332-kube-api-access-hwnv7\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.112371 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/8d1df32c-4647-4ce9-9a5a-c7d32f297332-lib-modules\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.112389 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/8d1df32c-4647-4ce9-9a5a-c7d32f297332-ceph\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.213565 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/8d1df32c-4647-4ce9-9a5a-c7d32f297332-dev\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.213609 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/8d1df32c-4647-4ce9-9a5a-c7d32f297332-run\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 
18:07:08.213652 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8d1df32c-4647-4ce9-9a5a-c7d32f297332-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.213676 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/8d1df32c-4647-4ce9-9a5a-c7d32f297332-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.213712 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d1df32c-4647-4ce9-9a5a-c7d32f297332-scripts\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.213732 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/8d1df32c-4647-4ce9-9a5a-c7d32f297332-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.213769 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/8d1df32c-4647-4ce9-9a5a-c7d32f297332-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.213799 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwnv7\" (UniqueName: \"kubernetes.io/projected/8d1df32c-4647-4ce9-9a5a-c7d32f297332-kube-api-access-hwnv7\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.213846 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/8d1df32c-4647-4ce9-9a5a-c7d32f297332-lib-modules\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.213872 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/8d1df32c-4647-4ce9-9a5a-c7d32f297332-ceph\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.213921 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d1df32c-4647-4ce9-9a5a-c7d32f297332-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.213943 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/8d1df32c-4647-4ce9-9a5a-c7d32f297332-sys\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc 
kubenswrapper[4799]: I1010 18:07:08.213975 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8d1df32c-4647-4ce9-9a5a-c7d32f297332-config-data-custom\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.213995 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d1df32c-4647-4ce9-9a5a-c7d32f297332-config-data\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.214020 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/8d1df32c-4647-4ce9-9a5a-c7d32f297332-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.214040 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/8d1df32c-4647-4ce9-9a5a-c7d32f297332-etc-nvme\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.214184 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/8d1df32c-4647-4ce9-9a5a-c7d32f297332-etc-nvme\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.214230 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/8d1df32c-4647-4ce9-9a5a-c7d32f297332-dev\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.214254 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/8d1df32c-4647-4ce9-9a5a-c7d32f297332-run\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.214282 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8d1df32c-4647-4ce9-9a5a-c7d32f297332-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.214320 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/8d1df32c-4647-4ce9-9a5a-c7d32f297332-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.215419 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/8d1df32c-4647-4ce9-9a5a-c7d32f297332-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.215432 4799 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/8d1df32c-4647-4ce9-9a5a-c7d32f297332-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.215548 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/8d1df32c-4647-4ce9-9a5a-c7d32f297332-sys\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.215620 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/8d1df32c-4647-4ce9-9a5a-c7d32f297332-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.216093 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/8d1df32c-4647-4ce9-9a5a-c7d32f297332-lib-modules\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.220151 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d1df32c-4647-4ce9-9a5a-c7d32f297332-scripts\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.220359 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d1df32c-4647-4ce9-9a5a-c7d32f297332-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.221762 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/8d1df32c-4647-4ce9-9a5a-c7d32f297332-ceph\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.234483 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d1df32c-4647-4ce9-9a5a-c7d32f297332-config-data\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.236042 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8d1df32c-4647-4ce9-9a5a-c7d32f297332-config-data-custom\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.236918 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwnv7\" (UniqueName: \"kubernetes.io/projected/8d1df32c-4647-4ce9-9a5a-c7d32f297332-kube-api-access-hwnv7\") pod \"cinder-backup-0\" (UID: \"8d1df32c-4647-4ce9-9a5a-c7d32f297332\") " pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.398053 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-backup-0" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.941604 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"e0c788e1-2358-4b32-b410-bb615bed3971","Type":"ContainerStarted","Data":"b13e7bd3d93ecc6bf919a5e9ee4df8b098a2994d59ae95288552cb761bf4a322"} Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.947872 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"e0c788e1-2358-4b32-b410-bb615bed3971","Type":"ContainerStarted","Data":"b18fd17e9594da35fa44725accc1f247a85a32067f6c27a934afa359a774270c"} Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.967106 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-volume1-0" podStartSLOduration=2.193015147 podStartE2EDuration="2.967088794s" podCreationTimestamp="2025-10-10 18:07:06 +0000 UTC" firstStartedPulling="2025-10-10 18:07:07.544372317 +0000 UTC m=+5721.052696442" lastFinishedPulling="2025-10-10 18:07:08.318445974 +0000 UTC m=+5721.826770089" observedRunningTime="2025-10-10 18:07:08.963018715 +0000 UTC m=+5722.471342830" watchObservedRunningTime="2025-10-10 18:07:08.967088794 +0000 UTC m=+5722.475412909" Oct 10 18:07:08 crc kubenswrapper[4799]: I1010 18:07:08.994781 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Oct 10 18:07:09 crc kubenswrapper[4799]: I1010 18:07:09.298710 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="48f454d9-4648-450f-a206-6859d9d191aa" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.1.91:8776/healthcheck\": read tcp 10.217.0.2:53928->10.217.1.91:8776: read: connection reset by peer" Oct 10 18:07:09 crc kubenswrapper[4799]: I1010 18:07:09.795126 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 10 18:07:09 crc kubenswrapper[4799]: I1010 18:07:09.853078 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/48f454d9-4648-450f-a206-6859d9d191aa-etc-machine-id\") pod \"48f454d9-4648-450f-a206-6859d9d191aa\" (UID: \"48f454d9-4648-450f-a206-6859d9d191aa\") " Oct 10 18:07:09 crc kubenswrapper[4799]: I1010 18:07:09.853153 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48f454d9-4648-450f-a206-6859d9d191aa-scripts\") pod \"48f454d9-4648-450f-a206-6859d9d191aa\" (UID: \"48f454d9-4648-450f-a206-6859d9d191aa\") " Oct 10 18:07:09 crc kubenswrapper[4799]: I1010 18:07:09.853181 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/48f454d9-4648-450f-a206-6859d9d191aa-config-data-custom\") pod \"48f454d9-4648-450f-a206-6859d9d191aa\" (UID: \"48f454d9-4648-450f-a206-6859d9d191aa\") " Oct 10 18:07:09 crc kubenswrapper[4799]: I1010 18:07:09.853530 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/48f454d9-4648-450f-a206-6859d9d191aa-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "48f454d9-4648-450f-a206-6859d9d191aa" (UID: "48f454d9-4648-450f-a206-6859d9d191aa"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 18:07:09 crc kubenswrapper[4799]: I1010 18:07:09.854339 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48f454d9-4648-450f-a206-6859d9d191aa-config-data\") pod \"48f454d9-4648-450f-a206-6859d9d191aa\" (UID: \"48f454d9-4648-450f-a206-6859d9d191aa\") " Oct 10 18:07:09 crc kubenswrapper[4799]: I1010 18:07:09.854399 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/48f454d9-4648-450f-a206-6859d9d191aa-logs\") pod \"48f454d9-4648-450f-a206-6859d9d191aa\" (UID: \"48f454d9-4648-450f-a206-6859d9d191aa\") " Oct 10 18:07:09 crc kubenswrapper[4799]: I1010 18:07:09.854461 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48f454d9-4648-450f-a206-6859d9d191aa-combined-ca-bundle\") pod \"48f454d9-4648-450f-a206-6859d9d191aa\" (UID: \"48f454d9-4648-450f-a206-6859d9d191aa\") " Oct 10 18:07:09 crc kubenswrapper[4799]: I1010 18:07:09.854586 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hx6lj\" (UniqueName: \"kubernetes.io/projected/48f454d9-4648-450f-a206-6859d9d191aa-kube-api-access-hx6lj\") pod \"48f454d9-4648-450f-a206-6859d9d191aa\" (UID: \"48f454d9-4648-450f-a206-6859d9d191aa\") " Oct 10 18:07:09 crc kubenswrapper[4799]: I1010 18:07:09.855031 4799 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/48f454d9-4648-450f-a206-6859d9d191aa-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 10 18:07:09 crc kubenswrapper[4799]: I1010 18:07:09.855015 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48f454d9-4648-450f-a206-6859d9d191aa-logs" (OuterVolumeSpecName: "logs") pod "48f454d9-4648-450f-a206-6859d9d191aa" (UID: "48f454d9-4648-450f-a206-6859d9d191aa"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:07:09 crc kubenswrapper[4799]: I1010 18:07:09.860473 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48f454d9-4648-450f-a206-6859d9d191aa-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "48f454d9-4648-450f-a206-6859d9d191aa" (UID: "48f454d9-4648-450f-a206-6859d9d191aa"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:07:09 crc kubenswrapper[4799]: I1010 18:07:09.862109 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48f454d9-4648-450f-a206-6859d9d191aa-scripts" (OuterVolumeSpecName: "scripts") pod "48f454d9-4648-450f-a206-6859d9d191aa" (UID: "48f454d9-4648-450f-a206-6859d9d191aa"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:07:09 crc kubenswrapper[4799]: I1010 18:07:09.871432 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48f454d9-4648-450f-a206-6859d9d191aa-kube-api-access-hx6lj" (OuterVolumeSpecName: "kube-api-access-hx6lj") pod "48f454d9-4648-450f-a206-6859d9d191aa" (UID: "48f454d9-4648-450f-a206-6859d9d191aa"). InnerVolumeSpecName "kube-api-access-hx6lj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:07:09 crc kubenswrapper[4799]: I1010 18:07:09.903085 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48f454d9-4648-450f-a206-6859d9d191aa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "48f454d9-4648-450f-a206-6859d9d191aa" (UID: "48f454d9-4648-450f-a206-6859d9d191aa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:07:09 crc kubenswrapper[4799]: I1010 18:07:09.908213 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 10 18:07:09 crc kubenswrapper[4799]: I1010 18:07:09.951329 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48f454d9-4648-450f-a206-6859d9d191aa-config-data" (OuterVolumeSpecName: "config-data") pod "48f454d9-4648-450f-a206-6859d9d191aa" (UID: "48f454d9-4648-450f-a206-6859d9d191aa"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:07:09 crc kubenswrapper[4799]: I1010 18:07:09.956909 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hx6lj\" (UniqueName: \"kubernetes.io/projected/48f454d9-4648-450f-a206-6859d9d191aa-kube-api-access-hx6lj\") on node \"crc\" DevicePath \"\"" Oct 10 18:07:09 crc kubenswrapper[4799]: I1010 18:07:09.957069 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48f454d9-4648-450f-a206-6859d9d191aa-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 18:07:09 crc kubenswrapper[4799]: I1010 18:07:09.957136 4799 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/48f454d9-4648-450f-a206-6859d9d191aa-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 10 18:07:09 crc kubenswrapper[4799]: I1010 18:07:09.957190 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48f454d9-4648-450f-a206-6859d9d191aa-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 18:07:09 crc kubenswrapper[4799]: I1010 18:07:09.957255 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/48f454d9-4648-450f-a206-6859d9d191aa-logs\") on node \"crc\" DevicePath \"\"" Oct 10 18:07:09 crc kubenswrapper[4799]: I1010 18:07:09.957320 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48f454d9-4648-450f-a206-6859d9d191aa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:07:09 crc kubenswrapper[4799]: I1010 18:07:09.958431 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"8d1df32c-4647-4ce9-9a5a-c7d32f297332","Type":"ContainerStarted","Data":"e6c8ca29216f7c7dd61169dd4aed323f6fbc161bb63dd53e26b510965c198d0b"} Oct 10 18:07:09 crc kubenswrapper[4799]: I1010 18:07:09.960277 4799 generic.go:334] "Generic (PLEG): container finished" podID="48f454d9-4648-450f-a206-6859d9d191aa" containerID="ec6861bdba45ecca6ae0cca7a2b30b426dc2c5bc3c5b49fa419d33902aef48a9" exitCode=0 Oct 10 18:07:09 crc kubenswrapper[4799]: I1010 18:07:09.961090 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 10 18:07:09 crc kubenswrapper[4799]: I1010 18:07:09.961336 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"48f454d9-4648-450f-a206-6859d9d191aa","Type":"ContainerDied","Data":"ec6861bdba45ecca6ae0cca7a2b30b426dc2c5bc3c5b49fa419d33902aef48a9"} Oct 10 18:07:09 crc kubenswrapper[4799]: I1010 18:07:09.961508 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"48f454d9-4648-450f-a206-6859d9d191aa","Type":"ContainerDied","Data":"01e86b178e981d5f7d9af6cf10df5e3b38d3476bc552111a5df0ed55f17677a9"} Oct 10 18:07:09 crc kubenswrapper[4799]: I1010 18:07:09.961531 4799 scope.go:117] "RemoveContainer" containerID="ec6861bdba45ecca6ae0cca7a2b30b426dc2c5bc3c5b49fa419d33902aef48a9" Oct 10 18:07:09 crc kubenswrapper[4799]: I1010 18:07:09.992881 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:09.998881 4799 scope.go:117] "RemoveContainer" containerID="866f59c9ebbba8598713593c9e8d69c70c8dcf459233184deec9b651341ba256" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.005872 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.031525 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 10 18:07:10 crc kubenswrapper[4799]: E1010 18:07:10.032171 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48f454d9-4648-450f-a206-6859d9d191aa" containerName="cinder-api" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.032190 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="48f454d9-4648-450f-a206-6859d9d191aa" containerName="cinder-api" Oct 10 18:07:10 crc kubenswrapper[4799]: E1010 18:07:10.032224 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48f454d9-4648-450f-a206-6859d9d191aa" containerName="cinder-api-log" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.032231 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="48f454d9-4648-450f-a206-6859d9d191aa" containerName="cinder-api-log" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.032418 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="48f454d9-4648-450f-a206-6859d9d191aa" containerName="cinder-api" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.032445 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="48f454d9-4648-450f-a206-6859d9d191aa" containerName="cinder-api-log" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.033455 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.035292 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.046539 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.069884 4799 scope.go:117] "RemoveContainer" containerID="ec6861bdba45ecca6ae0cca7a2b30b426dc2c5bc3c5b49fa419d33902aef48a9" Oct 10 18:07:10 crc kubenswrapper[4799]: E1010 18:07:10.078338 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec6861bdba45ecca6ae0cca7a2b30b426dc2c5bc3c5b49fa419d33902aef48a9\": container with ID starting with ec6861bdba45ecca6ae0cca7a2b30b426dc2c5bc3c5b49fa419d33902aef48a9 not found: ID does not exist" containerID="ec6861bdba45ecca6ae0cca7a2b30b426dc2c5bc3c5b49fa419d33902aef48a9" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.078373 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec6861bdba45ecca6ae0cca7a2b30b426dc2c5bc3c5b49fa419d33902aef48a9"} err="failed to get container status \"ec6861bdba45ecca6ae0cca7a2b30b426dc2c5bc3c5b49fa419d33902aef48a9\": rpc error: code = NotFound desc = could not find container \"ec6861bdba45ecca6ae0cca7a2b30b426dc2c5bc3c5b49fa419d33902aef48a9\": container with ID starting with ec6861bdba45ecca6ae0cca7a2b30b426dc2c5bc3c5b49fa419d33902aef48a9 not found: ID does not exist" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.078397 4799 scope.go:117] "RemoveContainer" containerID="866f59c9ebbba8598713593c9e8d69c70c8dcf459233184deec9b651341ba256" Oct 10 18:07:10 crc kubenswrapper[4799]: E1010 18:07:10.078720 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"866f59c9ebbba8598713593c9e8d69c70c8dcf459233184deec9b651341ba256\": container with ID starting with 866f59c9ebbba8598713593c9e8d69c70c8dcf459233184deec9b651341ba256 not found: ID does not exist" containerID="866f59c9ebbba8598713593c9e8d69c70c8dcf459233184deec9b651341ba256" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.078740 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"866f59c9ebbba8598713593c9e8d69c70c8dcf459233184deec9b651341ba256"} err="failed to get container status \"866f59c9ebbba8598713593c9e8d69c70c8dcf459233184deec9b651341ba256\": rpc error: code = NotFound desc = could not find container \"866f59c9ebbba8598713593c9e8d69c70c8dcf459233184deec9b651341ba256\": container with ID starting with 866f59c9ebbba8598713593c9e8d69c70c8dcf459233184deec9b651341ba256 not found: ID does not exist" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.146211 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.148209 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.149543 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.164945 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5fgn\" (UniqueName: 
\"kubernetes.io/projected/30b1084d-18c0-428e-8682-48773409a820-kube-api-access-f5fgn\") pod \"cinder-api-0\" (UID: \"30b1084d-18c0-428e-8682-48773409a820\") " pod="openstack/cinder-api-0" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.165124 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30b1084d-18c0-428e-8682-48773409a820-scripts\") pod \"cinder-api-0\" (UID: \"30b1084d-18c0-428e-8682-48773409a820\") " pod="openstack/cinder-api-0" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.165156 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30b1084d-18c0-428e-8682-48773409a820-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"30b1084d-18c0-428e-8682-48773409a820\") " pod="openstack/cinder-api-0" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.165181 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30b1084d-18c0-428e-8682-48773409a820-config-data\") pod \"cinder-api-0\" (UID: \"30b1084d-18c0-428e-8682-48773409a820\") " pod="openstack/cinder-api-0" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.165224 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/30b1084d-18c0-428e-8682-48773409a820-logs\") pod \"cinder-api-0\" (UID: \"30b1084d-18c0-428e-8682-48773409a820\") " pod="openstack/cinder-api-0" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.165249 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/30b1084d-18c0-428e-8682-48773409a820-config-data-custom\") pod \"cinder-api-0\" (UID: \"30b1084d-18c0-428e-8682-48773409a820\") " pod="openstack/cinder-api-0" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.165495 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/30b1084d-18c0-428e-8682-48773409a820-etc-machine-id\") pod \"cinder-api-0\" (UID: \"30b1084d-18c0-428e-8682-48773409a820\") " pod="openstack/cinder-api-0" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.167623 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.168269 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.173272 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.173347 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.266546 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/30b1084d-18c0-428e-8682-48773409a820-etc-machine-id\") pod \"cinder-api-0\" (UID: \"30b1084d-18c0-428e-8682-48773409a820\") " pod="openstack/cinder-api-0" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.266644 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-f5fgn\" (UniqueName: \"kubernetes.io/projected/30b1084d-18c0-428e-8682-48773409a820-kube-api-access-f5fgn\") pod \"cinder-api-0\" (UID: \"30b1084d-18c0-428e-8682-48773409a820\") " pod="openstack/cinder-api-0" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.266649 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/30b1084d-18c0-428e-8682-48773409a820-etc-machine-id\") pod \"cinder-api-0\" (UID: \"30b1084d-18c0-428e-8682-48773409a820\") " pod="openstack/cinder-api-0" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.266679 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30b1084d-18c0-428e-8682-48773409a820-scripts\") pod \"cinder-api-0\" (UID: \"30b1084d-18c0-428e-8682-48773409a820\") " pod="openstack/cinder-api-0" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.266710 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30b1084d-18c0-428e-8682-48773409a820-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"30b1084d-18c0-428e-8682-48773409a820\") " pod="openstack/cinder-api-0" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.266736 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30b1084d-18c0-428e-8682-48773409a820-config-data\") pod \"cinder-api-0\" (UID: \"30b1084d-18c0-428e-8682-48773409a820\") " pod="openstack/cinder-api-0" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.266860 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/30b1084d-18c0-428e-8682-48773409a820-logs\") pod \"cinder-api-0\" (UID: \"30b1084d-18c0-428e-8682-48773409a820\") " pod="openstack/cinder-api-0" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.266890 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/30b1084d-18c0-428e-8682-48773409a820-config-data-custom\") pod \"cinder-api-0\" (UID: \"30b1084d-18c0-428e-8682-48773409a820\") " pod="openstack/cinder-api-0" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.267399 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/30b1084d-18c0-428e-8682-48773409a820-logs\") pod \"cinder-api-0\" (UID: \"30b1084d-18c0-428e-8682-48773409a820\") " pod="openstack/cinder-api-0" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.270412 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30b1084d-18c0-428e-8682-48773409a820-config-data\") pod \"cinder-api-0\" (UID: \"30b1084d-18c0-428e-8682-48773409a820\") " pod="openstack/cinder-api-0" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.270944 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30b1084d-18c0-428e-8682-48773409a820-scripts\") pod \"cinder-api-0\" (UID: \"30b1084d-18c0-428e-8682-48773409a820\") " pod="openstack/cinder-api-0" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.271241 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/30b1084d-18c0-428e-8682-48773409a820-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"30b1084d-18c0-428e-8682-48773409a820\") " pod="openstack/cinder-api-0" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.271495 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/30b1084d-18c0-428e-8682-48773409a820-config-data-custom\") pod \"cinder-api-0\" (UID: \"30b1084d-18c0-428e-8682-48773409a820\") " pod="openstack/cinder-api-0" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.280649 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5fgn\" (UniqueName: \"kubernetes.io/projected/30b1084d-18c0-428e-8682-48773409a820-kube-api-access-f5fgn\") pod \"cinder-api-0\" (UID: \"30b1084d-18c0-428e-8682-48773409a820\") " pod="openstack/cinder-api-0" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.368442 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.855794 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 10 18:07:10 crc kubenswrapper[4799]: W1010 18:07:10.861340 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod30b1084d_18c0_428e_8682_48773409a820.slice/crio-4ba2e38df214b12ec6d70acd61f2f1aaa81c7b5ea14a28975321b1a88bf84885 WatchSource:0}: Error finding container 4ba2e38df214b12ec6d70acd61f2f1aaa81c7b5ea14a28975321b1a88bf84885: Status 404 returned error can't find the container with id 4ba2e38df214b12ec6d70acd61f2f1aaa81c7b5ea14a28975321b1a88bf84885 Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.977005 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"8d1df32c-4647-4ce9-9a5a-c7d32f297332","Type":"ContainerStarted","Data":"9f24e38ca2268142e9818c4eea23ab531eb20ddd5b480cf3bf16c72aa375d21e"} Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.977072 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"8d1df32c-4647-4ce9-9a5a-c7d32f297332","Type":"ContainerStarted","Data":"7786aa71e23641c328b99629b83b961eb86c63312aa704df867b14238c49c4db"} Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.987229 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"30b1084d-18c0-428e-8682-48773409a820","Type":"ContainerStarted","Data":"4ba2e38df214b12ec6d70acd61f2f1aaa81c7b5ea14a28975321b1a88bf84885"} Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.988256 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.993290 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 10 18:07:10 crc kubenswrapper[4799]: I1010 18:07:10.995596 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 10 18:07:11 crc kubenswrapper[4799]: I1010 18:07:11.007270 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-backup-0" podStartSLOduration=1.947536661 podStartE2EDuration="3.007252261s" podCreationTimestamp="2025-10-10 18:07:08 +0000 UTC" firstStartedPulling="2025-10-10 18:07:09.011336582 +0000 UTC m=+5722.519660717" lastFinishedPulling="2025-10-10 
18:07:10.071052202 +0000 UTC m=+5723.579376317" observedRunningTime="2025-10-10 18:07:11.005962629 +0000 UTC m=+5724.514286764" watchObservedRunningTime="2025-10-10 18:07:11.007252261 +0000 UTC m=+5724.515576386" Oct 10 18:07:11 crc kubenswrapper[4799]: I1010 18:07:11.411725 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48f454d9-4648-450f-a206-6859d9d191aa" path="/var/lib/kubelet/pods/48f454d9-4648-450f-a206-6859d9d191aa/volumes" Oct 10 18:07:12 crc kubenswrapper[4799]: I1010 18:07:12.015159 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:12 crc kubenswrapper[4799]: I1010 18:07:12.018519 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"30b1084d-18c0-428e-8682-48773409a820","Type":"ContainerStarted","Data":"a7efff13215ecf5aa6e46588d84f0baa90e31d3ef7f4011edbbe74d8a9ab7d67"} Oct 10 18:07:13 crc kubenswrapper[4799]: I1010 18:07:13.043034 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"30b1084d-18c0-428e-8682-48773409a820","Type":"ContainerStarted","Data":"41eaf5add87f22bb6128df376171b9c836b1dc9ed30e5449acb8657f46659ff2"} Oct 10 18:07:13 crc kubenswrapper[4799]: I1010 18:07:13.043804 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 10 18:07:13 crc kubenswrapper[4799]: I1010 18:07:13.079317 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.079293494 podStartE2EDuration="4.079293494s" podCreationTimestamp="2025-10-10 18:07:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:07:13.071791501 +0000 UTC m=+5726.580115716" watchObservedRunningTime="2025-10-10 18:07:13.079293494 +0000 UTC m=+5726.587617609" Oct 10 18:07:13 crc kubenswrapper[4799]: I1010 18:07:13.398613 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-backup-0" Oct 10 18:07:15 crc kubenswrapper[4799]: I1010 18:07:15.107139 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 10 18:07:15 crc kubenswrapper[4799]: I1010 18:07:15.176354 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 10 18:07:16 crc kubenswrapper[4799]: I1010 18:07:16.075901 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="bc1e302d-b598-46d7-94a3-6fe3c85e1922" containerName="cinder-scheduler" containerID="cri-o://8f82bfa550e60a432b183ed452c8bd9a5f46610677e72eaf328b1589375d5ea6" gracePeriod=30 Oct 10 18:07:16 crc kubenswrapper[4799]: I1010 18:07:16.075999 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="bc1e302d-b598-46d7-94a3-6fe3c85e1922" containerName="probe" containerID="cri-o://e36f9cd5c46d5f9ccc0cdf810198f3aed38dd9c010a824a14d994b9ae23a8ff6" gracePeriod=30 Oct 10 18:07:17 crc kubenswrapper[4799]: I1010 18:07:17.091167 4799 generic.go:334] "Generic (PLEG): container finished" podID="bc1e302d-b598-46d7-94a3-6fe3c85e1922" containerID="e36f9cd5c46d5f9ccc0cdf810198f3aed38dd9c010a824a14d994b9ae23a8ff6" exitCode=0 Oct 10 18:07:17 crc kubenswrapper[4799]: I1010 18:07:17.091251 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/cinder-scheduler-0" event={"ID":"bc1e302d-b598-46d7-94a3-6fe3c85e1922","Type":"ContainerDied","Data":"e36f9cd5c46d5f9ccc0cdf810198f3aed38dd9c010a824a14d994b9ae23a8ff6"} Oct 10 18:07:17 crc kubenswrapper[4799]: I1010 18:07:17.291862 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-volume1-0" Oct 10 18:07:17 crc kubenswrapper[4799]: I1010 18:07:17.632034 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 10 18:07:17 crc kubenswrapper[4799]: I1010 18:07:17.656528 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc1e302d-b598-46d7-94a3-6fe3c85e1922-combined-ca-bundle\") pod \"bc1e302d-b598-46d7-94a3-6fe3c85e1922\" (UID: \"bc1e302d-b598-46d7-94a3-6fe3c85e1922\") " Oct 10 18:07:17 crc kubenswrapper[4799]: I1010 18:07:17.656606 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mb545\" (UniqueName: \"kubernetes.io/projected/bc1e302d-b598-46d7-94a3-6fe3c85e1922-kube-api-access-mb545\") pod \"bc1e302d-b598-46d7-94a3-6fe3c85e1922\" (UID: \"bc1e302d-b598-46d7-94a3-6fe3c85e1922\") " Oct 10 18:07:17 crc kubenswrapper[4799]: I1010 18:07:17.656652 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc1e302d-b598-46d7-94a3-6fe3c85e1922-config-data\") pod \"bc1e302d-b598-46d7-94a3-6fe3c85e1922\" (UID: \"bc1e302d-b598-46d7-94a3-6fe3c85e1922\") " Oct 10 18:07:17 crc kubenswrapper[4799]: I1010 18:07:17.656730 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bc1e302d-b598-46d7-94a3-6fe3c85e1922-etc-machine-id\") pod \"bc1e302d-b598-46d7-94a3-6fe3c85e1922\" (UID: \"bc1e302d-b598-46d7-94a3-6fe3c85e1922\") " Oct 10 18:07:17 crc kubenswrapper[4799]: I1010 18:07:17.656812 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bc1e302d-b598-46d7-94a3-6fe3c85e1922-scripts\") pod \"bc1e302d-b598-46d7-94a3-6fe3c85e1922\" (UID: \"bc1e302d-b598-46d7-94a3-6fe3c85e1922\") " Oct 10 18:07:17 crc kubenswrapper[4799]: I1010 18:07:17.656849 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bc1e302d-b598-46d7-94a3-6fe3c85e1922-config-data-custom\") pod \"bc1e302d-b598-46d7-94a3-6fe3c85e1922\" (UID: \"bc1e302d-b598-46d7-94a3-6fe3c85e1922\") " Oct 10 18:07:17 crc kubenswrapper[4799]: I1010 18:07:17.661490 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bc1e302d-b598-46d7-94a3-6fe3c85e1922-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "bc1e302d-b598-46d7-94a3-6fe3c85e1922" (UID: "bc1e302d-b598-46d7-94a3-6fe3c85e1922"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 18:07:17 crc kubenswrapper[4799]: I1010 18:07:17.665381 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc1e302d-b598-46d7-94a3-6fe3c85e1922-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "bc1e302d-b598-46d7-94a3-6fe3c85e1922" (UID: "bc1e302d-b598-46d7-94a3-6fe3c85e1922"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:07:17 crc kubenswrapper[4799]: I1010 18:07:17.679042 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc1e302d-b598-46d7-94a3-6fe3c85e1922-scripts" (OuterVolumeSpecName: "scripts") pod "bc1e302d-b598-46d7-94a3-6fe3c85e1922" (UID: "bc1e302d-b598-46d7-94a3-6fe3c85e1922"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:07:17 crc kubenswrapper[4799]: I1010 18:07:17.685462 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc1e302d-b598-46d7-94a3-6fe3c85e1922-kube-api-access-mb545" (OuterVolumeSpecName: "kube-api-access-mb545") pod "bc1e302d-b598-46d7-94a3-6fe3c85e1922" (UID: "bc1e302d-b598-46d7-94a3-6fe3c85e1922"). InnerVolumeSpecName "kube-api-access-mb545". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:07:17 crc kubenswrapper[4799]: I1010 18:07:17.747864 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc1e302d-b598-46d7-94a3-6fe3c85e1922-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bc1e302d-b598-46d7-94a3-6fe3c85e1922" (UID: "bc1e302d-b598-46d7-94a3-6fe3c85e1922"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:07:17 crc kubenswrapper[4799]: I1010 18:07:17.759068 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc1e302d-b598-46d7-94a3-6fe3c85e1922-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:07:17 crc kubenswrapper[4799]: I1010 18:07:17.759100 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mb545\" (UniqueName: \"kubernetes.io/projected/bc1e302d-b598-46d7-94a3-6fe3c85e1922-kube-api-access-mb545\") on node \"crc\" DevicePath \"\"" Oct 10 18:07:17 crc kubenswrapper[4799]: I1010 18:07:17.759116 4799 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bc1e302d-b598-46d7-94a3-6fe3c85e1922-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 10 18:07:17 crc kubenswrapper[4799]: I1010 18:07:17.759125 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bc1e302d-b598-46d7-94a3-6fe3c85e1922-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 18:07:17 crc kubenswrapper[4799]: I1010 18:07:17.759134 4799 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bc1e302d-b598-46d7-94a3-6fe3c85e1922-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 10 18:07:17 crc kubenswrapper[4799]: I1010 18:07:17.778402 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc1e302d-b598-46d7-94a3-6fe3c85e1922-config-data" (OuterVolumeSpecName: "config-data") pod "bc1e302d-b598-46d7-94a3-6fe3c85e1922" (UID: "bc1e302d-b598-46d7-94a3-6fe3c85e1922"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:07:17 crc kubenswrapper[4799]: I1010 18:07:17.861435 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc1e302d-b598-46d7-94a3-6fe3c85e1922-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.107314 4799 generic.go:334] "Generic (PLEG): container finished" podID="bc1e302d-b598-46d7-94a3-6fe3c85e1922" containerID="8f82bfa550e60a432b183ed452c8bd9a5f46610677e72eaf328b1589375d5ea6" exitCode=0 Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.107374 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"bc1e302d-b598-46d7-94a3-6fe3c85e1922","Type":"ContainerDied","Data":"8f82bfa550e60a432b183ed452c8bd9a5f46610677e72eaf328b1589375d5ea6"} Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.107411 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"bc1e302d-b598-46d7-94a3-6fe3c85e1922","Type":"ContainerDied","Data":"7095a7876c26e38b4570312f7e5a80834b384c12ecf6474a6e4a0bb6c1d97ef0"} Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.107418 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.107431 4799 scope.go:117] "RemoveContainer" containerID="e36f9cd5c46d5f9ccc0cdf810198f3aed38dd9c010a824a14d994b9ae23a8ff6" Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.150369 4799 scope.go:117] "RemoveContainer" containerID="8f82bfa550e60a432b183ed452c8bd9a5f46610677e72eaf328b1589375d5ea6" Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.169448 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.190842 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.203920 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 10 18:07:18 crc kubenswrapper[4799]: E1010 18:07:18.204573 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc1e302d-b598-46d7-94a3-6fe3c85e1922" containerName="cinder-scheduler" Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.204603 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc1e302d-b598-46d7-94a3-6fe3c85e1922" containerName="cinder-scheduler" Oct 10 18:07:18 crc kubenswrapper[4799]: E1010 18:07:18.204670 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc1e302d-b598-46d7-94a3-6fe3c85e1922" containerName="probe" Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.204684 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc1e302d-b598-46d7-94a3-6fe3c85e1922" containerName="probe" Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.205114 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc1e302d-b598-46d7-94a3-6fe3c85e1922" containerName="probe" Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.205167 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc1e302d-b598-46d7-94a3-6fe3c85e1922" containerName="cinder-scheduler" Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.206963 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.210799 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.211024 4799 scope.go:117] "RemoveContainer" containerID="e36f9cd5c46d5f9ccc0cdf810198f3aed38dd9c010a824a14d994b9ae23a8ff6" Oct 10 18:07:18 crc kubenswrapper[4799]: E1010 18:07:18.213393 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e36f9cd5c46d5f9ccc0cdf810198f3aed38dd9c010a824a14d994b9ae23a8ff6\": container with ID starting with e36f9cd5c46d5f9ccc0cdf810198f3aed38dd9c010a824a14d994b9ae23a8ff6 not found: ID does not exist" containerID="e36f9cd5c46d5f9ccc0cdf810198f3aed38dd9c010a824a14d994b9ae23a8ff6" Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.213456 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e36f9cd5c46d5f9ccc0cdf810198f3aed38dd9c010a824a14d994b9ae23a8ff6"} err="failed to get container status \"e36f9cd5c46d5f9ccc0cdf810198f3aed38dd9c010a824a14d994b9ae23a8ff6\": rpc error: code = NotFound desc = could not find container \"e36f9cd5c46d5f9ccc0cdf810198f3aed38dd9c010a824a14d994b9ae23a8ff6\": container with ID starting with e36f9cd5c46d5f9ccc0cdf810198f3aed38dd9c010a824a14d994b9ae23a8ff6 not found: ID does not exist" Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.213505 4799 scope.go:117] "RemoveContainer" containerID="8f82bfa550e60a432b183ed452c8bd9a5f46610677e72eaf328b1589375d5ea6" Oct 10 18:07:18 crc kubenswrapper[4799]: E1010 18:07:18.216496 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f82bfa550e60a432b183ed452c8bd9a5f46610677e72eaf328b1589375d5ea6\": container with ID starting with 8f82bfa550e60a432b183ed452c8bd9a5f46610677e72eaf328b1589375d5ea6 not found: ID does not exist" containerID="8f82bfa550e60a432b183ed452c8bd9a5f46610677e72eaf328b1589375d5ea6" Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.216546 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f82bfa550e60a432b183ed452c8bd9a5f46610677e72eaf328b1589375d5ea6"} err="failed to get container status \"8f82bfa550e60a432b183ed452c8bd9a5f46610677e72eaf328b1589375d5ea6\": rpc error: code = NotFound desc = could not find container \"8f82bfa550e60a432b183ed452c8bd9a5f46610677e72eaf328b1589375d5ea6\": container with ID starting with 8f82bfa550e60a432b183ed452c8bd9a5f46610677e72eaf328b1589375d5ea6 not found: ID does not exist" Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.225586 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.268678 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62762f59-7347-40da-9e08-51c45798a4cd-scripts\") pod \"cinder-scheduler-0\" (UID: \"62762f59-7347-40da-9e08-51c45798a4cd\") " pod="openstack/cinder-scheduler-0" Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.268748 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62762f59-7347-40da-9e08-51c45798a4cd-config-data\") pod \"cinder-scheduler-0\" (UID: 
\"62762f59-7347-40da-9e08-51c45798a4cd\") " pod="openstack/cinder-scheduler-0" Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.268960 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62762f59-7347-40da-9e08-51c45798a4cd-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"62762f59-7347-40da-9e08-51c45798a4cd\") " pod="openstack/cinder-scheduler-0" Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.269122 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/62762f59-7347-40da-9e08-51c45798a4cd-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"62762f59-7347-40da-9e08-51c45798a4cd\") " pod="openstack/cinder-scheduler-0" Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.269164 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/62762f59-7347-40da-9e08-51c45798a4cd-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"62762f59-7347-40da-9e08-51c45798a4cd\") " pod="openstack/cinder-scheduler-0" Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.269417 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wdsd\" (UniqueName: \"kubernetes.io/projected/62762f59-7347-40da-9e08-51c45798a4cd-kube-api-access-4wdsd\") pod \"cinder-scheduler-0\" (UID: \"62762f59-7347-40da-9e08-51c45798a4cd\") " pod="openstack/cinder-scheduler-0" Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.371862 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wdsd\" (UniqueName: \"kubernetes.io/projected/62762f59-7347-40da-9e08-51c45798a4cd-kube-api-access-4wdsd\") pod \"cinder-scheduler-0\" (UID: \"62762f59-7347-40da-9e08-51c45798a4cd\") " pod="openstack/cinder-scheduler-0" Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.372013 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62762f59-7347-40da-9e08-51c45798a4cd-scripts\") pod \"cinder-scheduler-0\" (UID: \"62762f59-7347-40da-9e08-51c45798a4cd\") " pod="openstack/cinder-scheduler-0" Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.372063 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62762f59-7347-40da-9e08-51c45798a4cd-config-data\") pod \"cinder-scheduler-0\" (UID: \"62762f59-7347-40da-9e08-51c45798a4cd\") " pod="openstack/cinder-scheduler-0" Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.372115 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62762f59-7347-40da-9e08-51c45798a4cd-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"62762f59-7347-40da-9e08-51c45798a4cd\") " pod="openstack/cinder-scheduler-0" Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.372246 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/62762f59-7347-40da-9e08-51c45798a4cd-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"62762f59-7347-40da-9e08-51c45798a4cd\") " pod="openstack/cinder-scheduler-0" Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 
18:07:18.372287 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/62762f59-7347-40da-9e08-51c45798a4cd-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"62762f59-7347-40da-9e08-51c45798a4cd\") " pod="openstack/cinder-scheduler-0" Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.372444 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/62762f59-7347-40da-9e08-51c45798a4cd-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"62762f59-7347-40da-9e08-51c45798a4cd\") " pod="openstack/cinder-scheduler-0" Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.376807 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62762f59-7347-40da-9e08-51c45798a4cd-scripts\") pod \"cinder-scheduler-0\" (UID: \"62762f59-7347-40da-9e08-51c45798a4cd\") " pod="openstack/cinder-scheduler-0" Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.376943 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62762f59-7347-40da-9e08-51c45798a4cd-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"62762f59-7347-40da-9e08-51c45798a4cd\") " pod="openstack/cinder-scheduler-0" Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.378064 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/62762f59-7347-40da-9e08-51c45798a4cd-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"62762f59-7347-40da-9e08-51c45798a4cd\") " pod="openstack/cinder-scheduler-0" Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.379129 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62762f59-7347-40da-9e08-51c45798a4cd-config-data\") pod \"cinder-scheduler-0\" (UID: \"62762f59-7347-40da-9e08-51c45798a4cd\") " pod="openstack/cinder-scheduler-0" Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.397165 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wdsd\" (UniqueName: \"kubernetes.io/projected/62762f59-7347-40da-9e08-51c45798a4cd-kube-api-access-4wdsd\") pod \"cinder-scheduler-0\" (UID: \"62762f59-7347-40da-9e08-51c45798a4cd\") " pod="openstack/cinder-scheduler-0" Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.404297 4799 scope.go:117] "RemoveContainer" containerID="6ae067b7971fd6480cb0c3ccf44d4e22f837ba4674373b4b5903247a9af39cf1" Oct 10 18:07:18 crc kubenswrapper[4799]: E1010 18:07:18.404881 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.536195 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 10 18:07:18 crc kubenswrapper[4799]: I1010 18:07:18.624048 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-backup-0" Oct 10 18:07:19 crc kubenswrapper[4799]: I1010 18:07:19.091002 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 10 18:07:19 crc kubenswrapper[4799]: I1010 18:07:19.124462 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"62762f59-7347-40da-9e08-51c45798a4cd","Type":"ContainerStarted","Data":"16a0ae103e5f3780084bfca4787967388e6180e705e738e18445732b60cf3f12"} Oct 10 18:07:19 crc kubenswrapper[4799]: I1010 18:07:19.422321 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc1e302d-b598-46d7-94a3-6fe3c85e1922" path="/var/lib/kubelet/pods/bc1e302d-b598-46d7-94a3-6fe3c85e1922/volumes" Oct 10 18:07:20 crc kubenswrapper[4799]: I1010 18:07:20.138334 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"62762f59-7347-40da-9e08-51c45798a4cd","Type":"ContainerStarted","Data":"00d792b0aed49816ff6f43182082be786a5db42478bd85d8aa4b3c7288cc31e3"} Oct 10 18:07:21 crc kubenswrapper[4799]: I1010 18:07:21.151041 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"62762f59-7347-40da-9e08-51c45798a4cd","Type":"ContainerStarted","Data":"c693880b6ad12b85cdc77d3407723e33e4e19320454ac7f9f0fffbe2611d553a"} Oct 10 18:07:21 crc kubenswrapper[4799]: I1010 18:07:21.991942 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Oct 10 18:07:22 crc kubenswrapper[4799]: I1010 18:07:22.039092 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.039061286 podStartE2EDuration="4.039061286s" podCreationTimestamp="2025-10-10 18:07:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:07:21.16943042 +0000 UTC m=+5734.677754565" watchObservedRunningTime="2025-10-10 18:07:22.039061286 +0000 UTC m=+5735.547385451" Oct 10 18:07:23 crc kubenswrapper[4799]: I1010 18:07:23.536328 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 10 18:07:28 crc kubenswrapper[4799]: I1010 18:07:28.793839 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 10 18:07:33 crc kubenswrapper[4799]: I1010 18:07:33.402665 4799 scope.go:117] "RemoveContainer" containerID="6ae067b7971fd6480cb0c3ccf44d4e22f837ba4674373b4b5903247a9af39cf1" Oct 10 18:07:33 crc kubenswrapper[4799]: E1010 18:07:33.403814 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:07:41 crc kubenswrapper[4799]: I1010 18:07:41.091702 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-tcjxt"] Oct 10 18:07:41 crc kubenswrapper[4799]: I1010 18:07:41.108900 4799 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openstack/keystone-db-create-tcjxt"] Oct 10 18:07:41 crc kubenswrapper[4799]: I1010 18:07:41.426329 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d769744a-3c43-43c7-918f-0af5be0c4974" path="/var/lib/kubelet/pods/d769744a-3c43-43c7-918f-0af5be0c4974/volumes" Oct 10 18:07:48 crc kubenswrapper[4799]: I1010 18:07:48.403478 4799 scope.go:117] "RemoveContainer" containerID="6ae067b7971fd6480cb0c3ccf44d4e22f837ba4674373b4b5903247a9af39cf1" Oct 10 18:07:48 crc kubenswrapper[4799]: E1010 18:07:48.404475 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:07:50 crc kubenswrapper[4799]: I1010 18:07:50.037940 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-3f3c-account-create-ws7mb"] Oct 10 18:07:50 crc kubenswrapper[4799]: I1010 18:07:50.052217 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-3f3c-account-create-ws7mb"] Oct 10 18:07:51 crc kubenswrapper[4799]: I1010 18:07:51.415348 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30bdf99b-c4af-4caa-87db-2ff622e2eecd" path="/var/lib/kubelet/pods/30bdf99b-c4af-4caa-87db-2ff622e2eecd/volumes" Oct 10 18:07:56 crc kubenswrapper[4799]: I1010 18:07:56.050399 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-jbxkg"] Oct 10 18:07:56 crc kubenswrapper[4799]: I1010 18:07:56.060505 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-jbxkg"] Oct 10 18:07:57 crc kubenswrapper[4799]: I1010 18:07:57.422388 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="419d63e8-2e77-4702-af72-31e30165e453" path="/var/lib/kubelet/pods/419d63e8-2e77-4702-af72-31e30165e453/volumes" Oct 10 18:08:01 crc kubenswrapper[4799]: I1010 18:08:01.405298 4799 scope.go:117] "RemoveContainer" containerID="6ae067b7971fd6480cb0c3ccf44d4e22f837ba4674373b4b5903247a9af39cf1" Oct 10 18:08:01 crc kubenswrapper[4799]: E1010 18:08:01.406535 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:08:08 crc kubenswrapper[4799]: I1010 18:08:08.151006 4799 scope.go:117] "RemoveContainer" containerID="5cc5cb66dc221d431e002bb6c081715f35660d3980f73c0c87e4a69b0a54cd68" Oct 10 18:08:08 crc kubenswrapper[4799]: I1010 18:08:08.190527 4799 scope.go:117] "RemoveContainer" containerID="7cb047d0d62c0ebfb2408a9c5fd465068ed67a7963e00639f7e5858dc64dec55" Oct 10 18:08:08 crc kubenswrapper[4799]: I1010 18:08:08.280025 4799 scope.go:117] "RemoveContainer" containerID="dbc7ff51b886053be7d30b5b79155481e18ab3aeb7c25036396ae983a24d25c0" Oct 10 18:08:10 crc kubenswrapper[4799]: I1010 18:08:10.064622 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-lpdsw"] Oct 10 18:08:10 crc kubenswrapper[4799]: I1010 
Oct 10 18:08:11 crc kubenswrapper[4799]: I1010 18:08:11.421207 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32928cd2-df58-4d78-a702-f59ec3449bae" path="/var/lib/kubelet/pods/32928cd2-df58-4d78-a702-f59ec3449bae/volumes"
Oct 10 18:08:15 crc kubenswrapper[4799]: I1010 18:08:15.402646 4799 scope.go:117] "RemoveContainer" containerID="6ae067b7971fd6480cb0c3ccf44d4e22f837ba4674373b4b5903247a9af39cf1"
Oct 10 18:08:15 crc kubenswrapper[4799]: E1010 18:08:15.403451 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb"
Oct 10 18:08:27 crc kubenswrapper[4799]: I1010 18:08:27.412438 4799 scope.go:117] "RemoveContainer" containerID="6ae067b7971fd6480cb0c3ccf44d4e22f837ba4674373b4b5903247a9af39cf1"
Oct 10 18:08:27 crc kubenswrapper[4799]: E1010 18:08:27.416146 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb"
Oct 10 18:08:41 crc kubenswrapper[4799]: I1010 18:08:41.403077 4799 scope.go:117] "RemoveContainer" containerID="6ae067b7971fd6480cb0c3ccf44d4e22f837ba4674373b4b5903247a9af39cf1"
Oct 10 18:08:41 crc kubenswrapper[4799]: E1010 18:08:41.404353 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb"
Oct 10 18:08:52 crc kubenswrapper[4799]: I1010 18:08:52.403052 4799 scope.go:117] "RemoveContainer" containerID="6ae067b7971fd6480cb0c3ccf44d4e22f837ba4674373b4b5903247a9af39cf1"
Oct 10 18:08:52 crc kubenswrapper[4799]: E1010 18:08:52.404864 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb"
Oct 10 18:08:57 crc kubenswrapper[4799]: I1010 18:08:57.284417 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-lt5hp"]
Oct 10 18:08:57 crc kubenswrapper[4799]: I1010 18:08:57.287362 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lt5hp"
Oct 10 18:08:57 crc kubenswrapper[4799]: I1010 18:08:57.313323 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lt5hp"]
Oct 10 18:08:57 crc kubenswrapper[4799]: I1010 18:08:57.392372 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6-utilities\") pod \"redhat-operators-lt5hp\" (UID: \"410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6\") " pod="openshift-marketplace/redhat-operators-lt5hp"
Oct 10 18:08:57 crc kubenswrapper[4799]: I1010 18:08:57.392419 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mpzrp\" (UniqueName: \"kubernetes.io/projected/410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6-kube-api-access-mpzrp\") pod \"redhat-operators-lt5hp\" (UID: \"410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6\") " pod="openshift-marketplace/redhat-operators-lt5hp"
Oct 10 18:08:57 crc kubenswrapper[4799]: I1010 18:08:57.392795 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6-catalog-content\") pod \"redhat-operators-lt5hp\" (UID: \"410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6\") " pod="openshift-marketplace/redhat-operators-lt5hp"
Oct 10 18:08:57 crc kubenswrapper[4799]: I1010 18:08:57.494077 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6-catalog-content\") pod \"redhat-operators-lt5hp\" (UID: \"410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6\") " pod="openshift-marketplace/redhat-operators-lt5hp"
Oct 10 18:08:57 crc kubenswrapper[4799]: I1010 18:08:57.494152 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6-utilities\") pod \"redhat-operators-lt5hp\" (UID: \"410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6\") " pod="openshift-marketplace/redhat-operators-lt5hp"
Oct 10 18:08:57 crc kubenswrapper[4799]: I1010 18:08:57.494174 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mpzrp\" (UniqueName: \"kubernetes.io/projected/410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6-kube-api-access-mpzrp\") pod \"redhat-operators-lt5hp\" (UID: \"410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6\") " pod="openshift-marketplace/redhat-operators-lt5hp"
Oct 10 18:08:57 crc kubenswrapper[4799]: I1010 18:08:57.494751 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6-catalog-content\") pod \"redhat-operators-lt5hp\" (UID: \"410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6\") " pod="openshift-marketplace/redhat-operators-lt5hp"
Oct 10 18:08:57 crc kubenswrapper[4799]: I1010 18:08:57.494769 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6-utilities\") pod \"redhat-operators-lt5hp\" (UID: \"410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6\") " pod="openshift-marketplace/redhat-operators-lt5hp"
Oct 10 18:08:57 crc kubenswrapper[4799]: I1010 18:08:57.523355 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mpzrp\" (UniqueName: \"kubernetes.io/projected/410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6-kube-api-access-mpzrp\") pod \"redhat-operators-lt5hp\" (UID: \"410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6\") " pod="openshift-marketplace/redhat-operators-lt5hp"
\"kube-api-access-mpzrp\" (UniqueName: \"kubernetes.io/projected/410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6-kube-api-access-mpzrp\") pod \"redhat-operators-lt5hp\" (UID: \"410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6\") " pod="openshift-marketplace/redhat-operators-lt5hp" Oct 10 18:08:57 crc kubenswrapper[4799]: I1010 18:08:57.621321 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lt5hp" Oct 10 18:08:58 crc kubenswrapper[4799]: I1010 18:08:58.081682 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lt5hp"] Oct 10 18:08:58 crc kubenswrapper[4799]: I1010 18:08:58.438887 4799 generic.go:334] "Generic (PLEG): container finished" podID="410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6" containerID="7cb175f883ea3c00509c84b06d5bf4287c32e79f32b0ac40759ef6475721c4d9" exitCode=0 Oct 10 18:08:58 crc kubenswrapper[4799]: I1010 18:08:58.438938 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lt5hp" event={"ID":"410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6","Type":"ContainerDied","Data":"7cb175f883ea3c00509c84b06d5bf4287c32e79f32b0ac40759ef6475721c4d9"} Oct 10 18:08:58 crc kubenswrapper[4799]: I1010 18:08:58.438970 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lt5hp" event={"ID":"410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6","Type":"ContainerStarted","Data":"540b454e88cd0810ce31d57595fe661fbd17fe6254df39b1f33b1fb5cfcc10d6"} Oct 10 18:09:00 crc kubenswrapper[4799]: I1010 18:09:00.465356 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lt5hp" event={"ID":"410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6","Type":"ContainerStarted","Data":"d8b70f49973f4690e71012318a65f839385cf2fc52ccb0a203fe22c24c28bd71"} Oct 10 18:09:02 crc kubenswrapper[4799]: I1010 18:09:02.490581 4799 generic.go:334] "Generic (PLEG): container finished" podID="410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6" containerID="d8b70f49973f4690e71012318a65f839385cf2fc52ccb0a203fe22c24c28bd71" exitCode=0 Oct 10 18:09:02 crc kubenswrapper[4799]: I1010 18:09:02.490683 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lt5hp" event={"ID":"410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6","Type":"ContainerDied","Data":"d8b70f49973f4690e71012318a65f839385cf2fc52ccb0a203fe22c24c28bd71"} Oct 10 18:09:03 crc kubenswrapper[4799]: I1010 18:09:03.504673 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lt5hp" event={"ID":"410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6","Type":"ContainerStarted","Data":"f53a9e787bb7bc6cd23599d5eca0fcf9a184e7823a8a0d3ce568c9464bae5ea3"} Oct 10 18:09:03 crc kubenswrapper[4799]: I1010 18:09:03.551241 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-lt5hp" podStartSLOduration=2.076605069 podStartE2EDuration="6.551215263s" podCreationTimestamp="2025-10-10 18:08:57 +0000 UTC" firstStartedPulling="2025-10-10 18:08:58.441001397 +0000 UTC m=+5831.949325512" lastFinishedPulling="2025-10-10 18:09:02.915611551 +0000 UTC m=+5836.423935706" observedRunningTime="2025-10-10 18:09:03.541028254 +0000 UTC m=+5837.049352399" watchObservedRunningTime="2025-10-10 18:09:03.551215263 +0000 UTC m=+5837.059539418" Oct 10 18:09:05 crc kubenswrapper[4799]: I1010 18:09:05.403844 4799 scope.go:117] "RemoveContainer" containerID="6ae067b7971fd6480cb0c3ccf44d4e22f837ba4674373b4b5903247a9af39cf1" Oct 10 
18:09:05 crc kubenswrapper[4799]: E1010 18:09:05.404990 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:09:07 crc kubenswrapper[4799]: I1010 18:09:07.622148 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-lt5hp" Oct 10 18:09:07 crc kubenswrapper[4799]: I1010 18:09:07.622378 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-lt5hp" Oct 10 18:09:08 crc kubenswrapper[4799]: I1010 18:09:08.428571 4799 scope.go:117] "RemoveContainer" containerID="1ee729ddf15efa277a3f6bd311967cc2196f1dedc3e6b542d10e7d2012d252ec" Oct 10 18:09:08 crc kubenswrapper[4799]: I1010 18:09:08.682036 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-lt5hp" podUID="410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6" containerName="registry-server" probeResult="failure" output=< Oct 10 18:09:08 crc kubenswrapper[4799]: timeout: failed to connect service ":50051" within 1s Oct 10 18:09:08 crc kubenswrapper[4799]: > Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.630949 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-8zzgn"] Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.633705 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-8zzgn" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.636412 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-m8ccx" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.636811 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.642942 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-j24zd"] Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.644447 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-j24zd" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.658673 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-8zzgn"] Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.665848 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-j24zd"] Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.745428 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/505e76c2-15f5-4188-bc6e-a249a8d753d0-etc-ovs\") pod \"ovn-controller-ovs-8zzgn\" (UID: \"505e76c2-15f5-4188-bc6e-a249a8d753d0\") " pod="openstack/ovn-controller-ovs-8zzgn" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.745482 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0b6a8795-4a13-49de-a9cd-c58595e216dc-scripts\") pod \"ovn-controller-j24zd\" (UID: \"0b6a8795-4a13-49de-a9cd-c58595e216dc\") " pod="openstack/ovn-controller-j24zd" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.745577 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qlczk\" (UniqueName: \"kubernetes.io/projected/505e76c2-15f5-4188-bc6e-a249a8d753d0-kube-api-access-qlczk\") pod \"ovn-controller-ovs-8zzgn\" (UID: \"505e76c2-15f5-4188-bc6e-a249a8d753d0\") " pod="openstack/ovn-controller-ovs-8zzgn" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.745608 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/505e76c2-15f5-4188-bc6e-a249a8d753d0-scripts\") pod \"ovn-controller-ovs-8zzgn\" (UID: \"505e76c2-15f5-4188-bc6e-a249a8d753d0\") " pod="openstack/ovn-controller-ovs-8zzgn" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.745629 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0b6a8795-4a13-49de-a9cd-c58595e216dc-var-log-ovn\") pod \"ovn-controller-j24zd\" (UID: \"0b6a8795-4a13-49de-a9cd-c58595e216dc\") " pod="openstack/ovn-controller-j24zd" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.745696 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/505e76c2-15f5-4188-bc6e-a249a8d753d0-var-lib\") pod \"ovn-controller-ovs-8zzgn\" (UID: \"505e76c2-15f5-4188-bc6e-a249a8d753d0\") " pod="openstack/ovn-controller-ovs-8zzgn" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.745729 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wntfc\" (UniqueName: \"kubernetes.io/projected/0b6a8795-4a13-49de-a9cd-c58595e216dc-kube-api-access-wntfc\") pod \"ovn-controller-j24zd\" (UID: \"0b6a8795-4a13-49de-a9cd-c58595e216dc\") " pod="openstack/ovn-controller-j24zd" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.745792 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/505e76c2-15f5-4188-bc6e-a249a8d753d0-var-log\") pod \"ovn-controller-ovs-8zzgn\" (UID: \"505e76c2-15f5-4188-bc6e-a249a8d753d0\") " pod="openstack/ovn-controller-ovs-8zzgn" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 
18:09:13.745817 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0b6a8795-4a13-49de-a9cd-c58595e216dc-var-run-ovn\") pod \"ovn-controller-j24zd\" (UID: \"0b6a8795-4a13-49de-a9cd-c58595e216dc\") " pod="openstack/ovn-controller-j24zd" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.745843 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0b6a8795-4a13-49de-a9cd-c58595e216dc-var-run\") pod \"ovn-controller-j24zd\" (UID: \"0b6a8795-4a13-49de-a9cd-c58595e216dc\") " pod="openstack/ovn-controller-j24zd" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.745874 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/505e76c2-15f5-4188-bc6e-a249a8d753d0-var-run\") pod \"ovn-controller-ovs-8zzgn\" (UID: \"505e76c2-15f5-4188-bc6e-a249a8d753d0\") " pod="openstack/ovn-controller-ovs-8zzgn" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.847156 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/505e76c2-15f5-4188-bc6e-a249a8d753d0-var-log\") pod \"ovn-controller-ovs-8zzgn\" (UID: \"505e76c2-15f5-4188-bc6e-a249a8d753d0\") " pod="openstack/ovn-controller-ovs-8zzgn" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.847465 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0b6a8795-4a13-49de-a9cd-c58595e216dc-var-run-ovn\") pod \"ovn-controller-j24zd\" (UID: \"0b6a8795-4a13-49de-a9cd-c58595e216dc\") " pod="openstack/ovn-controller-j24zd" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.847562 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0b6a8795-4a13-49de-a9cd-c58595e216dc-var-run\") pod \"ovn-controller-j24zd\" (UID: \"0b6a8795-4a13-49de-a9cd-c58595e216dc\") " pod="openstack/ovn-controller-j24zd" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.847646 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0b6a8795-4a13-49de-a9cd-c58595e216dc-var-run-ovn\") pod \"ovn-controller-j24zd\" (UID: \"0b6a8795-4a13-49de-a9cd-c58595e216dc\") " pod="openstack/ovn-controller-j24zd" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.847674 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0b6a8795-4a13-49de-a9cd-c58595e216dc-var-run\") pod \"ovn-controller-j24zd\" (UID: \"0b6a8795-4a13-49de-a9cd-c58595e216dc\") " pod="openstack/ovn-controller-j24zd" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.847551 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/505e76c2-15f5-4188-bc6e-a249a8d753d0-var-log\") pod \"ovn-controller-ovs-8zzgn\" (UID: \"505e76c2-15f5-4188-bc6e-a249a8d753d0\") " pod="openstack/ovn-controller-ovs-8zzgn" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.847812 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/505e76c2-15f5-4188-bc6e-a249a8d753d0-var-run\") pod \"ovn-controller-ovs-8zzgn\" (UID: 
\"505e76c2-15f5-4188-bc6e-a249a8d753d0\") " pod="openstack/ovn-controller-ovs-8zzgn" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.847813 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/505e76c2-15f5-4188-bc6e-a249a8d753d0-var-run\") pod \"ovn-controller-ovs-8zzgn\" (UID: \"505e76c2-15f5-4188-bc6e-a249a8d753d0\") " pod="openstack/ovn-controller-ovs-8zzgn" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.848002 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/505e76c2-15f5-4188-bc6e-a249a8d753d0-etc-ovs\") pod \"ovn-controller-ovs-8zzgn\" (UID: \"505e76c2-15f5-4188-bc6e-a249a8d753d0\") " pod="openstack/ovn-controller-ovs-8zzgn" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.848032 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0b6a8795-4a13-49de-a9cd-c58595e216dc-scripts\") pod \"ovn-controller-j24zd\" (UID: \"0b6a8795-4a13-49de-a9cd-c58595e216dc\") " pod="openstack/ovn-controller-j24zd" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.848140 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qlczk\" (UniqueName: \"kubernetes.io/projected/505e76c2-15f5-4188-bc6e-a249a8d753d0-kube-api-access-qlczk\") pod \"ovn-controller-ovs-8zzgn\" (UID: \"505e76c2-15f5-4188-bc6e-a249a8d753d0\") " pod="openstack/ovn-controller-ovs-8zzgn" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.848177 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0b6a8795-4a13-49de-a9cd-c58595e216dc-var-log-ovn\") pod \"ovn-controller-j24zd\" (UID: \"0b6a8795-4a13-49de-a9cd-c58595e216dc\") " pod="openstack/ovn-controller-j24zd" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.848199 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/505e76c2-15f5-4188-bc6e-a249a8d753d0-scripts\") pod \"ovn-controller-ovs-8zzgn\" (UID: \"505e76c2-15f5-4188-bc6e-a249a8d753d0\") " pod="openstack/ovn-controller-ovs-8zzgn" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.848292 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/505e76c2-15f5-4188-bc6e-a249a8d753d0-etc-ovs\") pod \"ovn-controller-ovs-8zzgn\" (UID: \"505e76c2-15f5-4188-bc6e-a249a8d753d0\") " pod="openstack/ovn-controller-ovs-8zzgn" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.848314 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/505e76c2-15f5-4188-bc6e-a249a8d753d0-var-lib\") pod \"ovn-controller-ovs-8zzgn\" (UID: \"505e76c2-15f5-4188-bc6e-a249a8d753d0\") " pod="openstack/ovn-controller-ovs-8zzgn" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.848457 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wntfc\" (UniqueName: \"kubernetes.io/projected/0b6a8795-4a13-49de-a9cd-c58595e216dc-kube-api-access-wntfc\") pod \"ovn-controller-j24zd\" (UID: \"0b6a8795-4a13-49de-a9cd-c58595e216dc\") " pod="openstack/ovn-controller-j24zd" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.848648 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0b6a8795-4a13-49de-a9cd-c58595e216dc-var-log-ovn\") pod \"ovn-controller-j24zd\" (UID: \"0b6a8795-4a13-49de-a9cd-c58595e216dc\") " pod="openstack/ovn-controller-j24zd" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.848351 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/505e76c2-15f5-4188-bc6e-a249a8d753d0-var-lib\") pod \"ovn-controller-ovs-8zzgn\" (UID: \"505e76c2-15f5-4188-bc6e-a249a8d753d0\") " pod="openstack/ovn-controller-ovs-8zzgn" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.850620 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/505e76c2-15f5-4188-bc6e-a249a8d753d0-scripts\") pod \"ovn-controller-ovs-8zzgn\" (UID: \"505e76c2-15f5-4188-bc6e-a249a8d753d0\") " pod="openstack/ovn-controller-ovs-8zzgn" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.851838 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0b6a8795-4a13-49de-a9cd-c58595e216dc-scripts\") pod \"ovn-controller-j24zd\" (UID: \"0b6a8795-4a13-49de-a9cd-c58595e216dc\") " pod="openstack/ovn-controller-j24zd" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.872746 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wntfc\" (UniqueName: \"kubernetes.io/projected/0b6a8795-4a13-49de-a9cd-c58595e216dc-kube-api-access-wntfc\") pod \"ovn-controller-j24zd\" (UID: \"0b6a8795-4a13-49de-a9cd-c58595e216dc\") " pod="openstack/ovn-controller-j24zd" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.873924 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qlczk\" (UniqueName: \"kubernetes.io/projected/505e76c2-15f5-4188-bc6e-a249a8d753d0-kube-api-access-qlczk\") pod \"ovn-controller-ovs-8zzgn\" (UID: \"505e76c2-15f5-4188-bc6e-a249a8d753d0\") " pod="openstack/ovn-controller-ovs-8zzgn" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.959218 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-8zzgn" Oct 10 18:09:13 crc kubenswrapper[4799]: I1010 18:09:13.967189 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-j24zd" Oct 10 18:09:14 crc kubenswrapper[4799]: I1010 18:09:14.508619 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-j24zd"] Oct 10 18:09:14 crc kubenswrapper[4799]: W1010 18:09:14.516877 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0b6a8795_4a13_49de_a9cd_c58595e216dc.slice/crio-bcffca99e804ad24eee73d6016abd319b249c576bef511b09099baaf55da2a3f WatchSource:0}: Error finding container bcffca99e804ad24eee73d6016abd319b249c576bef511b09099baaf55da2a3f: Status 404 returned error can't find the container with id bcffca99e804ad24eee73d6016abd319b249c576bef511b09099baaf55da2a3f Oct 10 18:09:14 crc kubenswrapper[4799]: I1010 18:09:14.647980 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-j24zd" event={"ID":"0b6a8795-4a13-49de-a9cd-c58595e216dc","Type":"ContainerStarted","Data":"bcffca99e804ad24eee73d6016abd319b249c576bef511b09099baaf55da2a3f"} Oct 10 18:09:15 crc kubenswrapper[4799]: I1010 18:09:15.042520 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-8zzgn"] Oct 10 18:09:15 crc kubenswrapper[4799]: W1010 18:09:15.051802 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod505e76c2_15f5_4188_bc6e_a249a8d753d0.slice/crio-255484708da86ec0162ca3b95f4da88342a17eb9baae1914a0cde8fb60111414 WatchSource:0}: Error finding container 255484708da86ec0162ca3b95f4da88342a17eb9baae1914a0cde8fb60111414: Status 404 returned error can't find the container with id 255484708da86ec0162ca3b95f4da88342a17eb9baae1914a0cde8fb60111414 Oct 10 18:09:15 crc kubenswrapper[4799]: I1010 18:09:15.657469 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-8zzgn" event={"ID":"505e76c2-15f5-4188-bc6e-a249a8d753d0","Type":"ContainerStarted","Data":"5ae79a8ae0379d24c505cc25df76f665757157a9bc3a1c070957a40eea8c69a6"} Oct 10 18:09:15 crc kubenswrapper[4799]: I1010 18:09:15.657864 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-8zzgn" event={"ID":"505e76c2-15f5-4188-bc6e-a249a8d753d0","Type":"ContainerStarted","Data":"255484708da86ec0162ca3b95f4da88342a17eb9baae1914a0cde8fb60111414"} Oct 10 18:09:15 crc kubenswrapper[4799]: I1010 18:09:15.660923 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-j24zd" event={"ID":"0b6a8795-4a13-49de-a9cd-c58595e216dc","Type":"ContainerStarted","Data":"1337d24332499bbacb7984e2c0e26a853b2926a292f9c159651ef98a11e7a906"} Oct 10 18:09:15 crc kubenswrapper[4799]: I1010 18:09:15.661603 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-j24zd" Oct 10 18:09:15 crc kubenswrapper[4799]: I1010 18:09:15.717232 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-j24zd" podStartSLOduration=2.717209614 podStartE2EDuration="2.717209614s" podCreationTimestamp="2025-10-10 18:09:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:09:15.716447805 +0000 UTC m=+5849.224771950" watchObservedRunningTime="2025-10-10 18:09:15.717209614 +0000 UTC m=+5849.225533729" Oct 10 18:09:16 crc kubenswrapper[4799]: I1010 18:09:16.311259 4799 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/ovn-controller-metrics-5hxb5"] Oct 10 18:09:16 crc kubenswrapper[4799]: I1010 18:09:16.313686 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-5hxb5" Oct 10 18:09:16 crc kubenswrapper[4799]: I1010 18:09:16.316045 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Oct 10 18:09:16 crc kubenswrapper[4799]: I1010 18:09:16.321531 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-5hxb5"] Oct 10 18:09:16 crc kubenswrapper[4799]: I1010 18:09:16.408310 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfn9v\" (UniqueName: \"kubernetes.io/projected/0da0a07e-3ab0-416b-a5c2-a70d10d75135-kube-api-access-dfn9v\") pod \"ovn-controller-metrics-5hxb5\" (UID: \"0da0a07e-3ab0-416b-a5c2-a70d10d75135\") " pod="openstack/ovn-controller-metrics-5hxb5" Oct 10 18:09:16 crc kubenswrapper[4799]: I1010 18:09:16.408361 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0da0a07e-3ab0-416b-a5c2-a70d10d75135-config\") pod \"ovn-controller-metrics-5hxb5\" (UID: \"0da0a07e-3ab0-416b-a5c2-a70d10d75135\") " pod="openstack/ovn-controller-metrics-5hxb5" Oct 10 18:09:16 crc kubenswrapper[4799]: I1010 18:09:16.408416 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/0da0a07e-3ab0-416b-a5c2-a70d10d75135-ovs-rundir\") pod \"ovn-controller-metrics-5hxb5\" (UID: \"0da0a07e-3ab0-416b-a5c2-a70d10d75135\") " pod="openstack/ovn-controller-metrics-5hxb5" Oct 10 18:09:16 crc kubenswrapper[4799]: I1010 18:09:16.408464 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/0da0a07e-3ab0-416b-a5c2-a70d10d75135-ovn-rundir\") pod \"ovn-controller-metrics-5hxb5\" (UID: \"0da0a07e-3ab0-416b-a5c2-a70d10d75135\") " pod="openstack/ovn-controller-metrics-5hxb5" Oct 10 18:09:16 crc kubenswrapper[4799]: I1010 18:09:16.510259 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfn9v\" (UniqueName: \"kubernetes.io/projected/0da0a07e-3ab0-416b-a5c2-a70d10d75135-kube-api-access-dfn9v\") pod \"ovn-controller-metrics-5hxb5\" (UID: \"0da0a07e-3ab0-416b-a5c2-a70d10d75135\") " pod="openstack/ovn-controller-metrics-5hxb5" Oct 10 18:09:16 crc kubenswrapper[4799]: I1010 18:09:16.510316 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0da0a07e-3ab0-416b-a5c2-a70d10d75135-config\") pod \"ovn-controller-metrics-5hxb5\" (UID: \"0da0a07e-3ab0-416b-a5c2-a70d10d75135\") " pod="openstack/ovn-controller-metrics-5hxb5" Oct 10 18:09:16 crc kubenswrapper[4799]: I1010 18:09:16.510372 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/0da0a07e-3ab0-416b-a5c2-a70d10d75135-ovs-rundir\") pod \"ovn-controller-metrics-5hxb5\" (UID: \"0da0a07e-3ab0-416b-a5c2-a70d10d75135\") " pod="openstack/ovn-controller-metrics-5hxb5" Oct 10 18:09:16 crc kubenswrapper[4799]: I1010 18:09:16.510420 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: 
\"kubernetes.io/host-path/0da0a07e-3ab0-416b-a5c2-a70d10d75135-ovn-rundir\") pod \"ovn-controller-metrics-5hxb5\" (UID: \"0da0a07e-3ab0-416b-a5c2-a70d10d75135\") " pod="openstack/ovn-controller-metrics-5hxb5" Oct 10 18:09:16 crc kubenswrapper[4799]: I1010 18:09:16.510881 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/0da0a07e-3ab0-416b-a5c2-a70d10d75135-ovn-rundir\") pod \"ovn-controller-metrics-5hxb5\" (UID: \"0da0a07e-3ab0-416b-a5c2-a70d10d75135\") " pod="openstack/ovn-controller-metrics-5hxb5" Oct 10 18:09:16 crc kubenswrapper[4799]: I1010 18:09:16.512059 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0da0a07e-3ab0-416b-a5c2-a70d10d75135-config\") pod \"ovn-controller-metrics-5hxb5\" (UID: \"0da0a07e-3ab0-416b-a5c2-a70d10d75135\") " pod="openstack/ovn-controller-metrics-5hxb5" Oct 10 18:09:16 crc kubenswrapper[4799]: I1010 18:09:16.512136 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/0da0a07e-3ab0-416b-a5c2-a70d10d75135-ovs-rundir\") pod \"ovn-controller-metrics-5hxb5\" (UID: \"0da0a07e-3ab0-416b-a5c2-a70d10d75135\") " pod="openstack/ovn-controller-metrics-5hxb5" Oct 10 18:09:16 crc kubenswrapper[4799]: I1010 18:09:16.527852 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfn9v\" (UniqueName: \"kubernetes.io/projected/0da0a07e-3ab0-416b-a5c2-a70d10d75135-kube-api-access-dfn9v\") pod \"ovn-controller-metrics-5hxb5\" (UID: \"0da0a07e-3ab0-416b-a5c2-a70d10d75135\") " pod="openstack/ovn-controller-metrics-5hxb5" Oct 10 18:09:16 crc kubenswrapper[4799]: I1010 18:09:16.646767 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-5hxb5" Oct 10 18:09:16 crc kubenswrapper[4799]: I1010 18:09:16.676280 4799 generic.go:334] "Generic (PLEG): container finished" podID="505e76c2-15f5-4188-bc6e-a249a8d753d0" containerID="5ae79a8ae0379d24c505cc25df76f665757157a9bc3a1c070957a40eea8c69a6" exitCode=0 Oct 10 18:09:16 crc kubenswrapper[4799]: I1010 18:09:16.676372 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-8zzgn" event={"ID":"505e76c2-15f5-4188-bc6e-a249a8d753d0","Type":"ContainerDied","Data":"5ae79a8ae0379d24c505cc25df76f665757157a9bc3a1c070957a40eea8c69a6"} Oct 10 18:09:16 crc kubenswrapper[4799]: I1010 18:09:16.695195 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-db-create-sms8r"] Oct 10 18:09:16 crc kubenswrapper[4799]: I1010 18:09:16.696935 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-db-create-sms8r" Oct 10 18:09:16 crc kubenswrapper[4799]: I1010 18:09:16.710345 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-create-sms8r"] Oct 10 18:09:16 crc kubenswrapper[4799]: I1010 18:09:16.820829 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s29db\" (UniqueName: \"kubernetes.io/projected/68c3ec49-6c3d-4520-a5d9-a2e0340854bd-kube-api-access-s29db\") pod \"octavia-db-create-sms8r\" (UID: \"68c3ec49-6c3d-4520-a5d9-a2e0340854bd\") " pod="openstack/octavia-db-create-sms8r" Oct 10 18:09:16 crc kubenswrapper[4799]: I1010 18:09:16.922723 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s29db\" (UniqueName: \"kubernetes.io/projected/68c3ec49-6c3d-4520-a5d9-a2e0340854bd-kube-api-access-s29db\") pod \"octavia-db-create-sms8r\" (UID: \"68c3ec49-6c3d-4520-a5d9-a2e0340854bd\") " pod="openstack/octavia-db-create-sms8r" Oct 10 18:09:16 crc kubenswrapper[4799]: I1010 18:09:16.941374 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s29db\" (UniqueName: \"kubernetes.io/projected/68c3ec49-6c3d-4520-a5d9-a2e0340854bd-kube-api-access-s29db\") pod \"octavia-db-create-sms8r\" (UID: \"68c3ec49-6c3d-4520-a5d9-a2e0340854bd\") " pod="openstack/octavia-db-create-sms8r" Oct 10 18:09:17 crc kubenswrapper[4799]: I1010 18:09:17.106679 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-5hxb5"] Oct 10 18:09:17 crc kubenswrapper[4799]: W1010 18:09:17.110626 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0da0a07e_3ab0_416b_a5c2_a70d10d75135.slice/crio-9a458e59eb90e620cc0e82298995d35c275f6bcb4c62fc17ec8b8102e0b09e38 WatchSource:0}: Error finding container 9a458e59eb90e620cc0e82298995d35c275f6bcb4c62fc17ec8b8102e0b09e38: Status 404 returned error can't find the container with id 9a458e59eb90e620cc0e82298995d35c275f6bcb4c62fc17ec8b8102e0b09e38 Oct 10 18:09:17 crc kubenswrapper[4799]: I1010 18:09:17.120462 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-db-create-sms8r" Oct 10 18:09:17 crc kubenswrapper[4799]: I1010 18:09:17.412307 4799 scope.go:117] "RemoveContainer" containerID="6ae067b7971fd6480cb0c3ccf44d4e22f837ba4674373b4b5903247a9af39cf1" Oct 10 18:09:17 crc kubenswrapper[4799]: E1010 18:09:17.413086 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:09:17 crc kubenswrapper[4799]: I1010 18:09:17.570885 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-create-sms8r"] Oct 10 18:09:17 crc kubenswrapper[4799]: I1010 18:09:17.677073 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-lt5hp" Oct 10 18:09:17 crc kubenswrapper[4799]: I1010 18:09:17.688105 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-5hxb5" event={"ID":"0da0a07e-3ab0-416b-a5c2-a70d10d75135","Type":"ContainerStarted","Data":"02a3edf8dbfe50daa761a709dc36cc5eb21321c9cc5df3d8294509bc1c1416a2"} Oct 10 18:09:17 crc kubenswrapper[4799]: I1010 18:09:17.688158 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-5hxb5" event={"ID":"0da0a07e-3ab0-416b-a5c2-a70d10d75135","Type":"ContainerStarted","Data":"9a458e59eb90e620cc0e82298995d35c275f6bcb4c62fc17ec8b8102e0b09e38"} Oct 10 18:09:17 crc kubenswrapper[4799]: I1010 18:09:17.692229 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-8zzgn" event={"ID":"505e76c2-15f5-4188-bc6e-a249a8d753d0","Type":"ContainerStarted","Data":"6205a472c2eeee4ef3a6859a47288652e9ad8e85ed4d79330011742b68cfd581"} Oct 10 18:09:17 crc kubenswrapper[4799]: I1010 18:09:17.692256 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-8zzgn" event={"ID":"505e76c2-15f5-4188-bc6e-a249a8d753d0","Type":"ContainerStarted","Data":"897a0323369bf87bdf6866d7a545b07877fe0d7711727cc892cc4155d8914a9e"} Oct 10 18:09:17 crc kubenswrapper[4799]: I1010 18:09:17.692512 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-8zzgn" Oct 10 18:09:17 crc kubenswrapper[4799]: I1010 18:09:17.692531 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-8zzgn" Oct 10 18:09:17 crc kubenswrapper[4799]: I1010 18:09:17.700853 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-sms8r" event={"ID":"68c3ec49-6c3d-4520-a5d9-a2e0340854bd","Type":"ContainerStarted","Data":"0131ea326d4750a3d75c5456c6d9463afee2aca965e0dc12affca90acbc4d946"} Oct 10 18:09:17 crc kubenswrapper[4799]: I1010 18:09:17.726688 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-5hxb5" podStartSLOduration=1.7266664409999999 podStartE2EDuration="1.726666441s" podCreationTimestamp="2025-10-10 18:09:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:09:17.717723633 +0000 UTC m=+5851.226047768" watchObservedRunningTime="2025-10-10 18:09:17.726666441 +0000 UTC 
m=+5851.234990556" Oct 10 18:09:17 crc kubenswrapper[4799]: I1010 18:09:17.742208 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-8zzgn" podStartSLOduration=4.742187039 podStartE2EDuration="4.742187039s" podCreationTimestamp="2025-10-10 18:09:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:09:17.739038023 +0000 UTC m=+5851.247362148" watchObservedRunningTime="2025-10-10 18:09:17.742187039 +0000 UTC m=+5851.250511154" Oct 10 18:09:17 crc kubenswrapper[4799]: I1010 18:09:17.751952 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-lt5hp" Oct 10 18:09:17 crc kubenswrapper[4799]: I1010 18:09:17.917977 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lt5hp"] Oct 10 18:09:18 crc kubenswrapper[4799]: I1010 18:09:18.712620 4799 generic.go:334] "Generic (PLEG): container finished" podID="68c3ec49-6c3d-4520-a5d9-a2e0340854bd" containerID="dfe8143778a1b2423893bcad964b129586678c9d374d94729863059bc32c4be9" exitCode=0 Oct 10 18:09:18 crc kubenswrapper[4799]: I1010 18:09:18.712715 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-sms8r" event={"ID":"68c3ec49-6c3d-4520-a5d9-a2e0340854bd","Type":"ContainerDied","Data":"dfe8143778a1b2423893bcad964b129586678c9d374d94729863059bc32c4be9"} Oct 10 18:09:19 crc kubenswrapper[4799]: I1010 18:09:19.722205 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-lt5hp" podUID="410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6" containerName="registry-server" containerID="cri-o://f53a9e787bb7bc6cd23599d5eca0fcf9a184e7823a8a0d3ce568c9464bae5ea3" gracePeriod=2 Oct 10 18:09:20 crc kubenswrapper[4799]: I1010 18:09:20.142535 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-create-sms8r" Oct 10 18:09:20 crc kubenswrapper[4799]: I1010 18:09:20.241554 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lt5hp" Oct 10 18:09:20 crc kubenswrapper[4799]: I1010 18:09:20.303960 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s29db\" (UniqueName: \"kubernetes.io/projected/68c3ec49-6c3d-4520-a5d9-a2e0340854bd-kube-api-access-s29db\") pod \"68c3ec49-6c3d-4520-a5d9-a2e0340854bd\" (UID: \"68c3ec49-6c3d-4520-a5d9-a2e0340854bd\") " Oct 10 18:09:20 crc kubenswrapper[4799]: I1010 18:09:20.308613 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68c3ec49-6c3d-4520-a5d9-a2e0340854bd-kube-api-access-s29db" (OuterVolumeSpecName: "kube-api-access-s29db") pod "68c3ec49-6c3d-4520-a5d9-a2e0340854bd" (UID: "68c3ec49-6c3d-4520-a5d9-a2e0340854bd"). InnerVolumeSpecName "kube-api-access-s29db". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:09:20 crc kubenswrapper[4799]: I1010 18:09:20.405542 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6-utilities\") pod \"410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6\" (UID: \"410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6\") " Oct 10 18:09:20 crc kubenswrapper[4799]: I1010 18:09:20.405682 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6-catalog-content\") pod \"410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6\" (UID: \"410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6\") " Oct 10 18:09:20 crc kubenswrapper[4799]: I1010 18:09:20.405895 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mpzrp\" (UniqueName: \"kubernetes.io/projected/410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6-kube-api-access-mpzrp\") pod \"410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6\" (UID: \"410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6\") " Oct 10 18:09:20 crc kubenswrapper[4799]: I1010 18:09:20.406419 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s29db\" (UniqueName: \"kubernetes.io/projected/68c3ec49-6c3d-4520-a5d9-a2e0340854bd-kube-api-access-s29db\") on node \"crc\" DevicePath \"\"" Oct 10 18:09:20 crc kubenswrapper[4799]: I1010 18:09:20.407253 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6-utilities" (OuterVolumeSpecName: "utilities") pod "410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6" (UID: "410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:09:20 crc kubenswrapper[4799]: I1010 18:09:20.412662 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6-kube-api-access-mpzrp" (OuterVolumeSpecName: "kube-api-access-mpzrp") pod "410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6" (UID: "410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6"). InnerVolumeSpecName "kube-api-access-mpzrp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:09:20 crc kubenswrapper[4799]: I1010 18:09:20.495357 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6" (UID: "410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:09:20 crc kubenswrapper[4799]: I1010 18:09:20.508726 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 18:09:20 crc kubenswrapper[4799]: I1010 18:09:20.508799 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mpzrp\" (UniqueName: \"kubernetes.io/projected/410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6-kube-api-access-mpzrp\") on node \"crc\" DevicePath \"\"" Oct 10 18:09:20 crc kubenswrapper[4799]: I1010 18:09:20.508823 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 18:09:20 crc kubenswrapper[4799]: I1010 18:09:20.735905 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-sms8r" event={"ID":"68c3ec49-6c3d-4520-a5d9-a2e0340854bd","Type":"ContainerDied","Data":"0131ea326d4750a3d75c5456c6d9463afee2aca965e0dc12affca90acbc4d946"} Oct 10 18:09:20 crc kubenswrapper[4799]: I1010 18:09:20.735948 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-create-sms8r" Oct 10 18:09:20 crc kubenswrapper[4799]: I1010 18:09:20.735971 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0131ea326d4750a3d75c5456c6d9463afee2aca965e0dc12affca90acbc4d946" Oct 10 18:09:20 crc kubenswrapper[4799]: I1010 18:09:20.742592 4799 generic.go:334] "Generic (PLEG): container finished" podID="410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6" containerID="f53a9e787bb7bc6cd23599d5eca0fcf9a184e7823a8a0d3ce568c9464bae5ea3" exitCode=0 Oct 10 18:09:20 crc kubenswrapper[4799]: I1010 18:09:20.742657 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lt5hp" event={"ID":"410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6","Type":"ContainerDied","Data":"f53a9e787bb7bc6cd23599d5eca0fcf9a184e7823a8a0d3ce568c9464bae5ea3"} Oct 10 18:09:20 crc kubenswrapper[4799]: I1010 18:09:20.742697 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lt5hp" event={"ID":"410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6","Type":"ContainerDied","Data":"540b454e88cd0810ce31d57595fe661fbd17fe6254df39b1f33b1fb5cfcc10d6"} Oct 10 18:09:20 crc kubenswrapper[4799]: I1010 18:09:20.742735 4799 scope.go:117] "RemoveContainer" containerID="f53a9e787bb7bc6cd23599d5eca0fcf9a184e7823a8a0d3ce568c9464bae5ea3" Oct 10 18:09:20 crc kubenswrapper[4799]: I1010 18:09:20.743022 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lt5hp" Oct 10 18:09:20 crc kubenswrapper[4799]: I1010 18:09:20.789525 4799 scope.go:117] "RemoveContainer" containerID="d8b70f49973f4690e71012318a65f839385cf2fc52ccb0a203fe22c24c28bd71" Oct 10 18:09:20 crc kubenswrapper[4799]: I1010 18:09:20.821183 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lt5hp"] Oct 10 18:09:20 crc kubenswrapper[4799]: I1010 18:09:20.835969 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-lt5hp"] Oct 10 18:09:20 crc kubenswrapper[4799]: I1010 18:09:20.837051 4799 scope.go:117] "RemoveContainer" containerID="7cb175f883ea3c00509c84b06d5bf4287c32e79f32b0ac40759ef6475721c4d9" Oct 10 18:09:20 crc kubenswrapper[4799]: I1010 18:09:20.901110 4799 scope.go:117] "RemoveContainer" containerID="f53a9e787bb7bc6cd23599d5eca0fcf9a184e7823a8a0d3ce568c9464bae5ea3" Oct 10 18:09:20 crc kubenswrapper[4799]: E1010 18:09:20.901894 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f53a9e787bb7bc6cd23599d5eca0fcf9a184e7823a8a0d3ce568c9464bae5ea3\": container with ID starting with f53a9e787bb7bc6cd23599d5eca0fcf9a184e7823a8a0d3ce568c9464bae5ea3 not found: ID does not exist" containerID="f53a9e787bb7bc6cd23599d5eca0fcf9a184e7823a8a0d3ce568c9464bae5ea3" Oct 10 18:09:20 crc kubenswrapper[4799]: I1010 18:09:20.902064 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f53a9e787bb7bc6cd23599d5eca0fcf9a184e7823a8a0d3ce568c9464bae5ea3"} err="failed to get container status \"f53a9e787bb7bc6cd23599d5eca0fcf9a184e7823a8a0d3ce568c9464bae5ea3\": rpc error: code = NotFound desc = could not find container \"f53a9e787bb7bc6cd23599d5eca0fcf9a184e7823a8a0d3ce568c9464bae5ea3\": container with ID starting with f53a9e787bb7bc6cd23599d5eca0fcf9a184e7823a8a0d3ce568c9464bae5ea3 not found: ID does not exist" Oct 10 18:09:20 crc kubenswrapper[4799]: I1010 18:09:20.902230 4799 scope.go:117] "RemoveContainer" containerID="d8b70f49973f4690e71012318a65f839385cf2fc52ccb0a203fe22c24c28bd71" Oct 10 18:09:20 crc kubenswrapper[4799]: E1010 18:09:20.903056 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8b70f49973f4690e71012318a65f839385cf2fc52ccb0a203fe22c24c28bd71\": container with ID starting with d8b70f49973f4690e71012318a65f839385cf2fc52ccb0a203fe22c24c28bd71 not found: ID does not exist" containerID="d8b70f49973f4690e71012318a65f839385cf2fc52ccb0a203fe22c24c28bd71" Oct 10 18:09:20 crc kubenswrapper[4799]: I1010 18:09:20.903089 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8b70f49973f4690e71012318a65f839385cf2fc52ccb0a203fe22c24c28bd71"} err="failed to get container status \"d8b70f49973f4690e71012318a65f839385cf2fc52ccb0a203fe22c24c28bd71\": rpc error: code = NotFound desc = could not find container \"d8b70f49973f4690e71012318a65f839385cf2fc52ccb0a203fe22c24c28bd71\": container with ID starting with d8b70f49973f4690e71012318a65f839385cf2fc52ccb0a203fe22c24c28bd71 not found: ID does not exist" Oct 10 18:09:20 crc kubenswrapper[4799]: I1010 18:09:20.903107 4799 scope.go:117] "RemoveContainer" containerID="7cb175f883ea3c00509c84b06d5bf4287c32e79f32b0ac40759ef6475721c4d9" Oct 10 18:09:20 crc kubenswrapper[4799]: E1010 18:09:20.903301 4799 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"7cb175f883ea3c00509c84b06d5bf4287c32e79f32b0ac40759ef6475721c4d9\": container with ID starting with 7cb175f883ea3c00509c84b06d5bf4287c32e79f32b0ac40759ef6475721c4d9 not found: ID does not exist" containerID="7cb175f883ea3c00509c84b06d5bf4287c32e79f32b0ac40759ef6475721c4d9" Oct 10 18:09:20 crc kubenswrapper[4799]: I1010 18:09:20.903324 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7cb175f883ea3c00509c84b06d5bf4287c32e79f32b0ac40759ef6475721c4d9"} err="failed to get container status \"7cb175f883ea3c00509c84b06d5bf4287c32e79f32b0ac40759ef6475721c4d9\": rpc error: code = NotFound desc = could not find container \"7cb175f883ea3c00509c84b06d5bf4287c32e79f32b0ac40759ef6475721c4d9\": container with ID starting with 7cb175f883ea3c00509c84b06d5bf4287c32e79f32b0ac40759ef6475721c4d9 not found: ID does not exist" Oct 10 18:09:21 crc kubenswrapper[4799]: I1010 18:09:21.442134 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6" path="/var/lib/kubelet/pods/410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6/volumes" Oct 10 18:09:28 crc kubenswrapper[4799]: I1010 18:09:28.402873 4799 scope.go:117] "RemoveContainer" containerID="6ae067b7971fd6480cb0c3ccf44d4e22f837ba4674373b4b5903247a9af39cf1" Oct 10 18:09:28 crc kubenswrapper[4799]: E1010 18:09:28.404036 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:09:28 crc kubenswrapper[4799]: I1010 18:09:28.718071 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-85a1-account-create-nq577"] Oct 10 18:09:28 crc kubenswrapper[4799]: E1010 18:09:28.718597 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6" containerName="registry-server" Oct 10 18:09:28 crc kubenswrapper[4799]: I1010 18:09:28.718620 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6" containerName="registry-server" Oct 10 18:09:28 crc kubenswrapper[4799]: E1010 18:09:28.718634 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6" containerName="extract-content" Oct 10 18:09:28 crc kubenswrapper[4799]: I1010 18:09:28.718643 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6" containerName="extract-content" Oct 10 18:09:28 crc kubenswrapper[4799]: E1010 18:09:28.718668 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68c3ec49-6c3d-4520-a5d9-a2e0340854bd" containerName="mariadb-database-create" Oct 10 18:09:28 crc kubenswrapper[4799]: I1010 18:09:28.718676 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="68c3ec49-6c3d-4520-a5d9-a2e0340854bd" containerName="mariadb-database-create" Oct 10 18:09:28 crc kubenswrapper[4799]: E1010 18:09:28.718692 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6" containerName="extract-utilities" Oct 10 18:09:28 crc kubenswrapper[4799]: I1010 18:09:28.718700 4799 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6" containerName="extract-utilities" Oct 10 18:09:28 crc kubenswrapper[4799]: I1010 18:09:28.718960 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="410b0fd3-c6cc-427c-aa9f-dfee85f6bcb6" containerName="registry-server" Oct 10 18:09:28 crc kubenswrapper[4799]: I1010 18:09:28.718985 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="68c3ec49-6c3d-4520-a5d9-a2e0340854bd" containerName="mariadb-database-create" Oct 10 18:09:28 crc kubenswrapper[4799]: I1010 18:09:28.719755 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-85a1-account-create-nq577" Oct 10 18:09:28 crc kubenswrapper[4799]: I1010 18:09:28.733829 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-db-secret" Oct 10 18:09:28 crc kubenswrapper[4799]: I1010 18:09:28.736314 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-85a1-account-create-nq577"] Oct 10 18:09:28 crc kubenswrapper[4799]: I1010 18:09:28.885509 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hg8kl\" (UniqueName: \"kubernetes.io/projected/80a0077a-7750-4ae6-b57b-b248f493764a-kube-api-access-hg8kl\") pod \"octavia-85a1-account-create-nq577\" (UID: \"80a0077a-7750-4ae6-b57b-b248f493764a\") " pod="openstack/octavia-85a1-account-create-nq577" Oct 10 18:09:28 crc kubenswrapper[4799]: I1010 18:09:28.988625 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hg8kl\" (UniqueName: \"kubernetes.io/projected/80a0077a-7750-4ae6-b57b-b248f493764a-kube-api-access-hg8kl\") pod \"octavia-85a1-account-create-nq577\" (UID: \"80a0077a-7750-4ae6-b57b-b248f493764a\") " pod="openstack/octavia-85a1-account-create-nq577" Oct 10 18:09:29 crc kubenswrapper[4799]: I1010 18:09:29.021878 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hg8kl\" (UniqueName: \"kubernetes.io/projected/80a0077a-7750-4ae6-b57b-b248f493764a-kube-api-access-hg8kl\") pod \"octavia-85a1-account-create-nq577\" (UID: \"80a0077a-7750-4ae6-b57b-b248f493764a\") " pod="openstack/octavia-85a1-account-create-nq577" Oct 10 18:09:29 crc kubenswrapper[4799]: I1010 18:09:29.057411 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-85a1-account-create-nq577" Oct 10 18:09:29 crc kubenswrapper[4799]: I1010 18:09:29.606284 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-85a1-account-create-nq577"] Oct 10 18:09:29 crc kubenswrapper[4799]: I1010 18:09:29.861563 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-85a1-account-create-nq577" event={"ID":"80a0077a-7750-4ae6-b57b-b248f493764a","Type":"ContainerStarted","Data":"d204f8469e95b71c319d65496b75d85cad86f66b7ba3c28d595793db641a3b41"} Oct 10 18:09:29 crc kubenswrapper[4799]: I1010 18:09:29.861608 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-85a1-account-create-nq577" event={"ID":"80a0077a-7750-4ae6-b57b-b248f493764a","Type":"ContainerStarted","Data":"455a5f6a69b116e3c10f98222c66e306d4affef49e69bd9c19274241ea49a53a"} Oct 10 18:09:29 crc kubenswrapper[4799]: I1010 18:09:29.891595 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-85a1-account-create-nq577" podStartSLOduration=1.891573645 podStartE2EDuration="1.891573645s" podCreationTimestamp="2025-10-10 18:09:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:09:29.877742018 +0000 UTC m=+5863.386066133" watchObservedRunningTime="2025-10-10 18:09:29.891573645 +0000 UTC m=+5863.399897760" Oct 10 18:09:30 crc kubenswrapper[4799]: I1010 18:09:30.873333 4799 generic.go:334] "Generic (PLEG): container finished" podID="80a0077a-7750-4ae6-b57b-b248f493764a" containerID="d204f8469e95b71c319d65496b75d85cad86f66b7ba3c28d595793db641a3b41" exitCode=0 Oct 10 18:09:30 crc kubenswrapper[4799]: I1010 18:09:30.873605 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-85a1-account-create-nq577" event={"ID":"80a0077a-7750-4ae6-b57b-b248f493764a","Type":"ContainerDied","Data":"d204f8469e95b71c319d65496b75d85cad86f66b7ba3c28d595793db641a3b41"} Oct 10 18:09:32 crc kubenswrapper[4799]: I1010 18:09:32.340415 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-85a1-account-create-nq577" Oct 10 18:09:32 crc kubenswrapper[4799]: I1010 18:09:32.404431 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-gjlpm"] Oct 10 18:09:32 crc kubenswrapper[4799]: E1010 18:09:32.404954 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80a0077a-7750-4ae6-b57b-b248f493764a" containerName="mariadb-account-create" Oct 10 18:09:32 crc kubenswrapper[4799]: I1010 18:09:32.404975 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="80a0077a-7750-4ae6-b57b-b248f493764a" containerName="mariadb-account-create" Oct 10 18:09:32 crc kubenswrapper[4799]: I1010 18:09:32.405214 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="80a0077a-7750-4ae6-b57b-b248f493764a" containerName="mariadb-account-create" Oct 10 18:09:32 crc kubenswrapper[4799]: I1010 18:09:32.406929 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gjlpm" Oct 10 18:09:32 crc kubenswrapper[4799]: I1010 18:09:32.442205 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gjlpm"] Oct 10 18:09:32 crc kubenswrapper[4799]: I1010 18:09:32.462794 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hg8kl\" (UniqueName: \"kubernetes.io/projected/80a0077a-7750-4ae6-b57b-b248f493764a-kube-api-access-hg8kl\") pod \"80a0077a-7750-4ae6-b57b-b248f493764a\" (UID: \"80a0077a-7750-4ae6-b57b-b248f493764a\") " Oct 10 18:09:32 crc kubenswrapper[4799]: I1010 18:09:32.468977 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80a0077a-7750-4ae6-b57b-b248f493764a-kube-api-access-hg8kl" (OuterVolumeSpecName: "kube-api-access-hg8kl") pod "80a0077a-7750-4ae6-b57b-b248f493764a" (UID: "80a0077a-7750-4ae6-b57b-b248f493764a"). InnerVolumeSpecName "kube-api-access-hg8kl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:09:32 crc kubenswrapper[4799]: I1010 18:09:32.564542 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d0aea3f0-c0b2-4d79-9953-a42d584b70e0-utilities\") pod \"community-operators-gjlpm\" (UID: \"d0aea3f0-c0b2-4d79-9953-a42d584b70e0\") " pod="openshift-marketplace/community-operators-gjlpm" Oct 10 18:09:32 crc kubenswrapper[4799]: I1010 18:09:32.564613 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lng46\" (UniqueName: \"kubernetes.io/projected/d0aea3f0-c0b2-4d79-9953-a42d584b70e0-kube-api-access-lng46\") pod \"community-operators-gjlpm\" (UID: \"d0aea3f0-c0b2-4d79-9953-a42d584b70e0\") " pod="openshift-marketplace/community-operators-gjlpm" Oct 10 18:09:32 crc kubenswrapper[4799]: I1010 18:09:32.564927 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d0aea3f0-c0b2-4d79-9953-a42d584b70e0-catalog-content\") pod \"community-operators-gjlpm\" (UID: \"d0aea3f0-c0b2-4d79-9953-a42d584b70e0\") " pod="openshift-marketplace/community-operators-gjlpm" Oct 10 18:09:32 crc kubenswrapper[4799]: I1010 18:09:32.565354 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hg8kl\" (UniqueName: \"kubernetes.io/projected/80a0077a-7750-4ae6-b57b-b248f493764a-kube-api-access-hg8kl\") on node \"crc\" DevicePath \"\"" Oct 10 18:09:32 crc kubenswrapper[4799]: I1010 18:09:32.667247 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d0aea3f0-c0b2-4d79-9953-a42d584b70e0-utilities\") pod \"community-operators-gjlpm\" (UID: \"d0aea3f0-c0b2-4d79-9953-a42d584b70e0\") " pod="openshift-marketplace/community-operators-gjlpm" Oct 10 18:09:32 crc kubenswrapper[4799]: I1010 18:09:32.667317 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lng46\" (UniqueName: \"kubernetes.io/projected/d0aea3f0-c0b2-4d79-9953-a42d584b70e0-kube-api-access-lng46\") pod \"community-operators-gjlpm\" (UID: \"d0aea3f0-c0b2-4d79-9953-a42d584b70e0\") " pod="openshift-marketplace/community-operators-gjlpm" Oct 10 18:09:32 crc kubenswrapper[4799]: I1010 18:09:32.667391 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d0aea3f0-c0b2-4d79-9953-a42d584b70e0-catalog-content\") pod \"community-operators-gjlpm\" (UID: \"d0aea3f0-c0b2-4d79-9953-a42d584b70e0\") " pod="openshift-marketplace/community-operators-gjlpm" Oct 10 18:09:32 crc kubenswrapper[4799]: I1010 18:09:32.667812 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d0aea3f0-c0b2-4d79-9953-a42d584b70e0-utilities\") pod \"community-operators-gjlpm\" (UID: \"d0aea3f0-c0b2-4d79-9953-a42d584b70e0\") " pod="openshift-marketplace/community-operators-gjlpm" Oct 10 18:09:32 crc kubenswrapper[4799]: I1010 18:09:32.667837 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d0aea3f0-c0b2-4d79-9953-a42d584b70e0-catalog-content\") pod \"community-operators-gjlpm\" (UID: \"d0aea3f0-c0b2-4d79-9953-a42d584b70e0\") " pod="openshift-marketplace/community-operators-gjlpm" Oct 10 18:09:32 crc kubenswrapper[4799]: I1010 18:09:32.685522 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lng46\" (UniqueName: \"kubernetes.io/projected/d0aea3f0-c0b2-4d79-9953-a42d584b70e0-kube-api-access-lng46\") pod \"community-operators-gjlpm\" (UID: \"d0aea3f0-c0b2-4d79-9953-a42d584b70e0\") " pod="openshift-marketplace/community-operators-gjlpm" Oct 10 18:09:32 crc kubenswrapper[4799]: I1010 18:09:32.730660 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gjlpm" Oct 10 18:09:32 crc kubenswrapper[4799]: I1010 18:09:32.929070 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-85a1-account-create-nq577" event={"ID":"80a0077a-7750-4ae6-b57b-b248f493764a","Type":"ContainerDied","Data":"455a5f6a69b116e3c10f98222c66e306d4affef49e69bd9c19274241ea49a53a"} Oct 10 18:09:32 crc kubenswrapper[4799]: I1010 18:09:32.929337 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="455a5f6a69b116e3c10f98222c66e306d4affef49e69bd9c19274241ea49a53a" Oct 10 18:09:32 crc kubenswrapper[4799]: I1010 18:09:32.929400 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-85a1-account-create-nq577" Oct 10 18:09:33 crc kubenswrapper[4799]: E1010 18:09:33.077999 4799 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod80a0077a_7750_4ae6_b57b_b248f493764a.slice/crio-455a5f6a69b116e3c10f98222c66e306d4affef49e69bd9c19274241ea49a53a\": RecentStats: unable to find data in memory cache]" Oct 10 18:09:33 crc kubenswrapper[4799]: W1010 18:09:33.201077 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd0aea3f0_c0b2_4d79_9953_a42d584b70e0.slice/crio-fe5ebd1f6060e0ab463379e2520716b733be35a8fde998324829af2b1c834c04 WatchSource:0}: Error finding container fe5ebd1f6060e0ab463379e2520716b733be35a8fde998324829af2b1c834c04: Status 404 returned error can't find the container with id fe5ebd1f6060e0ab463379e2520716b733be35a8fde998324829af2b1c834c04 Oct 10 18:09:33 crc kubenswrapper[4799]: I1010 18:09:33.202353 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gjlpm"] Oct 10 18:09:33 crc kubenswrapper[4799]: I1010 18:09:33.955375 4799 generic.go:334] "Generic (PLEG): container finished" podID="d0aea3f0-c0b2-4d79-9953-a42d584b70e0" containerID="9541fb1b75ee0b9564e5e8d34d21c6666e00b9a360b0a217ea70f70dbfad47e5" exitCode=0 Oct 10 18:09:33 crc kubenswrapper[4799]: I1010 18:09:33.955724 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gjlpm" event={"ID":"d0aea3f0-c0b2-4d79-9953-a42d584b70e0","Type":"ContainerDied","Data":"9541fb1b75ee0b9564e5e8d34d21c6666e00b9a360b0a217ea70f70dbfad47e5"} Oct 10 18:09:33 crc kubenswrapper[4799]: I1010 18:09:33.955788 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gjlpm" event={"ID":"d0aea3f0-c0b2-4d79-9953-a42d584b70e0","Type":"ContainerStarted","Data":"fe5ebd1f6060e0ab463379e2520716b733be35a8fde998324829af2b1c834c04"} Oct 10 18:09:35 crc kubenswrapper[4799]: I1010 18:09:35.757212 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-persistence-db-create-dqhfx"] Oct 10 18:09:35 crc kubenswrapper[4799]: I1010 18:09:35.758865 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-persistence-db-create-dqhfx" Oct 10 18:09:35 crc kubenswrapper[4799]: I1010 18:09:35.783936 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-persistence-db-create-dqhfx"] Oct 10 18:09:35 crc kubenswrapper[4799]: I1010 18:09:35.933638 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwgzj\" (UniqueName: \"kubernetes.io/projected/0e69d12b-eaf4-4325-aab1-a9d6bfaca44b-kube-api-access-pwgzj\") pod \"octavia-persistence-db-create-dqhfx\" (UID: \"0e69d12b-eaf4-4325-aab1-a9d6bfaca44b\") " pod="openstack/octavia-persistence-db-create-dqhfx" Oct 10 18:09:35 crc kubenswrapper[4799]: I1010 18:09:35.979128 4799 generic.go:334] "Generic (PLEG): container finished" podID="d0aea3f0-c0b2-4d79-9953-a42d584b70e0" containerID="b66fe703c1b743bea1921b014ac14b155914d2d44e882b3454a0cc1e0319f6ff" exitCode=0 Oct 10 18:09:35 crc kubenswrapper[4799]: I1010 18:09:35.979176 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gjlpm" event={"ID":"d0aea3f0-c0b2-4d79-9953-a42d584b70e0","Type":"ContainerDied","Data":"b66fe703c1b743bea1921b014ac14b155914d2d44e882b3454a0cc1e0319f6ff"} Oct 10 18:09:36 crc kubenswrapper[4799]: I1010 18:09:36.035511 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwgzj\" (UniqueName: \"kubernetes.io/projected/0e69d12b-eaf4-4325-aab1-a9d6bfaca44b-kube-api-access-pwgzj\") pod \"octavia-persistence-db-create-dqhfx\" (UID: \"0e69d12b-eaf4-4325-aab1-a9d6bfaca44b\") " pod="openstack/octavia-persistence-db-create-dqhfx" Oct 10 18:09:36 crc kubenswrapper[4799]: I1010 18:09:36.055614 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwgzj\" (UniqueName: \"kubernetes.io/projected/0e69d12b-eaf4-4325-aab1-a9d6bfaca44b-kube-api-access-pwgzj\") pod \"octavia-persistence-db-create-dqhfx\" (UID: \"0e69d12b-eaf4-4325-aab1-a9d6bfaca44b\") " pod="openstack/octavia-persistence-db-create-dqhfx" Oct 10 18:09:36 crc kubenswrapper[4799]: I1010 18:09:36.118396 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-persistence-db-create-dqhfx" Oct 10 18:09:36 crc kubenswrapper[4799]: I1010 18:09:36.592457 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-persistence-db-create-dqhfx"] Oct 10 18:09:36 crc kubenswrapper[4799]: W1010 18:09:36.597225 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0e69d12b_eaf4_4325_aab1_a9d6bfaca44b.slice/crio-18d29322ca35308a2c868b509f3235bca88d46f6b7675ef2b242dca55a29dd61 WatchSource:0}: Error finding container 18d29322ca35308a2c868b509f3235bca88d46f6b7675ef2b242dca55a29dd61: Status 404 returned error can't find the container with id 18d29322ca35308a2c868b509f3235bca88d46f6b7675ef2b242dca55a29dd61 Oct 10 18:09:36 crc kubenswrapper[4799]: I1010 18:09:36.993657 4799 generic.go:334] "Generic (PLEG): container finished" podID="0e69d12b-eaf4-4325-aab1-a9d6bfaca44b" containerID="0ad7ff0ef387bdaebc4ddcf0acab08d06eb3579386cfd15fc10ffd544f638ac0" exitCode=0 Oct 10 18:09:36 crc kubenswrapper[4799]: I1010 18:09:36.993725 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-dqhfx" event={"ID":"0e69d12b-eaf4-4325-aab1-a9d6bfaca44b","Type":"ContainerDied","Data":"0ad7ff0ef387bdaebc4ddcf0acab08d06eb3579386cfd15fc10ffd544f638ac0"} Oct 10 18:09:36 crc kubenswrapper[4799]: I1010 18:09:36.994034 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-dqhfx" event={"ID":"0e69d12b-eaf4-4325-aab1-a9d6bfaca44b","Type":"ContainerStarted","Data":"18d29322ca35308a2c868b509f3235bca88d46f6b7675ef2b242dca55a29dd61"} Oct 10 18:09:37 crc kubenswrapper[4799]: I1010 18:09:37.000052 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gjlpm" event={"ID":"d0aea3f0-c0b2-4d79-9953-a42d584b70e0","Type":"ContainerStarted","Data":"559dee2d65b2139f309a0da227b1bf10032d30e52ba2884c23275d51d8636e83"} Oct 10 18:09:37 crc kubenswrapper[4799]: I1010 18:09:37.036771 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-gjlpm" podStartSLOduration=2.615344101 podStartE2EDuration="5.036733459s" podCreationTimestamp="2025-10-10 18:09:32 +0000 UTC" firstStartedPulling="2025-10-10 18:09:33.959629089 +0000 UTC m=+5867.467953234" lastFinishedPulling="2025-10-10 18:09:36.381018467 +0000 UTC m=+5869.889342592" observedRunningTime="2025-10-10 18:09:37.03227727 +0000 UTC m=+5870.540601405" watchObservedRunningTime="2025-10-10 18:09:37.036733459 +0000 UTC m=+5870.545057584" Oct 10 18:09:38 crc kubenswrapper[4799]: I1010 18:09:38.416912 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-persistence-db-create-dqhfx" Oct 10 18:09:38 crc kubenswrapper[4799]: I1010 18:09:38.600460 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pwgzj\" (UniqueName: \"kubernetes.io/projected/0e69d12b-eaf4-4325-aab1-a9d6bfaca44b-kube-api-access-pwgzj\") pod \"0e69d12b-eaf4-4325-aab1-a9d6bfaca44b\" (UID: \"0e69d12b-eaf4-4325-aab1-a9d6bfaca44b\") " Oct 10 18:09:38 crc kubenswrapper[4799]: I1010 18:09:38.611278 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e69d12b-eaf4-4325-aab1-a9d6bfaca44b-kube-api-access-pwgzj" (OuterVolumeSpecName: "kube-api-access-pwgzj") pod "0e69d12b-eaf4-4325-aab1-a9d6bfaca44b" (UID: "0e69d12b-eaf4-4325-aab1-a9d6bfaca44b"). InnerVolumeSpecName "kube-api-access-pwgzj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:09:38 crc kubenswrapper[4799]: I1010 18:09:38.703148 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pwgzj\" (UniqueName: \"kubernetes.io/projected/0e69d12b-eaf4-4325-aab1-a9d6bfaca44b-kube-api-access-pwgzj\") on node \"crc\" DevicePath \"\"" Oct 10 18:09:39 crc kubenswrapper[4799]: I1010 18:09:39.042788 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-dqhfx" event={"ID":"0e69d12b-eaf4-4325-aab1-a9d6bfaca44b","Type":"ContainerDied","Data":"18d29322ca35308a2c868b509f3235bca88d46f6b7675ef2b242dca55a29dd61"} Oct 10 18:09:39 crc kubenswrapper[4799]: I1010 18:09:39.042854 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="18d29322ca35308a2c868b509f3235bca88d46f6b7675ef2b242dca55a29dd61" Oct 10 18:09:39 crc kubenswrapper[4799]: I1010 18:09:39.043085 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-persistence-db-create-dqhfx" Oct 10 18:09:39 crc kubenswrapper[4799]: I1010 18:09:39.403507 4799 scope.go:117] "RemoveContainer" containerID="6ae067b7971fd6480cb0c3ccf44d4e22f837ba4674373b4b5903247a9af39cf1" Oct 10 18:09:39 crc kubenswrapper[4799]: E1010 18:09:39.404132 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:09:42 crc kubenswrapper[4799]: I1010 18:09:42.731694 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-gjlpm" Oct 10 18:09:42 crc kubenswrapper[4799]: I1010 18:09:42.732143 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-gjlpm" Oct 10 18:09:42 crc kubenswrapper[4799]: I1010 18:09:42.813957 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-gjlpm" Oct 10 18:09:43 crc kubenswrapper[4799]: I1010 18:09:43.167109 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-gjlpm" Oct 10 18:09:43 crc kubenswrapper[4799]: I1010 18:09:43.228772 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gjlpm"] Oct 10 18:09:45 crc kubenswrapper[4799]: I1010 18:09:45.121729 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-gjlpm" podUID="d0aea3f0-c0b2-4d79-9953-a42d584b70e0" containerName="registry-server" containerID="cri-o://559dee2d65b2139f309a0da227b1bf10032d30e52ba2884c23275d51d8636e83" gracePeriod=2 Oct 10 18:09:45 crc kubenswrapper[4799]: I1010 18:09:45.672954 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gjlpm" Oct 10 18:09:45 crc kubenswrapper[4799]: I1010 18:09:45.771792 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d0aea3f0-c0b2-4d79-9953-a42d584b70e0-utilities\") pod \"d0aea3f0-c0b2-4d79-9953-a42d584b70e0\" (UID: \"d0aea3f0-c0b2-4d79-9953-a42d584b70e0\") " Oct 10 18:09:45 crc kubenswrapper[4799]: I1010 18:09:45.771861 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d0aea3f0-c0b2-4d79-9953-a42d584b70e0-catalog-content\") pod \"d0aea3f0-c0b2-4d79-9953-a42d584b70e0\" (UID: \"d0aea3f0-c0b2-4d79-9953-a42d584b70e0\") " Oct 10 18:09:45 crc kubenswrapper[4799]: I1010 18:09:45.772037 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lng46\" (UniqueName: \"kubernetes.io/projected/d0aea3f0-c0b2-4d79-9953-a42d584b70e0-kube-api-access-lng46\") pod \"d0aea3f0-c0b2-4d79-9953-a42d584b70e0\" (UID: \"d0aea3f0-c0b2-4d79-9953-a42d584b70e0\") " Oct 10 18:09:45 crc kubenswrapper[4799]: I1010 18:09:45.773740 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d0aea3f0-c0b2-4d79-9953-a42d584b70e0-utilities" (OuterVolumeSpecName: "utilities") pod "d0aea3f0-c0b2-4d79-9953-a42d584b70e0" (UID: "d0aea3f0-c0b2-4d79-9953-a42d584b70e0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:09:45 crc kubenswrapper[4799]: I1010 18:09:45.779192 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0aea3f0-c0b2-4d79-9953-a42d584b70e0-kube-api-access-lng46" (OuterVolumeSpecName: "kube-api-access-lng46") pod "d0aea3f0-c0b2-4d79-9953-a42d584b70e0" (UID: "d0aea3f0-c0b2-4d79-9953-a42d584b70e0"). InnerVolumeSpecName "kube-api-access-lng46". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:09:45 crc kubenswrapper[4799]: I1010 18:09:45.873976 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lng46\" (UniqueName: \"kubernetes.io/projected/d0aea3f0-c0b2-4d79-9953-a42d584b70e0-kube-api-access-lng46\") on node \"crc\" DevicePath \"\"" Oct 10 18:09:45 crc kubenswrapper[4799]: I1010 18:09:45.874011 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d0aea3f0-c0b2-4d79-9953-a42d584b70e0-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 18:09:46 crc kubenswrapper[4799]: I1010 18:09:46.019593 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d0aea3f0-c0b2-4d79-9953-a42d584b70e0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d0aea3f0-c0b2-4d79-9953-a42d584b70e0" (UID: "d0aea3f0-c0b2-4d79-9953-a42d584b70e0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:09:46 crc kubenswrapper[4799]: I1010 18:09:46.078120 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d0aea3f0-c0b2-4d79-9953-a42d584b70e0-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 18:09:46 crc kubenswrapper[4799]: I1010 18:09:46.139742 4799 generic.go:334] "Generic (PLEG): container finished" podID="d0aea3f0-c0b2-4d79-9953-a42d584b70e0" containerID="559dee2d65b2139f309a0da227b1bf10032d30e52ba2884c23275d51d8636e83" exitCode=0 Oct 10 18:09:46 crc kubenswrapper[4799]: I1010 18:09:46.139882 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gjlpm" Oct 10 18:09:46 crc kubenswrapper[4799]: I1010 18:09:46.139824 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gjlpm" event={"ID":"d0aea3f0-c0b2-4d79-9953-a42d584b70e0","Type":"ContainerDied","Data":"559dee2d65b2139f309a0da227b1bf10032d30e52ba2884c23275d51d8636e83"} Oct 10 18:09:46 crc kubenswrapper[4799]: I1010 18:09:46.140040 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gjlpm" event={"ID":"d0aea3f0-c0b2-4d79-9953-a42d584b70e0","Type":"ContainerDied","Data":"fe5ebd1f6060e0ab463379e2520716b733be35a8fde998324829af2b1c834c04"} Oct 10 18:09:46 crc kubenswrapper[4799]: I1010 18:09:46.140170 4799 scope.go:117] "RemoveContainer" containerID="559dee2d65b2139f309a0da227b1bf10032d30e52ba2884c23275d51d8636e83" Oct 10 18:09:46 crc kubenswrapper[4799]: I1010 18:09:46.189007 4799 scope.go:117] "RemoveContainer" containerID="b66fe703c1b743bea1921b014ac14b155914d2d44e882b3454a0cc1e0319f6ff" Oct 10 18:09:46 crc kubenswrapper[4799]: I1010 18:09:46.204455 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gjlpm"] Oct 10 18:09:46 crc kubenswrapper[4799]: I1010 18:09:46.221925 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-gjlpm"] Oct 10 18:09:46 crc kubenswrapper[4799]: I1010 18:09:46.236320 4799 scope.go:117] "RemoveContainer" containerID="9541fb1b75ee0b9564e5e8d34d21c6666e00b9a360b0a217ea70f70dbfad47e5" Oct 10 18:09:46 crc kubenswrapper[4799]: I1010 18:09:46.294391 4799 scope.go:117] "RemoveContainer" containerID="559dee2d65b2139f309a0da227b1bf10032d30e52ba2884c23275d51d8636e83" Oct 10 18:09:46 crc kubenswrapper[4799]: E1010 18:09:46.295181 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"559dee2d65b2139f309a0da227b1bf10032d30e52ba2884c23275d51d8636e83\": container with ID starting with 559dee2d65b2139f309a0da227b1bf10032d30e52ba2884c23275d51d8636e83 not found: ID does not exist" containerID="559dee2d65b2139f309a0da227b1bf10032d30e52ba2884c23275d51d8636e83" Oct 10 18:09:46 crc kubenswrapper[4799]: I1010 18:09:46.295251 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"559dee2d65b2139f309a0da227b1bf10032d30e52ba2884c23275d51d8636e83"} err="failed to get container status \"559dee2d65b2139f309a0da227b1bf10032d30e52ba2884c23275d51d8636e83\": rpc error: code = NotFound desc = could not find container \"559dee2d65b2139f309a0da227b1bf10032d30e52ba2884c23275d51d8636e83\": container with ID starting with 559dee2d65b2139f309a0da227b1bf10032d30e52ba2884c23275d51d8636e83 not found: ID does not exist" Oct 10 
18:09:46 crc kubenswrapper[4799]: I1010 18:09:46.295294 4799 scope.go:117] "RemoveContainer" containerID="b66fe703c1b743bea1921b014ac14b155914d2d44e882b3454a0cc1e0319f6ff" Oct 10 18:09:46 crc kubenswrapper[4799]: E1010 18:09:46.296123 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b66fe703c1b743bea1921b014ac14b155914d2d44e882b3454a0cc1e0319f6ff\": container with ID starting with b66fe703c1b743bea1921b014ac14b155914d2d44e882b3454a0cc1e0319f6ff not found: ID does not exist" containerID="b66fe703c1b743bea1921b014ac14b155914d2d44e882b3454a0cc1e0319f6ff" Oct 10 18:09:46 crc kubenswrapper[4799]: I1010 18:09:46.296174 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b66fe703c1b743bea1921b014ac14b155914d2d44e882b3454a0cc1e0319f6ff"} err="failed to get container status \"b66fe703c1b743bea1921b014ac14b155914d2d44e882b3454a0cc1e0319f6ff\": rpc error: code = NotFound desc = could not find container \"b66fe703c1b743bea1921b014ac14b155914d2d44e882b3454a0cc1e0319f6ff\": container with ID starting with b66fe703c1b743bea1921b014ac14b155914d2d44e882b3454a0cc1e0319f6ff not found: ID does not exist" Oct 10 18:09:46 crc kubenswrapper[4799]: I1010 18:09:46.296228 4799 scope.go:117] "RemoveContainer" containerID="9541fb1b75ee0b9564e5e8d34d21c6666e00b9a360b0a217ea70f70dbfad47e5" Oct 10 18:09:46 crc kubenswrapper[4799]: E1010 18:09:46.296609 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9541fb1b75ee0b9564e5e8d34d21c6666e00b9a360b0a217ea70f70dbfad47e5\": container with ID starting with 9541fb1b75ee0b9564e5e8d34d21c6666e00b9a360b0a217ea70f70dbfad47e5 not found: ID does not exist" containerID="9541fb1b75ee0b9564e5e8d34d21c6666e00b9a360b0a217ea70f70dbfad47e5" Oct 10 18:09:46 crc kubenswrapper[4799]: I1010 18:09:46.296833 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9541fb1b75ee0b9564e5e8d34d21c6666e00b9a360b0a217ea70f70dbfad47e5"} err="failed to get container status \"9541fb1b75ee0b9564e5e8d34d21c6666e00b9a360b0a217ea70f70dbfad47e5\": rpc error: code = NotFound desc = could not find container \"9541fb1b75ee0b9564e5e8d34d21c6666e00b9a360b0a217ea70f70dbfad47e5\": container with ID starting with 9541fb1b75ee0b9564e5e8d34d21c6666e00b9a360b0a217ea70f70dbfad47e5 not found: ID does not exist" Oct 10 18:09:46 crc kubenswrapper[4799]: I1010 18:09:46.913123 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-c8c8-account-create-6q68n"] Oct 10 18:09:46 crc kubenswrapper[4799]: E1010 18:09:46.913474 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0aea3f0-c0b2-4d79-9953-a42d584b70e0" containerName="extract-utilities" Oct 10 18:09:46 crc kubenswrapper[4799]: I1010 18:09:46.913492 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0aea3f0-c0b2-4d79-9953-a42d584b70e0" containerName="extract-utilities" Oct 10 18:09:46 crc kubenswrapper[4799]: E1010 18:09:46.913506 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0aea3f0-c0b2-4d79-9953-a42d584b70e0" containerName="registry-server" Oct 10 18:09:46 crc kubenswrapper[4799]: I1010 18:09:46.913512 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0aea3f0-c0b2-4d79-9953-a42d584b70e0" containerName="registry-server" Oct 10 18:09:46 crc kubenswrapper[4799]: E1010 18:09:46.913536 4799 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="d0aea3f0-c0b2-4d79-9953-a42d584b70e0" containerName="extract-content" Oct 10 18:09:46 crc kubenswrapper[4799]: I1010 18:09:46.913543 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0aea3f0-c0b2-4d79-9953-a42d584b70e0" containerName="extract-content" Oct 10 18:09:46 crc kubenswrapper[4799]: E1010 18:09:46.913556 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e69d12b-eaf4-4325-aab1-a9d6bfaca44b" containerName="mariadb-database-create" Oct 10 18:09:46 crc kubenswrapper[4799]: I1010 18:09:46.913563 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e69d12b-eaf4-4325-aab1-a9d6bfaca44b" containerName="mariadb-database-create" Oct 10 18:09:46 crc kubenswrapper[4799]: I1010 18:09:46.913723 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0aea3f0-c0b2-4d79-9953-a42d584b70e0" containerName="registry-server" Oct 10 18:09:46 crc kubenswrapper[4799]: I1010 18:09:46.913745 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e69d12b-eaf4-4325-aab1-a9d6bfaca44b" containerName="mariadb-database-create" Oct 10 18:09:46 crc kubenswrapper[4799]: I1010 18:09:46.914453 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-c8c8-account-create-6q68n" Oct 10 18:09:46 crc kubenswrapper[4799]: I1010 18:09:46.917827 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-persistence-db-secret" Oct 10 18:09:46 crc kubenswrapper[4799]: I1010 18:09:46.942808 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-c8c8-account-create-6q68n"] Oct 10 18:09:47 crc kubenswrapper[4799]: I1010 18:09:47.025913 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phx92\" (UniqueName: \"kubernetes.io/projected/553e946c-8009-464a-a913-289757441985-kube-api-access-phx92\") pod \"octavia-c8c8-account-create-6q68n\" (UID: \"553e946c-8009-464a-a913-289757441985\") " pod="openstack/octavia-c8c8-account-create-6q68n" Oct 10 18:09:47 crc kubenswrapper[4799]: I1010 18:09:47.128098 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phx92\" (UniqueName: \"kubernetes.io/projected/553e946c-8009-464a-a913-289757441985-kube-api-access-phx92\") pod \"octavia-c8c8-account-create-6q68n\" (UID: \"553e946c-8009-464a-a913-289757441985\") " pod="openstack/octavia-c8c8-account-create-6q68n" Oct 10 18:09:47 crc kubenswrapper[4799]: I1010 18:09:47.165793 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phx92\" (UniqueName: \"kubernetes.io/projected/553e946c-8009-464a-a913-289757441985-kube-api-access-phx92\") pod \"octavia-c8c8-account-create-6q68n\" (UID: \"553e946c-8009-464a-a913-289757441985\") " pod="openstack/octavia-c8c8-account-create-6q68n" Oct 10 18:09:47 crc kubenswrapper[4799]: I1010 18:09:47.240589 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-c8c8-account-create-6q68n" Oct 10 18:09:47 crc kubenswrapper[4799]: I1010 18:09:47.416710 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0aea3f0-c0b2-4d79-9953-a42d584b70e0" path="/var/lib/kubelet/pods/d0aea3f0-c0b2-4d79-9953-a42d584b70e0/volumes" Oct 10 18:09:47 crc kubenswrapper[4799]: I1010 18:09:47.743742 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-c8c8-account-create-6q68n"] Oct 10 18:09:47 crc kubenswrapper[4799]: W1010 18:09:47.747313 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod553e946c_8009_464a_a913_289757441985.slice/crio-ec14572e906482ddc09518b4a302da61aa0526ba11daeccd78c6e5340ca8c4d8 WatchSource:0}: Error finding container ec14572e906482ddc09518b4a302da61aa0526ba11daeccd78c6e5340ca8c4d8: Status 404 returned error can't find the container with id ec14572e906482ddc09518b4a302da61aa0526ba11daeccd78c6e5340ca8c4d8 Oct 10 18:09:47 crc kubenswrapper[4799]: I1010 18:09:47.753313 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-persistence-db-secret" Oct 10 18:09:48 crc kubenswrapper[4799]: I1010 18:09:48.180870 4799 generic.go:334] "Generic (PLEG): container finished" podID="553e946c-8009-464a-a913-289757441985" containerID="5fd86085811fd050d3e1c53b62c78027061a240943e4b5e18afa917b5f6889a7" exitCode=0 Oct 10 18:09:48 crc kubenswrapper[4799]: I1010 18:09:48.180924 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-c8c8-account-create-6q68n" event={"ID":"553e946c-8009-464a-a913-289757441985","Type":"ContainerDied","Data":"5fd86085811fd050d3e1c53b62c78027061a240943e4b5e18afa917b5f6889a7"} Oct 10 18:09:48 crc kubenswrapper[4799]: I1010 18:09:48.180957 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-c8c8-account-create-6q68n" event={"ID":"553e946c-8009-464a-a913-289757441985","Type":"ContainerStarted","Data":"ec14572e906482ddc09518b4a302da61aa0526ba11daeccd78c6e5340ca8c4d8"} Oct 10 18:09:49 crc kubenswrapper[4799]: I1010 18:09:49.009289 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-8zzgn" Oct 10 18:09:49 crc kubenswrapper[4799]: I1010 18:09:49.017615 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-8zzgn" Oct 10 18:09:49 crc kubenswrapper[4799]: I1010 18:09:49.025573 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-j24zd" podUID="0b6a8795-4a13-49de-a9cd-c58595e216dc" containerName="ovn-controller" probeResult="failure" output=< Oct 10 18:09:49 crc kubenswrapper[4799]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Oct 10 18:09:49 crc kubenswrapper[4799]: > Oct 10 18:09:49 crc kubenswrapper[4799]: I1010 18:09:49.137198 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-j24zd-config-d5gmz"] Oct 10 18:09:49 crc kubenswrapper[4799]: I1010 18:09:49.138495 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-j24zd-config-d5gmz" Oct 10 18:09:49 crc kubenswrapper[4799]: I1010 18:09:49.141060 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Oct 10 18:09:49 crc kubenswrapper[4799]: I1010 18:09:49.150916 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-j24zd-config-d5gmz"] Oct 10 18:09:49 crc kubenswrapper[4799]: I1010 18:09:49.272661 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/33d3bf3e-2918-40f9-8197-2d58e9ad358d-var-run\") pod \"ovn-controller-j24zd-config-d5gmz\" (UID: \"33d3bf3e-2918-40f9-8197-2d58e9ad358d\") " pod="openstack/ovn-controller-j24zd-config-d5gmz" Oct 10 18:09:49 crc kubenswrapper[4799]: I1010 18:09:49.272939 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/33d3bf3e-2918-40f9-8197-2d58e9ad358d-var-log-ovn\") pod \"ovn-controller-j24zd-config-d5gmz\" (UID: \"33d3bf3e-2918-40f9-8197-2d58e9ad358d\") " pod="openstack/ovn-controller-j24zd-config-d5gmz" Oct 10 18:09:49 crc kubenswrapper[4799]: I1010 18:09:49.273256 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/33d3bf3e-2918-40f9-8197-2d58e9ad358d-var-run-ovn\") pod \"ovn-controller-j24zd-config-d5gmz\" (UID: \"33d3bf3e-2918-40f9-8197-2d58e9ad358d\") " pod="openstack/ovn-controller-j24zd-config-d5gmz" Oct 10 18:09:49 crc kubenswrapper[4799]: I1010 18:09:49.273324 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/33d3bf3e-2918-40f9-8197-2d58e9ad358d-additional-scripts\") pod \"ovn-controller-j24zd-config-d5gmz\" (UID: \"33d3bf3e-2918-40f9-8197-2d58e9ad358d\") " pod="openstack/ovn-controller-j24zd-config-d5gmz" Oct 10 18:09:49 crc kubenswrapper[4799]: I1010 18:09:49.273420 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/33d3bf3e-2918-40f9-8197-2d58e9ad358d-scripts\") pod \"ovn-controller-j24zd-config-d5gmz\" (UID: \"33d3bf3e-2918-40f9-8197-2d58e9ad358d\") " pod="openstack/ovn-controller-j24zd-config-d5gmz" Oct 10 18:09:49 crc kubenswrapper[4799]: I1010 18:09:49.273550 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brlw8\" (UniqueName: \"kubernetes.io/projected/33d3bf3e-2918-40f9-8197-2d58e9ad358d-kube-api-access-brlw8\") pod \"ovn-controller-j24zd-config-d5gmz\" (UID: \"33d3bf3e-2918-40f9-8197-2d58e9ad358d\") " pod="openstack/ovn-controller-j24zd-config-d5gmz" Oct 10 18:09:49 crc kubenswrapper[4799]: I1010 18:09:49.375079 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/33d3bf3e-2918-40f9-8197-2d58e9ad358d-var-run\") pod \"ovn-controller-j24zd-config-d5gmz\" (UID: \"33d3bf3e-2918-40f9-8197-2d58e9ad358d\") " pod="openstack/ovn-controller-j24zd-config-d5gmz" Oct 10 18:09:49 crc kubenswrapper[4799]: I1010 18:09:49.375163 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/33d3bf3e-2918-40f9-8197-2d58e9ad358d-var-log-ovn\") pod 
\"ovn-controller-j24zd-config-d5gmz\" (UID: \"33d3bf3e-2918-40f9-8197-2d58e9ad358d\") " pod="openstack/ovn-controller-j24zd-config-d5gmz" Oct 10 18:09:49 crc kubenswrapper[4799]: I1010 18:09:49.375234 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/33d3bf3e-2918-40f9-8197-2d58e9ad358d-var-run-ovn\") pod \"ovn-controller-j24zd-config-d5gmz\" (UID: \"33d3bf3e-2918-40f9-8197-2d58e9ad358d\") " pod="openstack/ovn-controller-j24zd-config-d5gmz" Oct 10 18:09:49 crc kubenswrapper[4799]: I1010 18:09:49.375262 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/33d3bf3e-2918-40f9-8197-2d58e9ad358d-additional-scripts\") pod \"ovn-controller-j24zd-config-d5gmz\" (UID: \"33d3bf3e-2918-40f9-8197-2d58e9ad358d\") " pod="openstack/ovn-controller-j24zd-config-d5gmz" Oct 10 18:09:49 crc kubenswrapper[4799]: I1010 18:09:49.375299 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/33d3bf3e-2918-40f9-8197-2d58e9ad358d-scripts\") pod \"ovn-controller-j24zd-config-d5gmz\" (UID: \"33d3bf3e-2918-40f9-8197-2d58e9ad358d\") " pod="openstack/ovn-controller-j24zd-config-d5gmz" Oct 10 18:09:49 crc kubenswrapper[4799]: I1010 18:09:49.375349 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brlw8\" (UniqueName: \"kubernetes.io/projected/33d3bf3e-2918-40f9-8197-2d58e9ad358d-kube-api-access-brlw8\") pod \"ovn-controller-j24zd-config-d5gmz\" (UID: \"33d3bf3e-2918-40f9-8197-2d58e9ad358d\") " pod="openstack/ovn-controller-j24zd-config-d5gmz" Oct 10 18:09:49 crc kubenswrapper[4799]: I1010 18:09:49.376341 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/33d3bf3e-2918-40f9-8197-2d58e9ad358d-var-run\") pod \"ovn-controller-j24zd-config-d5gmz\" (UID: \"33d3bf3e-2918-40f9-8197-2d58e9ad358d\") " pod="openstack/ovn-controller-j24zd-config-d5gmz" Oct 10 18:09:49 crc kubenswrapper[4799]: I1010 18:09:49.377151 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/33d3bf3e-2918-40f9-8197-2d58e9ad358d-additional-scripts\") pod \"ovn-controller-j24zd-config-d5gmz\" (UID: \"33d3bf3e-2918-40f9-8197-2d58e9ad358d\") " pod="openstack/ovn-controller-j24zd-config-d5gmz" Oct 10 18:09:49 crc kubenswrapper[4799]: I1010 18:09:49.377223 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/33d3bf3e-2918-40f9-8197-2d58e9ad358d-var-log-ovn\") pod \"ovn-controller-j24zd-config-d5gmz\" (UID: \"33d3bf3e-2918-40f9-8197-2d58e9ad358d\") " pod="openstack/ovn-controller-j24zd-config-d5gmz" Oct 10 18:09:49 crc kubenswrapper[4799]: I1010 18:09:49.377269 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/33d3bf3e-2918-40f9-8197-2d58e9ad358d-var-run-ovn\") pod \"ovn-controller-j24zd-config-d5gmz\" (UID: \"33d3bf3e-2918-40f9-8197-2d58e9ad358d\") " pod="openstack/ovn-controller-j24zd-config-d5gmz" Oct 10 18:09:49 crc kubenswrapper[4799]: I1010 18:09:49.379452 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/33d3bf3e-2918-40f9-8197-2d58e9ad358d-scripts\") pod 
\"ovn-controller-j24zd-config-d5gmz\" (UID: \"33d3bf3e-2918-40f9-8197-2d58e9ad358d\") " pod="openstack/ovn-controller-j24zd-config-d5gmz" Oct 10 18:09:49 crc kubenswrapper[4799]: I1010 18:09:49.413413 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brlw8\" (UniqueName: \"kubernetes.io/projected/33d3bf3e-2918-40f9-8197-2d58e9ad358d-kube-api-access-brlw8\") pod \"ovn-controller-j24zd-config-d5gmz\" (UID: \"33d3bf3e-2918-40f9-8197-2d58e9ad358d\") " pod="openstack/ovn-controller-j24zd-config-d5gmz" Oct 10 18:09:49 crc kubenswrapper[4799]: I1010 18:09:49.456178 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-j24zd-config-d5gmz" Oct 10 18:09:49 crc kubenswrapper[4799]: I1010 18:09:49.620146 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-c8c8-account-create-6q68n" Oct 10 18:09:49 crc kubenswrapper[4799]: I1010 18:09:49.681502 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-phx92\" (UniqueName: \"kubernetes.io/projected/553e946c-8009-464a-a913-289757441985-kube-api-access-phx92\") pod \"553e946c-8009-464a-a913-289757441985\" (UID: \"553e946c-8009-464a-a913-289757441985\") " Oct 10 18:09:49 crc kubenswrapper[4799]: I1010 18:09:49.685807 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/553e946c-8009-464a-a913-289757441985-kube-api-access-phx92" (OuterVolumeSpecName: "kube-api-access-phx92") pod "553e946c-8009-464a-a913-289757441985" (UID: "553e946c-8009-464a-a913-289757441985"). InnerVolumeSpecName "kube-api-access-phx92". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:09:49 crc kubenswrapper[4799]: I1010 18:09:49.784049 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-phx92\" (UniqueName: \"kubernetes.io/projected/553e946c-8009-464a-a913-289757441985-kube-api-access-phx92\") on node \"crc\" DevicePath \"\"" Oct 10 18:09:49 crc kubenswrapper[4799]: I1010 18:09:49.942203 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-j24zd-config-d5gmz"] Oct 10 18:09:49 crc kubenswrapper[4799]: W1010 18:09:49.956351 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod33d3bf3e_2918_40f9_8197_2d58e9ad358d.slice/crio-e29207c90d486eae72d1bd187844e91bbb066a2d9eed57f73612360ef2169316 WatchSource:0}: Error finding container e29207c90d486eae72d1bd187844e91bbb066a2d9eed57f73612360ef2169316: Status 404 returned error can't find the container with id e29207c90d486eae72d1bd187844e91bbb066a2d9eed57f73612360ef2169316 Oct 10 18:09:50 crc kubenswrapper[4799]: I1010 18:09:50.212094 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-j24zd-config-d5gmz" event={"ID":"33d3bf3e-2918-40f9-8197-2d58e9ad358d","Type":"ContainerStarted","Data":"e29207c90d486eae72d1bd187844e91bbb066a2d9eed57f73612360ef2169316"} Oct 10 18:09:50 crc kubenswrapper[4799]: I1010 18:09:50.214286 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-c8c8-account-create-6q68n" event={"ID":"553e946c-8009-464a-a913-289757441985","Type":"ContainerDied","Data":"ec14572e906482ddc09518b4a302da61aa0526ba11daeccd78c6e5340ca8c4d8"} Oct 10 18:09:50 crc kubenswrapper[4799]: I1010 18:09:50.214372 4799 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="ec14572e906482ddc09518b4a302da61aa0526ba11daeccd78c6e5340ca8c4d8" Oct 10 18:09:50 crc kubenswrapper[4799]: I1010 18:09:50.214375 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-c8c8-account-create-6q68n" Oct 10 18:09:51 crc kubenswrapper[4799]: I1010 18:09:51.231316 4799 generic.go:334] "Generic (PLEG): container finished" podID="33d3bf3e-2918-40f9-8197-2d58e9ad358d" containerID="fc16a722f190cc8007d428edd16bbb2a95388dcf8c5e37789f36b923514d7e78" exitCode=0 Oct 10 18:09:51 crc kubenswrapper[4799]: I1010 18:09:51.231484 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-j24zd-config-d5gmz" event={"ID":"33d3bf3e-2918-40f9-8197-2d58e9ad358d","Type":"ContainerDied","Data":"fc16a722f190cc8007d428edd16bbb2a95388dcf8c5e37789f36b923514d7e78"} Oct 10 18:09:52 crc kubenswrapper[4799]: I1010 18:09:52.403981 4799 scope.go:117] "RemoveContainer" containerID="6ae067b7971fd6480cb0c3ccf44d4e22f837ba4674373b4b5903247a9af39cf1" Oct 10 18:09:52 crc kubenswrapper[4799]: E1010 18:09:52.405034 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:09:52 crc kubenswrapper[4799]: I1010 18:09:52.670779 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-j24zd-config-d5gmz" Oct 10 18:09:52 crc kubenswrapper[4799]: I1010 18:09:52.745444 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/33d3bf3e-2918-40f9-8197-2d58e9ad358d-additional-scripts\") pod \"33d3bf3e-2918-40f9-8197-2d58e9ad358d\" (UID: \"33d3bf3e-2918-40f9-8197-2d58e9ad358d\") " Oct 10 18:09:52 crc kubenswrapper[4799]: I1010 18:09:52.745849 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-brlw8\" (UniqueName: \"kubernetes.io/projected/33d3bf3e-2918-40f9-8197-2d58e9ad358d-kube-api-access-brlw8\") pod \"33d3bf3e-2918-40f9-8197-2d58e9ad358d\" (UID: \"33d3bf3e-2918-40f9-8197-2d58e9ad358d\") " Oct 10 18:09:52 crc kubenswrapper[4799]: I1010 18:09:52.746100 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/33d3bf3e-2918-40f9-8197-2d58e9ad358d-var-run-ovn\") pod \"33d3bf3e-2918-40f9-8197-2d58e9ad358d\" (UID: \"33d3bf3e-2918-40f9-8197-2d58e9ad358d\") " Oct 10 18:09:52 crc kubenswrapper[4799]: I1010 18:09:52.746188 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/33d3bf3e-2918-40f9-8197-2d58e9ad358d-var-log-ovn\") pod \"33d3bf3e-2918-40f9-8197-2d58e9ad358d\" (UID: \"33d3bf3e-2918-40f9-8197-2d58e9ad358d\") " Oct 10 18:09:52 crc kubenswrapper[4799]: I1010 18:09:52.746237 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/33d3bf3e-2918-40f9-8197-2d58e9ad358d-var-run\") pod \"33d3bf3e-2918-40f9-8197-2d58e9ad358d\" (UID: \"33d3bf3e-2918-40f9-8197-2d58e9ad358d\") " Oct 10 18:09:52 crc kubenswrapper[4799]: I1010 18:09:52.746268 4799 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/33d3bf3e-2918-40f9-8197-2d58e9ad358d-scripts\") pod \"33d3bf3e-2918-40f9-8197-2d58e9ad358d\" (UID: \"33d3bf3e-2918-40f9-8197-2d58e9ad358d\") " Oct 10 18:09:52 crc kubenswrapper[4799]: I1010 18:09:52.746239 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/33d3bf3e-2918-40f9-8197-2d58e9ad358d-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "33d3bf3e-2918-40f9-8197-2d58e9ad358d" (UID: "33d3bf3e-2918-40f9-8197-2d58e9ad358d"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 18:09:52 crc kubenswrapper[4799]: I1010 18:09:52.746295 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/33d3bf3e-2918-40f9-8197-2d58e9ad358d-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "33d3bf3e-2918-40f9-8197-2d58e9ad358d" (UID: "33d3bf3e-2918-40f9-8197-2d58e9ad358d"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 18:09:52 crc kubenswrapper[4799]: I1010 18:09:52.746347 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/33d3bf3e-2918-40f9-8197-2d58e9ad358d-var-run" (OuterVolumeSpecName: "var-run") pod "33d3bf3e-2918-40f9-8197-2d58e9ad358d" (UID: "33d3bf3e-2918-40f9-8197-2d58e9ad358d"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 18:09:52 crc kubenswrapper[4799]: I1010 18:09:52.746921 4799 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/33d3bf3e-2918-40f9-8197-2d58e9ad358d-var-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 10 18:09:52 crc kubenswrapper[4799]: I1010 18:09:52.746954 4799 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/33d3bf3e-2918-40f9-8197-2d58e9ad358d-var-log-ovn\") on node \"crc\" DevicePath \"\"" Oct 10 18:09:52 crc kubenswrapper[4799]: I1010 18:09:52.746973 4799 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/33d3bf3e-2918-40f9-8197-2d58e9ad358d-var-run\") on node \"crc\" DevicePath \"\"" Oct 10 18:09:52 crc kubenswrapper[4799]: I1010 18:09:52.746975 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/33d3bf3e-2918-40f9-8197-2d58e9ad358d-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "33d3bf3e-2918-40f9-8197-2d58e9ad358d" (UID: "33d3bf3e-2918-40f9-8197-2d58e9ad358d"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:09:52 crc kubenswrapper[4799]: I1010 18:09:52.747386 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/33d3bf3e-2918-40f9-8197-2d58e9ad358d-scripts" (OuterVolumeSpecName: "scripts") pod "33d3bf3e-2918-40f9-8197-2d58e9ad358d" (UID: "33d3bf3e-2918-40f9-8197-2d58e9ad358d"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:09:52 crc kubenswrapper[4799]: I1010 18:09:52.753449 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33d3bf3e-2918-40f9-8197-2d58e9ad358d-kube-api-access-brlw8" (OuterVolumeSpecName: "kube-api-access-brlw8") pod "33d3bf3e-2918-40f9-8197-2d58e9ad358d" (UID: "33d3bf3e-2918-40f9-8197-2d58e9ad358d"). InnerVolumeSpecName "kube-api-access-brlw8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:09:52 crc kubenswrapper[4799]: I1010 18:09:52.849321 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/33d3bf3e-2918-40f9-8197-2d58e9ad358d-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 18:09:52 crc kubenswrapper[4799]: I1010 18:09:52.849374 4799 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/33d3bf3e-2918-40f9-8197-2d58e9ad358d-additional-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 18:09:52 crc kubenswrapper[4799]: I1010 18:09:52.849395 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-brlw8\" (UniqueName: \"kubernetes.io/projected/33d3bf3e-2918-40f9-8197-2d58e9ad358d-kube-api-access-brlw8\") on node \"crc\" DevicePath \"\"" Oct 10 18:09:53 crc kubenswrapper[4799]: I1010 18:09:53.255511 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-j24zd-config-d5gmz" event={"ID":"33d3bf3e-2918-40f9-8197-2d58e9ad358d","Type":"ContainerDied","Data":"e29207c90d486eae72d1bd187844e91bbb066a2d9eed57f73612360ef2169316"} Oct 10 18:09:53 crc kubenswrapper[4799]: I1010 18:09:53.255566 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e29207c90d486eae72d1bd187844e91bbb066a2d9eed57f73612360ef2169316" Oct 10 18:09:53 crc kubenswrapper[4799]: I1010 18:09:53.255641 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-j24zd-config-d5gmz" Oct 10 18:09:53 crc kubenswrapper[4799]: I1010 18:09:53.257021 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-api-65d4ffcdbb-6xxlk"] Oct 10 18:09:53 crc kubenswrapper[4799]: E1010 18:09:53.257429 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33d3bf3e-2918-40f9-8197-2d58e9ad358d" containerName="ovn-config" Oct 10 18:09:53 crc kubenswrapper[4799]: I1010 18:09:53.257447 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="33d3bf3e-2918-40f9-8197-2d58e9ad358d" containerName="ovn-config" Oct 10 18:09:53 crc kubenswrapper[4799]: E1010 18:09:53.257462 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="553e946c-8009-464a-a913-289757441985" containerName="mariadb-account-create" Oct 10 18:09:53 crc kubenswrapper[4799]: I1010 18:09:53.257469 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="553e946c-8009-464a-a913-289757441985" containerName="mariadb-account-create" Oct 10 18:09:53 crc kubenswrapper[4799]: I1010 18:09:53.257675 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="33d3bf3e-2918-40f9-8197-2d58e9ad358d" containerName="ovn-config" Oct 10 18:09:53 crc kubenswrapper[4799]: I1010 18:09:53.257693 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="553e946c-8009-464a-a913-289757441985" containerName="mariadb-account-create" Oct 10 18:09:53 crc kubenswrapper[4799]: I1010 18:09:53.259033 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-api-65d4ffcdbb-6xxlk" Oct 10 18:09:53 crc kubenswrapper[4799]: I1010 18:09:53.263975 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-api-scripts" Oct 10 18:09:53 crc kubenswrapper[4799]: I1010 18:09:53.264116 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-octavia-dockercfg-p2s56" Oct 10 18:09:53 crc kubenswrapper[4799]: I1010 18:09:53.267289 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-api-config-data" Oct 10 18:09:53 crc kubenswrapper[4799]: I1010 18:09:53.270413 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-api-65d4ffcdbb-6xxlk"] Oct 10 18:09:53 crc kubenswrapper[4799]: I1010 18:09:53.359641 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f349eb02-eae6-4047-852e-bd65208edf94-combined-ca-bundle\") pod \"octavia-api-65d4ffcdbb-6xxlk\" (UID: \"f349eb02-eae6-4047-852e-bd65208edf94\") " pod="openstack/octavia-api-65d4ffcdbb-6xxlk" Oct 10 18:09:53 crc kubenswrapper[4799]: I1010 18:09:53.359731 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f349eb02-eae6-4047-852e-bd65208edf94-config-data\") pod \"octavia-api-65d4ffcdbb-6xxlk\" (UID: \"f349eb02-eae6-4047-852e-bd65208edf94\") " pod="openstack/octavia-api-65d4ffcdbb-6xxlk" Oct 10 18:09:53 crc kubenswrapper[4799]: I1010 18:09:53.359928 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f349eb02-eae6-4047-852e-bd65208edf94-scripts\") pod \"octavia-api-65d4ffcdbb-6xxlk\" (UID: \"f349eb02-eae6-4047-852e-bd65208edf94\") " pod="openstack/octavia-api-65d4ffcdbb-6xxlk" Oct 10 18:09:53 crc kubenswrapper[4799]: I1010 18:09:53.360109 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/f349eb02-eae6-4047-852e-bd65208edf94-config-data-merged\") pod \"octavia-api-65d4ffcdbb-6xxlk\" (UID: \"f349eb02-eae6-4047-852e-bd65208edf94\") " pod="openstack/octavia-api-65d4ffcdbb-6xxlk" Oct 10 18:09:53 crc kubenswrapper[4799]: I1010 18:09:53.360183 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/f349eb02-eae6-4047-852e-bd65208edf94-octavia-run\") pod \"octavia-api-65d4ffcdbb-6xxlk\" (UID: \"f349eb02-eae6-4047-852e-bd65208edf94\") " pod="openstack/octavia-api-65d4ffcdbb-6xxlk" Oct 10 18:09:53 crc kubenswrapper[4799]: I1010 18:09:53.462261 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/f349eb02-eae6-4047-852e-bd65208edf94-config-data-merged\") pod \"octavia-api-65d4ffcdbb-6xxlk\" (UID: \"f349eb02-eae6-4047-852e-bd65208edf94\") " pod="openstack/octavia-api-65d4ffcdbb-6xxlk" Oct 10 18:09:53 crc kubenswrapper[4799]: I1010 18:09:53.462346 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/f349eb02-eae6-4047-852e-bd65208edf94-octavia-run\") pod \"octavia-api-65d4ffcdbb-6xxlk\" (UID: \"f349eb02-eae6-4047-852e-bd65208edf94\") " pod="openstack/octavia-api-65d4ffcdbb-6xxlk" Oct 10 
18:09:53 crc kubenswrapper[4799]: I1010 18:09:53.462420 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f349eb02-eae6-4047-852e-bd65208edf94-combined-ca-bundle\") pod \"octavia-api-65d4ffcdbb-6xxlk\" (UID: \"f349eb02-eae6-4047-852e-bd65208edf94\") " pod="openstack/octavia-api-65d4ffcdbb-6xxlk" Oct 10 18:09:53 crc kubenswrapper[4799]: I1010 18:09:53.462483 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f349eb02-eae6-4047-852e-bd65208edf94-config-data\") pod \"octavia-api-65d4ffcdbb-6xxlk\" (UID: \"f349eb02-eae6-4047-852e-bd65208edf94\") " pod="openstack/octavia-api-65d4ffcdbb-6xxlk" Oct 10 18:09:53 crc kubenswrapper[4799]: I1010 18:09:53.462534 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f349eb02-eae6-4047-852e-bd65208edf94-scripts\") pod \"octavia-api-65d4ffcdbb-6xxlk\" (UID: \"f349eb02-eae6-4047-852e-bd65208edf94\") " pod="openstack/octavia-api-65d4ffcdbb-6xxlk" Oct 10 18:09:53 crc kubenswrapper[4799]: I1010 18:09:53.462717 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/f349eb02-eae6-4047-852e-bd65208edf94-config-data-merged\") pod \"octavia-api-65d4ffcdbb-6xxlk\" (UID: \"f349eb02-eae6-4047-852e-bd65208edf94\") " pod="openstack/octavia-api-65d4ffcdbb-6xxlk" Oct 10 18:09:53 crc kubenswrapper[4799]: I1010 18:09:53.463668 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/f349eb02-eae6-4047-852e-bd65208edf94-octavia-run\") pod \"octavia-api-65d4ffcdbb-6xxlk\" (UID: \"f349eb02-eae6-4047-852e-bd65208edf94\") " pod="openstack/octavia-api-65d4ffcdbb-6xxlk" Oct 10 18:09:53 crc kubenswrapper[4799]: I1010 18:09:53.479381 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f349eb02-eae6-4047-852e-bd65208edf94-scripts\") pod \"octavia-api-65d4ffcdbb-6xxlk\" (UID: \"f349eb02-eae6-4047-852e-bd65208edf94\") " pod="openstack/octavia-api-65d4ffcdbb-6xxlk" Oct 10 18:09:53 crc kubenswrapper[4799]: I1010 18:09:53.479410 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f349eb02-eae6-4047-852e-bd65208edf94-combined-ca-bundle\") pod \"octavia-api-65d4ffcdbb-6xxlk\" (UID: \"f349eb02-eae6-4047-852e-bd65208edf94\") " pod="openstack/octavia-api-65d4ffcdbb-6xxlk" Oct 10 18:09:53 crc kubenswrapper[4799]: I1010 18:09:53.479661 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f349eb02-eae6-4047-852e-bd65208edf94-config-data\") pod \"octavia-api-65d4ffcdbb-6xxlk\" (UID: \"f349eb02-eae6-4047-852e-bd65208edf94\") " pod="openstack/octavia-api-65d4ffcdbb-6xxlk" Oct 10 18:09:53 crc kubenswrapper[4799]: I1010 18:09:53.586940 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-api-65d4ffcdbb-6xxlk" Oct 10 18:09:53 crc kubenswrapper[4799]: I1010 18:09:53.761512 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-j24zd-config-d5gmz"] Oct 10 18:09:53 crc kubenswrapper[4799]: I1010 18:09:53.773230 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-j24zd-config-d5gmz"] Oct 10 18:09:53 crc kubenswrapper[4799]: I1010 18:09:53.877894 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-j24zd-config-5k2m9"] Oct 10 18:09:53 crc kubenswrapper[4799]: I1010 18:09:53.879251 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-j24zd-config-5k2m9" Oct 10 18:09:53 crc kubenswrapper[4799]: I1010 18:09:53.880999 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Oct 10 18:09:53 crc kubenswrapper[4799]: I1010 18:09:53.894154 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-j24zd-config-5k2m9"] Oct 10 18:09:54 crc kubenswrapper[4799]: I1010 18:09:54.045435 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-api-65d4ffcdbb-6xxlk"] Oct 10 18:09:54 crc kubenswrapper[4799]: I1010 18:09:54.047359 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-j24zd" Oct 10 18:09:54 crc kubenswrapper[4799]: W1010 18:09:54.063662 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf349eb02_eae6_4047_852e_bd65208edf94.slice/crio-e44787d12c928be8c9281078cee8ed5fd73cf07a6c793d2abd28bb973099917f WatchSource:0}: Error finding container e44787d12c928be8c9281078cee8ed5fd73cf07a6c793d2abd28bb973099917f: Status 404 returned error can't find the container with id e44787d12c928be8c9281078cee8ed5fd73cf07a6c793d2abd28bb973099917f Oct 10 18:09:54 crc kubenswrapper[4799]: I1010 18:09:54.070946 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 10 18:09:54 crc kubenswrapper[4799]: I1010 18:09:54.071096 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/6e0225c6-c2a2-427f-9da7-6cb19b681341-additional-scripts\") pod \"ovn-controller-j24zd-config-5k2m9\" (UID: \"6e0225c6-c2a2-427f-9da7-6cb19b681341\") " pod="openstack/ovn-controller-j24zd-config-5k2m9" Oct 10 18:09:54 crc kubenswrapper[4799]: I1010 18:09:54.071130 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5qg5n\" (UniqueName: \"kubernetes.io/projected/6e0225c6-c2a2-427f-9da7-6cb19b681341-kube-api-access-5qg5n\") pod \"ovn-controller-j24zd-config-5k2m9\" (UID: \"6e0225c6-c2a2-427f-9da7-6cb19b681341\") " pod="openstack/ovn-controller-j24zd-config-5k2m9" Oct 10 18:09:54 crc kubenswrapper[4799]: I1010 18:09:54.071393 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/6e0225c6-c2a2-427f-9da7-6cb19b681341-var-run-ovn\") pod \"ovn-controller-j24zd-config-5k2m9\" (UID: \"6e0225c6-c2a2-427f-9da7-6cb19b681341\") " pod="openstack/ovn-controller-j24zd-config-5k2m9" Oct 10 18:09:54 crc kubenswrapper[4799]: I1010 18:09:54.071425 4799 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6e0225c6-c2a2-427f-9da7-6cb19b681341-var-run\") pod \"ovn-controller-j24zd-config-5k2m9\" (UID: \"6e0225c6-c2a2-427f-9da7-6cb19b681341\") " pod="openstack/ovn-controller-j24zd-config-5k2m9" Oct 10 18:09:54 crc kubenswrapper[4799]: I1010 18:09:54.071534 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6e0225c6-c2a2-427f-9da7-6cb19b681341-scripts\") pod \"ovn-controller-j24zd-config-5k2m9\" (UID: \"6e0225c6-c2a2-427f-9da7-6cb19b681341\") " pod="openstack/ovn-controller-j24zd-config-5k2m9" Oct 10 18:09:54 crc kubenswrapper[4799]: I1010 18:09:54.071623 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/6e0225c6-c2a2-427f-9da7-6cb19b681341-var-log-ovn\") pod \"ovn-controller-j24zd-config-5k2m9\" (UID: \"6e0225c6-c2a2-427f-9da7-6cb19b681341\") " pod="openstack/ovn-controller-j24zd-config-5k2m9" Oct 10 18:09:54 crc kubenswrapper[4799]: I1010 18:09:54.173555 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/6e0225c6-c2a2-427f-9da7-6cb19b681341-var-log-ovn\") pod \"ovn-controller-j24zd-config-5k2m9\" (UID: \"6e0225c6-c2a2-427f-9da7-6cb19b681341\") " pod="openstack/ovn-controller-j24zd-config-5k2m9" Oct 10 18:09:54 crc kubenswrapper[4799]: I1010 18:09:54.173654 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/6e0225c6-c2a2-427f-9da7-6cb19b681341-additional-scripts\") pod \"ovn-controller-j24zd-config-5k2m9\" (UID: \"6e0225c6-c2a2-427f-9da7-6cb19b681341\") " pod="openstack/ovn-controller-j24zd-config-5k2m9" Oct 10 18:09:54 crc kubenswrapper[4799]: I1010 18:09:54.173672 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5qg5n\" (UniqueName: \"kubernetes.io/projected/6e0225c6-c2a2-427f-9da7-6cb19b681341-kube-api-access-5qg5n\") pod \"ovn-controller-j24zd-config-5k2m9\" (UID: \"6e0225c6-c2a2-427f-9da7-6cb19b681341\") " pod="openstack/ovn-controller-j24zd-config-5k2m9" Oct 10 18:09:54 crc kubenswrapper[4799]: I1010 18:09:54.173739 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/6e0225c6-c2a2-427f-9da7-6cb19b681341-var-run-ovn\") pod \"ovn-controller-j24zd-config-5k2m9\" (UID: \"6e0225c6-c2a2-427f-9da7-6cb19b681341\") " pod="openstack/ovn-controller-j24zd-config-5k2m9" Oct 10 18:09:54 crc kubenswrapper[4799]: I1010 18:09:54.173789 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6e0225c6-c2a2-427f-9da7-6cb19b681341-var-run\") pod \"ovn-controller-j24zd-config-5k2m9\" (UID: \"6e0225c6-c2a2-427f-9da7-6cb19b681341\") " pod="openstack/ovn-controller-j24zd-config-5k2m9" Oct 10 18:09:54 crc kubenswrapper[4799]: I1010 18:09:54.173837 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6e0225c6-c2a2-427f-9da7-6cb19b681341-scripts\") pod \"ovn-controller-j24zd-config-5k2m9\" (UID: \"6e0225c6-c2a2-427f-9da7-6cb19b681341\") " pod="openstack/ovn-controller-j24zd-config-5k2m9" Oct 10 18:09:54 crc kubenswrapper[4799]: I1010 
18:09:54.173913 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/6e0225c6-c2a2-427f-9da7-6cb19b681341-var-log-ovn\") pod \"ovn-controller-j24zd-config-5k2m9\" (UID: \"6e0225c6-c2a2-427f-9da7-6cb19b681341\") " pod="openstack/ovn-controller-j24zd-config-5k2m9" Oct 10 18:09:54 crc kubenswrapper[4799]: I1010 18:09:54.173978 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6e0225c6-c2a2-427f-9da7-6cb19b681341-var-run\") pod \"ovn-controller-j24zd-config-5k2m9\" (UID: \"6e0225c6-c2a2-427f-9da7-6cb19b681341\") " pod="openstack/ovn-controller-j24zd-config-5k2m9" Oct 10 18:09:54 crc kubenswrapper[4799]: I1010 18:09:54.174079 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/6e0225c6-c2a2-427f-9da7-6cb19b681341-var-run-ovn\") pod \"ovn-controller-j24zd-config-5k2m9\" (UID: \"6e0225c6-c2a2-427f-9da7-6cb19b681341\") " pod="openstack/ovn-controller-j24zd-config-5k2m9" Oct 10 18:09:54 crc kubenswrapper[4799]: I1010 18:09:54.174482 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/6e0225c6-c2a2-427f-9da7-6cb19b681341-additional-scripts\") pod \"ovn-controller-j24zd-config-5k2m9\" (UID: \"6e0225c6-c2a2-427f-9da7-6cb19b681341\") " pod="openstack/ovn-controller-j24zd-config-5k2m9" Oct 10 18:09:54 crc kubenswrapper[4799]: I1010 18:09:54.175778 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6e0225c6-c2a2-427f-9da7-6cb19b681341-scripts\") pod \"ovn-controller-j24zd-config-5k2m9\" (UID: \"6e0225c6-c2a2-427f-9da7-6cb19b681341\") " pod="openstack/ovn-controller-j24zd-config-5k2m9" Oct 10 18:09:54 crc kubenswrapper[4799]: I1010 18:09:54.192250 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5qg5n\" (UniqueName: \"kubernetes.io/projected/6e0225c6-c2a2-427f-9da7-6cb19b681341-kube-api-access-5qg5n\") pod \"ovn-controller-j24zd-config-5k2m9\" (UID: \"6e0225c6-c2a2-427f-9da7-6cb19b681341\") " pod="openstack/ovn-controller-j24zd-config-5k2m9" Oct 10 18:09:54 crc kubenswrapper[4799]: I1010 18:09:54.206998 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-j24zd-config-5k2m9" Oct 10 18:09:54 crc kubenswrapper[4799]: I1010 18:09:54.296131 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-65d4ffcdbb-6xxlk" event={"ID":"f349eb02-eae6-4047-852e-bd65208edf94","Type":"ContainerStarted","Data":"e44787d12c928be8c9281078cee8ed5fd73cf07a6c793d2abd28bb973099917f"} Oct 10 18:09:54 crc kubenswrapper[4799]: I1010 18:09:54.705416 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-j24zd-config-5k2m9"] Oct 10 18:09:54 crc kubenswrapper[4799]: W1010 18:09:54.707032 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6e0225c6_c2a2_427f_9da7_6cb19b681341.slice/crio-8498096d44f860066d9a7ae361cdfc23f00f4ac377ab9e0a61dc459fda89f8c7 WatchSource:0}: Error finding container 8498096d44f860066d9a7ae361cdfc23f00f4ac377ab9e0a61dc459fda89f8c7: Status 404 returned error can't find the container with id 8498096d44f860066d9a7ae361cdfc23f00f4ac377ab9e0a61dc459fda89f8c7 Oct 10 18:09:55 crc kubenswrapper[4799]: I1010 18:09:55.309805 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-j24zd-config-5k2m9" event={"ID":"6e0225c6-c2a2-427f-9da7-6cb19b681341","Type":"ContainerStarted","Data":"bfd496f44ac8bb7a6b44b9240f3b4a21fdda6e8b9ff22c4519d59683a6630cc7"} Oct 10 18:09:55 crc kubenswrapper[4799]: I1010 18:09:55.310084 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-j24zd-config-5k2m9" event={"ID":"6e0225c6-c2a2-427f-9da7-6cb19b681341","Type":"ContainerStarted","Data":"8498096d44f860066d9a7ae361cdfc23f00f4ac377ab9e0a61dc459fda89f8c7"} Oct 10 18:09:55 crc kubenswrapper[4799]: I1010 18:09:55.331974 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-j24zd-config-5k2m9" podStartSLOduration=2.33192009 podStartE2EDuration="2.33192009s" podCreationTimestamp="2025-10-10 18:09:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:09:55.324410287 +0000 UTC m=+5888.832734412" watchObservedRunningTime="2025-10-10 18:09:55.33192009 +0000 UTC m=+5888.840244205" Oct 10 18:09:55 crc kubenswrapper[4799]: I1010 18:09:55.417132 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33d3bf3e-2918-40f9-8197-2d58e9ad358d" path="/var/lib/kubelet/pods/33d3bf3e-2918-40f9-8197-2d58e9ad358d/volumes" Oct 10 18:09:56 crc kubenswrapper[4799]: I1010 18:09:56.320494 4799 generic.go:334] "Generic (PLEG): container finished" podID="6e0225c6-c2a2-427f-9da7-6cb19b681341" containerID="bfd496f44ac8bb7a6b44b9240f3b4a21fdda6e8b9ff22c4519d59683a6630cc7" exitCode=0 Oct 10 18:09:56 crc kubenswrapper[4799]: I1010 18:09:56.320538 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-j24zd-config-5k2m9" event={"ID":"6e0225c6-c2a2-427f-9da7-6cb19b681341","Type":"ContainerDied","Data":"bfd496f44ac8bb7a6b44b9240f3b4a21fdda6e8b9ff22c4519d59683a6630cc7"} Oct 10 18:10:03 crc kubenswrapper[4799]: I1010 18:10:03.526971 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-j24zd-config-5k2m9" Oct 10 18:10:03 crc kubenswrapper[4799]: I1010 18:10:03.682943 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/6e0225c6-c2a2-427f-9da7-6cb19b681341-additional-scripts\") pod \"6e0225c6-c2a2-427f-9da7-6cb19b681341\" (UID: \"6e0225c6-c2a2-427f-9da7-6cb19b681341\") " Oct 10 18:10:03 crc kubenswrapper[4799]: I1010 18:10:03.683195 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6e0225c6-c2a2-427f-9da7-6cb19b681341-var-run\") pod \"6e0225c6-c2a2-427f-9da7-6cb19b681341\" (UID: \"6e0225c6-c2a2-427f-9da7-6cb19b681341\") " Oct 10 18:10:03 crc kubenswrapper[4799]: I1010 18:10:03.683273 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5qg5n\" (UniqueName: \"kubernetes.io/projected/6e0225c6-c2a2-427f-9da7-6cb19b681341-kube-api-access-5qg5n\") pod \"6e0225c6-c2a2-427f-9da7-6cb19b681341\" (UID: \"6e0225c6-c2a2-427f-9da7-6cb19b681341\") " Oct 10 18:10:03 crc kubenswrapper[4799]: I1010 18:10:03.683335 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6e0225c6-c2a2-427f-9da7-6cb19b681341-scripts\") pod \"6e0225c6-c2a2-427f-9da7-6cb19b681341\" (UID: \"6e0225c6-c2a2-427f-9da7-6cb19b681341\") " Oct 10 18:10:03 crc kubenswrapper[4799]: I1010 18:10:03.683336 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6e0225c6-c2a2-427f-9da7-6cb19b681341-var-run" (OuterVolumeSpecName: "var-run") pod "6e0225c6-c2a2-427f-9da7-6cb19b681341" (UID: "6e0225c6-c2a2-427f-9da7-6cb19b681341"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 18:10:03 crc kubenswrapper[4799]: I1010 18:10:03.683458 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/6e0225c6-c2a2-427f-9da7-6cb19b681341-var-run-ovn\") pod \"6e0225c6-c2a2-427f-9da7-6cb19b681341\" (UID: \"6e0225c6-c2a2-427f-9da7-6cb19b681341\") " Oct 10 18:10:03 crc kubenswrapper[4799]: I1010 18:10:03.683510 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/6e0225c6-c2a2-427f-9da7-6cb19b681341-var-log-ovn\") pod \"6e0225c6-c2a2-427f-9da7-6cb19b681341\" (UID: \"6e0225c6-c2a2-427f-9da7-6cb19b681341\") " Oct 10 18:10:03 crc kubenswrapper[4799]: I1010 18:10:03.683605 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e0225c6-c2a2-427f-9da7-6cb19b681341-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "6e0225c6-c2a2-427f-9da7-6cb19b681341" (UID: "6e0225c6-c2a2-427f-9da7-6cb19b681341"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:10:03 crc kubenswrapper[4799]: I1010 18:10:03.683708 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6e0225c6-c2a2-427f-9da7-6cb19b681341-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "6e0225c6-c2a2-427f-9da7-6cb19b681341" (UID: "6e0225c6-c2a2-427f-9da7-6cb19b681341"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 18:10:03 crc kubenswrapper[4799]: I1010 18:10:03.684538 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e0225c6-c2a2-427f-9da7-6cb19b681341-scripts" (OuterVolumeSpecName: "scripts") pod "6e0225c6-c2a2-427f-9da7-6cb19b681341" (UID: "6e0225c6-c2a2-427f-9da7-6cb19b681341"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:10:03 crc kubenswrapper[4799]: I1010 18:10:03.684579 4799 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/6e0225c6-c2a2-427f-9da7-6cb19b681341-additional-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 18:10:03 crc kubenswrapper[4799]: I1010 18:10:03.684614 4799 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6e0225c6-c2a2-427f-9da7-6cb19b681341-var-run\") on node \"crc\" DevicePath \"\"" Oct 10 18:10:03 crc kubenswrapper[4799]: I1010 18:10:03.684635 4799 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/6e0225c6-c2a2-427f-9da7-6cb19b681341-var-log-ovn\") on node \"crc\" DevicePath \"\"" Oct 10 18:10:03 crc kubenswrapper[4799]: I1010 18:10:03.684674 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6e0225c6-c2a2-427f-9da7-6cb19b681341-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "6e0225c6-c2a2-427f-9da7-6cb19b681341" (UID: "6e0225c6-c2a2-427f-9da7-6cb19b681341"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 18:10:03 crc kubenswrapper[4799]: I1010 18:10:03.689601 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e0225c6-c2a2-427f-9da7-6cb19b681341-kube-api-access-5qg5n" (OuterVolumeSpecName: "kube-api-access-5qg5n") pod "6e0225c6-c2a2-427f-9da7-6cb19b681341" (UID: "6e0225c6-c2a2-427f-9da7-6cb19b681341"). InnerVolumeSpecName "kube-api-access-5qg5n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:10:03 crc kubenswrapper[4799]: I1010 18:10:03.786080 4799 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/6e0225c6-c2a2-427f-9da7-6cb19b681341-var-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 10 18:10:03 crc kubenswrapper[4799]: I1010 18:10:03.786126 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5qg5n\" (UniqueName: \"kubernetes.io/projected/6e0225c6-c2a2-427f-9da7-6cb19b681341-kube-api-access-5qg5n\") on node \"crc\" DevicePath \"\"" Oct 10 18:10:03 crc kubenswrapper[4799]: I1010 18:10:03.786140 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6e0225c6-c2a2-427f-9da7-6cb19b681341-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 18:10:04 crc kubenswrapper[4799]: I1010 18:10:04.408674 4799 generic.go:334] "Generic (PLEG): container finished" podID="f349eb02-eae6-4047-852e-bd65208edf94" containerID="8083e9de4999d3213de3ed83fe043ce531a3e8d1c9789e7c6665c694934d810e" exitCode=0 Oct 10 18:10:04 crc kubenswrapper[4799]: I1010 18:10:04.408746 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-65d4ffcdbb-6xxlk" event={"ID":"f349eb02-eae6-4047-852e-bd65208edf94","Type":"ContainerDied","Data":"8083e9de4999d3213de3ed83fe043ce531a3e8d1c9789e7c6665c694934d810e"} Oct 10 18:10:04 crc kubenswrapper[4799]: I1010 18:10:04.412995 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-j24zd-config-5k2m9" event={"ID":"6e0225c6-c2a2-427f-9da7-6cb19b681341","Type":"ContainerDied","Data":"8498096d44f860066d9a7ae361cdfc23f00f4ac377ab9e0a61dc459fda89f8c7"} Oct 10 18:10:04 crc kubenswrapper[4799]: I1010 18:10:04.413037 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8498096d44f860066d9a7ae361cdfc23f00f4ac377ab9e0a61dc459fda89f8c7" Oct 10 18:10:04 crc kubenswrapper[4799]: I1010 18:10:04.413037 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-j24zd-config-5k2m9" Oct 10 18:10:04 crc kubenswrapper[4799]: I1010 18:10:04.620079 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-j24zd-config-5k2m9"] Oct 10 18:10:04 crc kubenswrapper[4799]: I1010 18:10:04.629410 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-j24zd-config-5k2m9"] Oct 10 18:10:05 crc kubenswrapper[4799]: I1010 18:10:05.402888 4799 scope.go:117] "RemoveContainer" containerID="6ae067b7971fd6480cb0c3ccf44d4e22f837ba4674373b4b5903247a9af39cf1" Oct 10 18:10:05 crc kubenswrapper[4799]: E1010 18:10:05.403432 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:10:05 crc kubenswrapper[4799]: I1010 18:10:05.414077 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e0225c6-c2a2-427f-9da7-6cb19b681341" path="/var/lib/kubelet/pods/6e0225c6-c2a2-427f-9da7-6cb19b681341/volumes" Oct 10 18:10:05 crc kubenswrapper[4799]: I1010 18:10:05.431442 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-65d4ffcdbb-6xxlk" event={"ID":"f349eb02-eae6-4047-852e-bd65208edf94","Type":"ContainerStarted","Data":"0bc944f56e360ae8d38fabac6adf60ea02bee351b7852a778586f7db90d48cdc"} Oct 10 18:10:05 crc kubenswrapper[4799]: I1010 18:10:05.431499 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-65d4ffcdbb-6xxlk" event={"ID":"f349eb02-eae6-4047-852e-bd65208edf94","Type":"ContainerStarted","Data":"3e67bb8039f81e77b56a30f40a820da750eb4f795bac01397cdd90dd94967807"} Oct 10 18:10:05 crc kubenswrapper[4799]: I1010 18:10:05.431999 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-api-65d4ffcdbb-6xxlk" Oct 10 18:10:05 crc kubenswrapper[4799]: I1010 18:10:05.460458 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-api-65d4ffcdbb-6xxlk" podStartSLOduration=2.9951597359999997 podStartE2EDuration="12.460435579s" podCreationTimestamp="2025-10-10 18:09:53 +0000 UTC" firstStartedPulling="2025-10-10 18:09:54.070618607 +0000 UTC m=+5887.578942722" lastFinishedPulling="2025-10-10 18:10:03.53589445 +0000 UTC m=+5897.044218565" observedRunningTime="2025-10-10 18:10:05.456454092 +0000 UTC m=+5898.964778207" watchObservedRunningTime="2025-10-10 18:10:05.460435579 +0000 UTC m=+5898.968759694" Oct 10 18:10:06 crc kubenswrapper[4799]: I1010 18:10:06.440733 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-api-65d4ffcdbb-6xxlk" Oct 10 18:10:13 crc kubenswrapper[4799]: I1010 18:10:13.944873 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-rsyslog-nth2b"] Oct 10 18:10:13 crc kubenswrapper[4799]: E1010 18:10:13.946114 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e0225c6-c2a2-427f-9da7-6cb19b681341" containerName="ovn-config" Oct 10 18:10:13 crc kubenswrapper[4799]: I1010 18:10:13.946138 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e0225c6-c2a2-427f-9da7-6cb19b681341" containerName="ovn-config" Oct 10 18:10:13 crc kubenswrapper[4799]: I1010 
18:10:13.946532 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e0225c6-c2a2-427f-9da7-6cb19b681341" containerName="ovn-config" Oct 10 18:10:13 crc kubenswrapper[4799]: I1010 18:10:13.948393 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-rsyslog-nth2b" Oct 10 18:10:13 crc kubenswrapper[4799]: I1010 18:10:13.950640 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-rsyslog-config-data" Oct 10 18:10:13 crc kubenswrapper[4799]: I1010 18:10:13.951256 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-rsyslog-scripts" Oct 10 18:10:13 crc kubenswrapper[4799]: I1010 18:10:13.951617 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"octavia-hmport-map" Oct 10 18:10:13 crc kubenswrapper[4799]: I1010 18:10:13.960497 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-rsyslog-nth2b"] Oct 10 18:10:14 crc kubenswrapper[4799]: I1010 18:10:14.022806 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/62197d1d-2108-4294-96f3-afe7487d515b-hm-ports\") pod \"octavia-rsyslog-nth2b\" (UID: \"62197d1d-2108-4294-96f3-afe7487d515b\") " pod="openstack/octavia-rsyslog-nth2b" Oct 10 18:10:14 crc kubenswrapper[4799]: I1010 18:10:14.022895 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62197d1d-2108-4294-96f3-afe7487d515b-scripts\") pod \"octavia-rsyslog-nth2b\" (UID: \"62197d1d-2108-4294-96f3-afe7487d515b\") " pod="openstack/octavia-rsyslog-nth2b" Oct 10 18:10:14 crc kubenswrapper[4799]: I1010 18:10:14.023012 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62197d1d-2108-4294-96f3-afe7487d515b-config-data\") pod \"octavia-rsyslog-nth2b\" (UID: \"62197d1d-2108-4294-96f3-afe7487d515b\") " pod="openstack/octavia-rsyslog-nth2b" Oct 10 18:10:14 crc kubenswrapper[4799]: I1010 18:10:14.023099 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/62197d1d-2108-4294-96f3-afe7487d515b-config-data-merged\") pod \"octavia-rsyslog-nth2b\" (UID: \"62197d1d-2108-4294-96f3-afe7487d515b\") " pod="openstack/octavia-rsyslog-nth2b" Oct 10 18:10:14 crc kubenswrapper[4799]: I1010 18:10:14.124517 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/62197d1d-2108-4294-96f3-afe7487d515b-hm-ports\") pod \"octavia-rsyslog-nth2b\" (UID: \"62197d1d-2108-4294-96f3-afe7487d515b\") " pod="openstack/octavia-rsyslog-nth2b" Oct 10 18:10:14 crc kubenswrapper[4799]: I1010 18:10:14.124610 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62197d1d-2108-4294-96f3-afe7487d515b-scripts\") pod \"octavia-rsyslog-nth2b\" (UID: \"62197d1d-2108-4294-96f3-afe7487d515b\") " pod="openstack/octavia-rsyslog-nth2b" Oct 10 18:10:14 crc kubenswrapper[4799]: I1010 18:10:14.124714 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62197d1d-2108-4294-96f3-afe7487d515b-config-data\") pod \"octavia-rsyslog-nth2b\" (UID: 
\"62197d1d-2108-4294-96f3-afe7487d515b\") " pod="openstack/octavia-rsyslog-nth2b" Oct 10 18:10:14 crc kubenswrapper[4799]: I1010 18:10:14.124811 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/62197d1d-2108-4294-96f3-afe7487d515b-config-data-merged\") pod \"octavia-rsyslog-nth2b\" (UID: \"62197d1d-2108-4294-96f3-afe7487d515b\") " pod="openstack/octavia-rsyslog-nth2b" Oct 10 18:10:14 crc kubenswrapper[4799]: I1010 18:10:14.125333 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/62197d1d-2108-4294-96f3-afe7487d515b-hm-ports\") pod \"octavia-rsyslog-nth2b\" (UID: \"62197d1d-2108-4294-96f3-afe7487d515b\") " pod="openstack/octavia-rsyslog-nth2b" Oct 10 18:10:14 crc kubenswrapper[4799]: I1010 18:10:14.125356 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/62197d1d-2108-4294-96f3-afe7487d515b-config-data-merged\") pod \"octavia-rsyslog-nth2b\" (UID: \"62197d1d-2108-4294-96f3-afe7487d515b\") " pod="openstack/octavia-rsyslog-nth2b" Oct 10 18:10:14 crc kubenswrapper[4799]: I1010 18:10:14.133331 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62197d1d-2108-4294-96f3-afe7487d515b-scripts\") pod \"octavia-rsyslog-nth2b\" (UID: \"62197d1d-2108-4294-96f3-afe7487d515b\") " pod="openstack/octavia-rsyslog-nth2b" Oct 10 18:10:14 crc kubenswrapper[4799]: I1010 18:10:14.147019 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62197d1d-2108-4294-96f3-afe7487d515b-config-data\") pod \"octavia-rsyslog-nth2b\" (UID: \"62197d1d-2108-4294-96f3-afe7487d515b\") " pod="openstack/octavia-rsyslog-nth2b" Oct 10 18:10:14 crc kubenswrapper[4799]: I1010 18:10:14.276303 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-rsyslog-nth2b" Oct 10 18:10:15 crc kubenswrapper[4799]: I1010 18:10:15.003020 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-image-upload-678599687f-dlr8w"] Oct 10 18:10:15 crc kubenswrapper[4799]: I1010 18:10:15.005698 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-image-upload-678599687f-dlr8w" Oct 10 18:10:15 crc kubenswrapper[4799]: I1010 18:10:15.008325 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-config-data" Oct 10 18:10:15 crc kubenswrapper[4799]: I1010 18:10:15.026088 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-678599687f-dlr8w"] Oct 10 18:10:15 crc kubenswrapper[4799]: I1010 18:10:15.047370 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b254e109-c7f5-4d7f-811d-21072cc98789-httpd-config\") pod \"octavia-image-upload-678599687f-dlr8w\" (UID: \"b254e109-c7f5-4d7f-811d-21072cc98789\") " pod="openstack/octavia-image-upload-678599687f-dlr8w" Oct 10 18:10:15 crc kubenswrapper[4799]: I1010 18:10:15.047825 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/b254e109-c7f5-4d7f-811d-21072cc98789-amphora-image\") pod \"octavia-image-upload-678599687f-dlr8w\" (UID: \"b254e109-c7f5-4d7f-811d-21072cc98789\") " pod="openstack/octavia-image-upload-678599687f-dlr8w" Oct 10 18:10:15 crc kubenswrapper[4799]: I1010 18:10:15.150520 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b254e109-c7f5-4d7f-811d-21072cc98789-httpd-config\") pod \"octavia-image-upload-678599687f-dlr8w\" (UID: \"b254e109-c7f5-4d7f-811d-21072cc98789\") " pod="openstack/octavia-image-upload-678599687f-dlr8w" Oct 10 18:10:15 crc kubenswrapper[4799]: I1010 18:10:15.151190 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/b254e109-c7f5-4d7f-811d-21072cc98789-amphora-image\") pod \"octavia-image-upload-678599687f-dlr8w\" (UID: \"b254e109-c7f5-4d7f-811d-21072cc98789\") " pod="openstack/octavia-image-upload-678599687f-dlr8w" Oct 10 18:10:15 crc kubenswrapper[4799]: I1010 18:10:15.151601 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/b254e109-c7f5-4d7f-811d-21072cc98789-amphora-image\") pod \"octavia-image-upload-678599687f-dlr8w\" (UID: \"b254e109-c7f5-4d7f-811d-21072cc98789\") " pod="openstack/octavia-image-upload-678599687f-dlr8w" Oct 10 18:10:15 crc kubenswrapper[4799]: I1010 18:10:15.163370 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b254e109-c7f5-4d7f-811d-21072cc98789-httpd-config\") pod \"octavia-image-upload-678599687f-dlr8w\" (UID: \"b254e109-c7f5-4d7f-811d-21072cc98789\") " pod="openstack/octavia-image-upload-678599687f-dlr8w" Oct 10 18:10:15 crc kubenswrapper[4799]: I1010 18:10:15.334644 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-image-upload-678599687f-dlr8w" Oct 10 18:10:16 crc kubenswrapper[4799]: I1010 18:10:15.520787 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-rsyslog-nth2b"] Oct 10 18:10:16 crc kubenswrapper[4799]: I1010 18:10:15.551017 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-nth2b" event={"ID":"62197d1d-2108-4294-96f3-afe7487d515b","Type":"ContainerStarted","Data":"53ffa01e6121d91dcd903391af1d5e54cacfcb3ddaedfe2ef2accffbb50bf087"} Oct 10 18:10:16 crc kubenswrapper[4799]: I1010 18:10:16.479133 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-678599687f-dlr8w"] Oct 10 18:10:16 crc kubenswrapper[4799]: I1010 18:10:16.725325 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-db-sync-hfpxv"] Oct 10 18:10:16 crc kubenswrapper[4799]: I1010 18:10:16.728158 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-sync-hfpxv" Oct 10 18:10:16 crc kubenswrapper[4799]: I1010 18:10:16.731345 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-scripts" Oct 10 18:10:16 crc kubenswrapper[4799]: I1010 18:10:16.751521 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-sync-hfpxv"] Oct 10 18:10:16 crc kubenswrapper[4799]: I1010 18:10:16.798575 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/188baf6a-09f1-4a8b-9454-b67f2cb0dada-scripts\") pod \"octavia-db-sync-hfpxv\" (UID: \"188baf6a-09f1-4a8b-9454-b67f2cb0dada\") " pod="openstack/octavia-db-sync-hfpxv" Oct 10 18:10:16 crc kubenswrapper[4799]: I1010 18:10:16.798765 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/188baf6a-09f1-4a8b-9454-b67f2cb0dada-config-data-merged\") pod \"octavia-db-sync-hfpxv\" (UID: \"188baf6a-09f1-4a8b-9454-b67f2cb0dada\") " pod="openstack/octavia-db-sync-hfpxv" Oct 10 18:10:16 crc kubenswrapper[4799]: I1010 18:10:16.799050 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/188baf6a-09f1-4a8b-9454-b67f2cb0dada-config-data\") pod \"octavia-db-sync-hfpxv\" (UID: \"188baf6a-09f1-4a8b-9454-b67f2cb0dada\") " pod="openstack/octavia-db-sync-hfpxv" Oct 10 18:10:16 crc kubenswrapper[4799]: I1010 18:10:16.799143 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/188baf6a-09f1-4a8b-9454-b67f2cb0dada-combined-ca-bundle\") pod \"octavia-db-sync-hfpxv\" (UID: \"188baf6a-09f1-4a8b-9454-b67f2cb0dada\") " pod="openstack/octavia-db-sync-hfpxv" Oct 10 18:10:16 crc kubenswrapper[4799]: I1010 18:10:16.900410 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/188baf6a-09f1-4a8b-9454-b67f2cb0dada-combined-ca-bundle\") pod \"octavia-db-sync-hfpxv\" (UID: \"188baf6a-09f1-4a8b-9454-b67f2cb0dada\") " pod="openstack/octavia-db-sync-hfpxv" Oct 10 18:10:16 crc kubenswrapper[4799]: I1010 18:10:16.900459 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/188baf6a-09f1-4a8b-9454-b67f2cb0dada-scripts\") pod 
\"octavia-db-sync-hfpxv\" (UID: \"188baf6a-09f1-4a8b-9454-b67f2cb0dada\") " pod="openstack/octavia-db-sync-hfpxv" Oct 10 18:10:16 crc kubenswrapper[4799]: I1010 18:10:16.900525 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/188baf6a-09f1-4a8b-9454-b67f2cb0dada-config-data-merged\") pod \"octavia-db-sync-hfpxv\" (UID: \"188baf6a-09f1-4a8b-9454-b67f2cb0dada\") " pod="openstack/octavia-db-sync-hfpxv" Oct 10 18:10:16 crc kubenswrapper[4799]: I1010 18:10:16.900621 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/188baf6a-09f1-4a8b-9454-b67f2cb0dada-config-data\") pod \"octavia-db-sync-hfpxv\" (UID: \"188baf6a-09f1-4a8b-9454-b67f2cb0dada\") " pod="openstack/octavia-db-sync-hfpxv" Oct 10 18:10:16 crc kubenswrapper[4799]: I1010 18:10:16.901445 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/188baf6a-09f1-4a8b-9454-b67f2cb0dada-config-data-merged\") pod \"octavia-db-sync-hfpxv\" (UID: \"188baf6a-09f1-4a8b-9454-b67f2cb0dada\") " pod="openstack/octavia-db-sync-hfpxv" Oct 10 18:10:16 crc kubenswrapper[4799]: I1010 18:10:16.908654 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/188baf6a-09f1-4a8b-9454-b67f2cb0dada-combined-ca-bundle\") pod \"octavia-db-sync-hfpxv\" (UID: \"188baf6a-09f1-4a8b-9454-b67f2cb0dada\") " pod="openstack/octavia-db-sync-hfpxv" Oct 10 18:10:16 crc kubenswrapper[4799]: I1010 18:10:16.916238 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/188baf6a-09f1-4a8b-9454-b67f2cb0dada-scripts\") pod \"octavia-db-sync-hfpxv\" (UID: \"188baf6a-09f1-4a8b-9454-b67f2cb0dada\") " pod="openstack/octavia-db-sync-hfpxv" Oct 10 18:10:16 crc kubenswrapper[4799]: I1010 18:10:16.922809 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/188baf6a-09f1-4a8b-9454-b67f2cb0dada-config-data\") pod \"octavia-db-sync-hfpxv\" (UID: \"188baf6a-09f1-4a8b-9454-b67f2cb0dada\") " pod="openstack/octavia-db-sync-hfpxv" Oct 10 18:10:16 crc kubenswrapper[4799]: W1010 18:10:16.925632 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb254e109_c7f5_4d7f_811d_21072cc98789.slice/crio-d6490398eec515f7f180b988bb24c261763a9a20ad0aaba95593514a33f7345a WatchSource:0}: Error finding container d6490398eec515f7f180b988bb24c261763a9a20ad0aaba95593514a33f7345a: Status 404 returned error can't find the container with id d6490398eec515f7f180b988bb24c261763a9a20ad0aaba95593514a33f7345a Oct 10 18:10:17 crc kubenswrapper[4799]: I1010 18:10:17.059838 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-db-sync-hfpxv" Oct 10 18:10:17 crc kubenswrapper[4799]: I1010 18:10:17.538874 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-sync-hfpxv"] Oct 10 18:10:17 crc kubenswrapper[4799]: I1010 18:10:17.575904 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-nth2b" event={"ID":"62197d1d-2108-4294-96f3-afe7487d515b","Type":"ContainerStarted","Data":"4262da690c6774aed2f4f77b7ea222796847a1e21f8901ba79f16c528b63e1ab"} Oct 10 18:10:17 crc kubenswrapper[4799]: I1010 18:10:17.579691 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-hfpxv" event={"ID":"188baf6a-09f1-4a8b-9454-b67f2cb0dada","Type":"ContainerStarted","Data":"4b928f34eee88cc2493c5b761cab20b58554be89cf598922e9d727f437c59122"} Oct 10 18:10:17 crc kubenswrapper[4799]: I1010 18:10:17.584029 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-678599687f-dlr8w" event={"ID":"b254e109-c7f5-4d7f-811d-21072cc98789","Type":"ContainerStarted","Data":"d6490398eec515f7f180b988bb24c261763a9a20ad0aaba95593514a33f7345a"} Oct 10 18:10:18 crc kubenswrapper[4799]: I1010 18:10:18.402639 4799 scope.go:117] "RemoveContainer" containerID="6ae067b7971fd6480cb0c3ccf44d4e22f837ba4674373b4b5903247a9af39cf1" Oct 10 18:10:18 crc kubenswrapper[4799]: E1010 18:10:18.403613 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:10:18 crc kubenswrapper[4799]: I1010 18:10:18.599567 4799 generic.go:334] "Generic (PLEG): container finished" podID="188baf6a-09f1-4a8b-9454-b67f2cb0dada" containerID="21ea28bfc3e0f0e893de774f4a40d2b02f3cb699b7abe1c335903770672fb354" exitCode=0 Oct 10 18:10:18 crc kubenswrapper[4799]: I1010 18:10:18.599836 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-hfpxv" event={"ID":"188baf6a-09f1-4a8b-9454-b67f2cb0dada","Type":"ContainerDied","Data":"21ea28bfc3e0f0e893de774f4a40d2b02f3cb699b7abe1c335903770672fb354"} Oct 10 18:10:19 crc kubenswrapper[4799]: I1010 18:10:19.608990 4799 generic.go:334] "Generic (PLEG): container finished" podID="62197d1d-2108-4294-96f3-afe7487d515b" containerID="4262da690c6774aed2f4f77b7ea222796847a1e21f8901ba79f16c528b63e1ab" exitCode=0 Oct 10 18:10:19 crc kubenswrapper[4799]: I1010 18:10:19.609046 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-nth2b" event={"ID":"62197d1d-2108-4294-96f3-afe7487d515b","Type":"ContainerDied","Data":"4262da690c6774aed2f4f77b7ea222796847a1e21f8901ba79f16c528b63e1ab"} Oct 10 18:10:20 crc kubenswrapper[4799]: I1010 18:10:20.622150 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-hfpxv" event={"ID":"188baf6a-09f1-4a8b-9454-b67f2cb0dada","Type":"ContainerStarted","Data":"e9e71403b7ccff1b358e2120b49b1ac55a6261b0b059c24e89b6e61dd6729a3f"} Oct 10 18:10:20 crc kubenswrapper[4799]: I1010 18:10:20.642160 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-db-sync-hfpxv" podStartSLOduration=4.642128923 podStartE2EDuration="4.642128923s" podCreationTimestamp="2025-10-10 18:10:16 +0000 
UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:10:20.638263429 +0000 UTC m=+5914.146587554" watchObservedRunningTime="2025-10-10 18:10:20.642128923 +0000 UTC m=+5914.150453048" Oct 10 18:10:23 crc kubenswrapper[4799]: I1010 18:10:23.659720 4799 generic.go:334] "Generic (PLEG): container finished" podID="188baf6a-09f1-4a8b-9454-b67f2cb0dada" containerID="e9e71403b7ccff1b358e2120b49b1ac55a6261b0b059c24e89b6e61dd6729a3f" exitCode=0 Oct 10 18:10:23 crc kubenswrapper[4799]: I1010 18:10:23.660230 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-hfpxv" event={"ID":"188baf6a-09f1-4a8b-9454-b67f2cb0dada","Type":"ContainerDied","Data":"e9e71403b7ccff1b358e2120b49b1ac55a6261b0b059c24e89b6e61dd6729a3f"} Oct 10 18:10:25 crc kubenswrapper[4799]: I1010 18:10:25.689550 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-hfpxv" event={"ID":"188baf6a-09f1-4a8b-9454-b67f2cb0dada","Type":"ContainerDied","Data":"4b928f34eee88cc2493c5b761cab20b58554be89cf598922e9d727f437c59122"} Oct 10 18:10:25 crc kubenswrapper[4799]: I1010 18:10:25.689962 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4b928f34eee88cc2493c5b761cab20b58554be89cf598922e9d727f437c59122" Oct 10 18:10:25 crc kubenswrapper[4799]: I1010 18:10:25.710005 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-sync-hfpxv" Oct 10 18:10:25 crc kubenswrapper[4799]: I1010 18:10:25.775579 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/188baf6a-09f1-4a8b-9454-b67f2cb0dada-scripts\") pod \"188baf6a-09f1-4a8b-9454-b67f2cb0dada\" (UID: \"188baf6a-09f1-4a8b-9454-b67f2cb0dada\") " Oct 10 18:10:25 crc kubenswrapper[4799]: I1010 18:10:25.775926 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/188baf6a-09f1-4a8b-9454-b67f2cb0dada-combined-ca-bundle\") pod \"188baf6a-09f1-4a8b-9454-b67f2cb0dada\" (UID: \"188baf6a-09f1-4a8b-9454-b67f2cb0dada\") " Oct 10 18:10:25 crc kubenswrapper[4799]: I1010 18:10:25.776023 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/188baf6a-09f1-4a8b-9454-b67f2cb0dada-config-data\") pod \"188baf6a-09f1-4a8b-9454-b67f2cb0dada\" (UID: \"188baf6a-09f1-4a8b-9454-b67f2cb0dada\") " Oct 10 18:10:25 crc kubenswrapper[4799]: I1010 18:10:25.776091 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/188baf6a-09f1-4a8b-9454-b67f2cb0dada-config-data-merged\") pod \"188baf6a-09f1-4a8b-9454-b67f2cb0dada\" (UID: \"188baf6a-09f1-4a8b-9454-b67f2cb0dada\") " Oct 10 18:10:25 crc kubenswrapper[4799]: I1010 18:10:25.792537 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/188baf6a-09f1-4a8b-9454-b67f2cb0dada-config-data" (OuterVolumeSpecName: "config-data") pod "188baf6a-09f1-4a8b-9454-b67f2cb0dada" (UID: "188baf6a-09f1-4a8b-9454-b67f2cb0dada"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:10:25 crc kubenswrapper[4799]: I1010 18:10:25.794360 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/188baf6a-09f1-4a8b-9454-b67f2cb0dada-scripts" (OuterVolumeSpecName: "scripts") pod "188baf6a-09f1-4a8b-9454-b67f2cb0dada" (UID: "188baf6a-09f1-4a8b-9454-b67f2cb0dada"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:10:25 crc kubenswrapper[4799]: I1010 18:10:25.814828 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/188baf6a-09f1-4a8b-9454-b67f2cb0dada-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "188baf6a-09f1-4a8b-9454-b67f2cb0dada" (UID: "188baf6a-09f1-4a8b-9454-b67f2cb0dada"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:10:25 crc kubenswrapper[4799]: I1010 18:10:25.825207 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/188baf6a-09f1-4a8b-9454-b67f2cb0dada-config-data-merged" (OuterVolumeSpecName: "config-data-merged") pod "188baf6a-09f1-4a8b-9454-b67f2cb0dada" (UID: "188baf6a-09f1-4a8b-9454-b67f2cb0dada"). InnerVolumeSpecName "config-data-merged". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:10:25 crc kubenswrapper[4799]: I1010 18:10:25.878028 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/188baf6a-09f1-4a8b-9454-b67f2cb0dada-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 18:10:25 crc kubenswrapper[4799]: I1010 18:10:25.878060 4799 reconciler_common.go:293] "Volume detached for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/188baf6a-09f1-4a8b-9454-b67f2cb0dada-config-data-merged\") on node \"crc\" DevicePath \"\"" Oct 10 18:10:25 crc kubenswrapper[4799]: I1010 18:10:25.878072 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/188baf6a-09f1-4a8b-9454-b67f2cb0dada-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 18:10:25 crc kubenswrapper[4799]: I1010 18:10:25.878082 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/188baf6a-09f1-4a8b-9454-b67f2cb0dada-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:10:26 crc kubenswrapper[4799]: I1010 18:10:26.704866 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-678599687f-dlr8w" event={"ID":"b254e109-c7f5-4d7f-811d-21072cc98789","Type":"ContainerStarted","Data":"665d445741da79b1262eba80459e8c35d0ceb37c862f9749eede5f12fc67bac5"} Oct 10 18:10:26 crc kubenswrapper[4799]: I1010 18:10:26.710557 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-db-sync-hfpxv" Oct 10 18:10:26 crc kubenswrapper[4799]: I1010 18:10:26.712278 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-nth2b" event={"ID":"62197d1d-2108-4294-96f3-afe7487d515b","Type":"ContainerStarted","Data":"9a8f32f7368db463d354b98fa28e3df995639fecd84141be4b4d7dc002e08f74"} Oct 10 18:10:26 crc kubenswrapper[4799]: I1010 18:10:26.713233 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-rsyslog-nth2b" Oct 10 18:10:26 crc kubenswrapper[4799]: I1010 18:10:26.784488 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-rsyslog-nth2b" podStartSLOduration=3.221195228 podStartE2EDuration="13.784462754s" podCreationTimestamp="2025-10-10 18:10:13 +0000 UTC" firstStartedPulling="2025-10-10 18:10:15.523888572 +0000 UTC m=+5909.032212687" lastFinishedPulling="2025-10-10 18:10:26.087156098 +0000 UTC m=+5919.595480213" observedRunningTime="2025-10-10 18:10:26.763050822 +0000 UTC m=+5920.271374947" watchObservedRunningTime="2025-10-10 18:10:26.784462754 +0000 UTC m=+5920.292786879" Oct 10 18:10:27 crc kubenswrapper[4799]: I1010 18:10:27.734276 4799 generic.go:334] "Generic (PLEG): container finished" podID="b254e109-c7f5-4d7f-811d-21072cc98789" containerID="665d445741da79b1262eba80459e8c35d0ceb37c862f9749eede5f12fc67bac5" exitCode=0 Oct 10 18:10:27 crc kubenswrapper[4799]: I1010 18:10:27.734600 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-678599687f-dlr8w" event={"ID":"b254e109-c7f5-4d7f-811d-21072cc98789","Type":"ContainerDied","Data":"665d445741da79b1262eba80459e8c35d0ceb37c862f9749eede5f12fc67bac5"} Oct 10 18:10:27 crc kubenswrapper[4799]: I1010 18:10:27.800506 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-api-65d4ffcdbb-6xxlk" Oct 10 18:10:27 crc kubenswrapper[4799]: I1010 18:10:27.817361 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-api-65d4ffcdbb-6xxlk" Oct 10 18:10:28 crc kubenswrapper[4799]: I1010 18:10:28.750431 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-678599687f-dlr8w" event={"ID":"b254e109-c7f5-4d7f-811d-21072cc98789","Type":"ContainerStarted","Data":"6fd98b23f1bf8cec3be260e9ad24605c27e6874412edffa8bb19490570768843"} Oct 10 18:10:28 crc kubenswrapper[4799]: I1010 18:10:28.787909 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-image-upload-678599687f-dlr8w" podStartSLOduration=5.460563563 podStartE2EDuration="14.787884174s" podCreationTimestamp="2025-10-10 18:10:14 +0000 UTC" firstStartedPulling="2025-10-10 18:10:16.92818521 +0000 UTC m=+5910.436509325" lastFinishedPulling="2025-10-10 18:10:26.255505781 +0000 UTC m=+5919.763829936" observedRunningTime="2025-10-10 18:10:28.772302464 +0000 UTC m=+5922.280626619" watchObservedRunningTime="2025-10-10 18:10:28.787884174 +0000 UTC m=+5922.296208299" Oct 10 18:10:33 crc kubenswrapper[4799]: I1010 18:10:33.403235 4799 scope.go:117] "RemoveContainer" containerID="6ae067b7971fd6480cb0c3ccf44d4e22f837ba4674373b4b5903247a9af39cf1" Oct 10 18:10:33 crc kubenswrapper[4799]: E1010 18:10:33.403990 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:10:37 crc kubenswrapper[4799]: I1010 18:10:37.064006 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-fvlgw"] Oct 10 18:10:37 crc kubenswrapper[4799]: I1010 18:10:37.075015 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-fvlgw"] Oct 10 18:10:37 crc kubenswrapper[4799]: I1010 18:10:37.417380 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33875892-5c9e-4315-98bd-cc799f670b18" path="/var/lib/kubelet/pods/33875892-5c9e-4315-98bd-cc799f670b18/volumes" Oct 10 18:10:44 crc kubenswrapper[4799]: I1010 18:10:44.327862 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-rsyslog-nth2b" Oct 10 18:10:45 crc kubenswrapper[4799]: I1010 18:10:45.818840 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-7lfxc"] Oct 10 18:10:45 crc kubenswrapper[4799]: E1010 18:10:45.819849 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="188baf6a-09f1-4a8b-9454-b67f2cb0dada" containerName="octavia-db-sync" Oct 10 18:10:45 crc kubenswrapper[4799]: I1010 18:10:45.819866 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="188baf6a-09f1-4a8b-9454-b67f2cb0dada" containerName="octavia-db-sync" Oct 10 18:10:45 crc kubenswrapper[4799]: E1010 18:10:45.819879 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="188baf6a-09f1-4a8b-9454-b67f2cb0dada" containerName="init" Oct 10 18:10:45 crc kubenswrapper[4799]: I1010 18:10:45.819887 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="188baf6a-09f1-4a8b-9454-b67f2cb0dada" containerName="init" Oct 10 18:10:45 crc kubenswrapper[4799]: I1010 18:10:45.820152 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="188baf6a-09f1-4a8b-9454-b67f2cb0dada" containerName="octavia-db-sync" Oct 10 18:10:45 crc kubenswrapper[4799]: I1010 18:10:45.821915 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-7lfxc" Oct 10 18:10:45 crc kubenswrapper[4799]: I1010 18:10:45.839668 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7lfxc"] Oct 10 18:10:46 crc kubenswrapper[4799]: I1010 18:10:46.004127 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/16fbf240-7cd8-496d-b0de-0772571849d3-utilities\") pod \"certified-operators-7lfxc\" (UID: \"16fbf240-7cd8-496d-b0de-0772571849d3\") " pod="openshift-marketplace/certified-operators-7lfxc" Oct 10 18:10:46 crc kubenswrapper[4799]: I1010 18:10:46.004640 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/16fbf240-7cd8-496d-b0de-0772571849d3-catalog-content\") pod \"certified-operators-7lfxc\" (UID: \"16fbf240-7cd8-496d-b0de-0772571849d3\") " pod="openshift-marketplace/certified-operators-7lfxc" Oct 10 18:10:46 crc kubenswrapper[4799]: I1010 18:10:46.004674 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ll6f\" (UniqueName: \"kubernetes.io/projected/16fbf240-7cd8-496d-b0de-0772571849d3-kube-api-access-7ll6f\") pod \"certified-operators-7lfxc\" (UID: \"16fbf240-7cd8-496d-b0de-0772571849d3\") " pod="openshift-marketplace/certified-operators-7lfxc" Oct 10 18:10:46 crc kubenswrapper[4799]: I1010 18:10:46.105837 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/16fbf240-7cd8-496d-b0de-0772571849d3-catalog-content\") pod \"certified-operators-7lfxc\" (UID: \"16fbf240-7cd8-496d-b0de-0772571849d3\") " pod="openshift-marketplace/certified-operators-7lfxc" Oct 10 18:10:46 crc kubenswrapper[4799]: I1010 18:10:46.105877 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ll6f\" (UniqueName: \"kubernetes.io/projected/16fbf240-7cd8-496d-b0de-0772571849d3-kube-api-access-7ll6f\") pod \"certified-operators-7lfxc\" (UID: \"16fbf240-7cd8-496d-b0de-0772571849d3\") " pod="openshift-marketplace/certified-operators-7lfxc" Oct 10 18:10:46 crc kubenswrapper[4799]: I1010 18:10:46.105926 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/16fbf240-7cd8-496d-b0de-0772571849d3-utilities\") pod \"certified-operators-7lfxc\" (UID: \"16fbf240-7cd8-496d-b0de-0772571849d3\") " pod="openshift-marketplace/certified-operators-7lfxc" Oct 10 18:10:46 crc kubenswrapper[4799]: I1010 18:10:46.106610 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/16fbf240-7cd8-496d-b0de-0772571849d3-catalog-content\") pod \"certified-operators-7lfxc\" (UID: \"16fbf240-7cd8-496d-b0de-0772571849d3\") " pod="openshift-marketplace/certified-operators-7lfxc" Oct 10 18:10:46 crc kubenswrapper[4799]: I1010 18:10:46.106686 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/16fbf240-7cd8-496d-b0de-0772571849d3-utilities\") pod \"certified-operators-7lfxc\" (UID: \"16fbf240-7cd8-496d-b0de-0772571849d3\") " pod="openshift-marketplace/certified-operators-7lfxc" Oct 10 18:10:46 crc kubenswrapper[4799]: I1010 18:10:46.132885 4799 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-7ll6f\" (UniqueName: \"kubernetes.io/projected/16fbf240-7cd8-496d-b0de-0772571849d3-kube-api-access-7ll6f\") pod \"certified-operators-7lfxc\" (UID: \"16fbf240-7cd8-496d-b0de-0772571849d3\") " pod="openshift-marketplace/certified-operators-7lfxc" Oct 10 18:10:46 crc kubenswrapper[4799]: I1010 18:10:46.145119 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7lfxc" Oct 10 18:10:46 crc kubenswrapper[4799]: W1010 18:10:46.673370 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod16fbf240_7cd8_496d_b0de_0772571849d3.slice/crio-af86584cd2816df3f4de712c8cb8fdb123aece7df0edcf160fe958cb90677bf0 WatchSource:0}: Error finding container af86584cd2816df3f4de712c8cb8fdb123aece7df0edcf160fe958cb90677bf0: Status 404 returned error can't find the container with id af86584cd2816df3f4de712c8cb8fdb123aece7df0edcf160fe958cb90677bf0 Oct 10 18:10:46 crc kubenswrapper[4799]: I1010 18:10:46.677122 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7lfxc"] Oct 10 18:10:46 crc kubenswrapper[4799]: I1010 18:10:46.973389 4799 generic.go:334] "Generic (PLEG): container finished" podID="16fbf240-7cd8-496d-b0de-0772571849d3" containerID="0f3f7bcfeb4a1a3b98a599be3ddb750e47bec10a00359c18b69bc1759458cb94" exitCode=0 Oct 10 18:10:46 crc kubenswrapper[4799]: I1010 18:10:46.973556 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7lfxc" event={"ID":"16fbf240-7cd8-496d-b0de-0772571849d3","Type":"ContainerDied","Data":"0f3f7bcfeb4a1a3b98a599be3ddb750e47bec10a00359c18b69bc1759458cb94"} Oct 10 18:10:46 crc kubenswrapper[4799]: I1010 18:10:46.973728 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7lfxc" event={"ID":"16fbf240-7cd8-496d-b0de-0772571849d3","Type":"ContainerStarted","Data":"af86584cd2816df3f4de712c8cb8fdb123aece7df0edcf160fe958cb90677bf0"} Oct 10 18:10:47 crc kubenswrapper[4799]: I1010 18:10:47.030678 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-f77e-account-create-4lxhb"] Oct 10 18:10:47 crc kubenswrapper[4799]: I1010 18:10:47.038817 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-f77e-account-create-4lxhb"] Oct 10 18:10:47 crc kubenswrapper[4799]: I1010 18:10:47.474663 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8a40d86-64a7-4f0f-91bc-82a5c67754c0" path="/var/lib/kubelet/pods/e8a40d86-64a7-4f0f-91bc-82a5c67754c0/volumes" Oct 10 18:10:47 crc kubenswrapper[4799]: I1010 18:10:47.987458 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7lfxc" event={"ID":"16fbf240-7cd8-496d-b0de-0772571849d3","Type":"ContainerStarted","Data":"883557c6681cd280d6ba5cc8ed361f8f20a9fe44e2196a3a36732871bba068b8"} Oct 10 18:10:48 crc kubenswrapper[4799]: I1010 18:10:48.402689 4799 scope.go:117] "RemoveContainer" containerID="6ae067b7971fd6480cb0c3ccf44d4e22f837ba4674373b4b5903247a9af39cf1" Oct 10 18:10:49 crc kubenswrapper[4799]: I1010 18:10:49.009517 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"78eb2a5dbad4fabd2d68def3efca3798c9b19e24aad44cc0581450cbe14e2a76"} Oct 10 
18:10:50 crc kubenswrapper[4799]: I1010 18:10:50.021095 4799 generic.go:334] "Generic (PLEG): container finished" podID="16fbf240-7cd8-496d-b0de-0772571849d3" containerID="883557c6681cd280d6ba5cc8ed361f8f20a9fe44e2196a3a36732871bba068b8" exitCode=0 Oct 10 18:10:50 crc kubenswrapper[4799]: I1010 18:10:50.021212 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7lfxc" event={"ID":"16fbf240-7cd8-496d-b0de-0772571849d3","Type":"ContainerDied","Data":"883557c6681cd280d6ba5cc8ed361f8f20a9fe44e2196a3a36732871bba068b8"} Oct 10 18:10:51 crc kubenswrapper[4799]: I1010 18:10:51.034604 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7lfxc" event={"ID":"16fbf240-7cd8-496d-b0de-0772571849d3","Type":"ContainerStarted","Data":"f26457a4f2e23b8a692f1717db2eb4e02620eedeb4cbc1bdf03cfec422b1776d"} Oct 10 18:10:51 crc kubenswrapper[4799]: I1010 18:10:51.061485 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-7lfxc" podStartSLOduration=2.516385887 podStartE2EDuration="6.061466554s" podCreationTimestamp="2025-10-10 18:10:45 +0000 UTC" firstStartedPulling="2025-10-10 18:10:46.975094294 +0000 UTC m=+5940.483418429" lastFinishedPulling="2025-10-10 18:10:50.520174981 +0000 UTC m=+5944.028499096" observedRunningTime="2025-10-10 18:10:51.056206166 +0000 UTC m=+5944.564530271" watchObservedRunningTime="2025-10-10 18:10:51.061466554 +0000 UTC m=+5944.569790669" Oct 10 18:10:52 crc kubenswrapper[4799]: I1010 18:10:52.028531 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-9259b"] Oct 10 18:10:52 crc kubenswrapper[4799]: I1010 18:10:52.038224 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-9259b"] Oct 10 18:10:53 crc kubenswrapper[4799]: I1010 18:10:53.414216 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09cb6e81-46ef-4b47-a9a6-33dacfd5f400" path="/var/lib/kubelet/pods/09cb6e81-46ef-4b47-a9a6-33dacfd5f400/volumes" Oct 10 18:10:55 crc kubenswrapper[4799]: I1010 18:10:55.192778 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-image-upload-678599687f-dlr8w"] Oct 10 18:10:55 crc kubenswrapper[4799]: I1010 18:10:55.193383 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/octavia-image-upload-678599687f-dlr8w" podUID="b254e109-c7f5-4d7f-811d-21072cc98789" containerName="octavia-amphora-httpd" containerID="cri-o://6fd98b23f1bf8cec3be260e9ad24605c27e6874412edffa8bb19490570768843" gracePeriod=30 Oct 10 18:10:55 crc kubenswrapper[4799]: I1010 18:10:55.783811 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-image-upload-678599687f-dlr8w" Oct 10 18:10:55 crc kubenswrapper[4799]: I1010 18:10:55.921853 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b254e109-c7f5-4d7f-811d-21072cc98789-httpd-config\") pod \"b254e109-c7f5-4d7f-811d-21072cc98789\" (UID: \"b254e109-c7f5-4d7f-811d-21072cc98789\") " Oct 10 18:10:55 crc kubenswrapper[4799]: I1010 18:10:55.922345 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/b254e109-c7f5-4d7f-811d-21072cc98789-amphora-image\") pod \"b254e109-c7f5-4d7f-811d-21072cc98789\" (UID: \"b254e109-c7f5-4d7f-811d-21072cc98789\") " Oct 10 18:10:55 crc kubenswrapper[4799]: I1010 18:10:55.947659 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b254e109-c7f5-4d7f-811d-21072cc98789-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "b254e109-c7f5-4d7f-811d-21072cc98789" (UID: "b254e109-c7f5-4d7f-811d-21072cc98789"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:10:56 crc kubenswrapper[4799]: I1010 18:10:55.999220 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b254e109-c7f5-4d7f-811d-21072cc98789-amphora-image" (OuterVolumeSpecName: "amphora-image") pod "b254e109-c7f5-4d7f-811d-21072cc98789" (UID: "b254e109-c7f5-4d7f-811d-21072cc98789"). InnerVolumeSpecName "amphora-image". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:10:56 crc kubenswrapper[4799]: I1010 18:10:56.024790 4799 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b254e109-c7f5-4d7f-811d-21072cc98789-httpd-config\") on node \"crc\" DevicePath \"\"" Oct 10 18:10:56 crc kubenswrapper[4799]: I1010 18:10:56.024835 4799 reconciler_common.go:293] "Volume detached for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/b254e109-c7f5-4d7f-811d-21072cc98789-amphora-image\") on node \"crc\" DevicePath \"\"" Oct 10 18:10:56 crc kubenswrapper[4799]: I1010 18:10:56.109345 4799 generic.go:334] "Generic (PLEG): container finished" podID="b254e109-c7f5-4d7f-811d-21072cc98789" containerID="6fd98b23f1bf8cec3be260e9ad24605c27e6874412edffa8bb19490570768843" exitCode=0 Oct 10 18:10:56 crc kubenswrapper[4799]: I1010 18:10:56.109422 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-image-upload-678599687f-dlr8w" Oct 10 18:10:56 crc kubenswrapper[4799]: I1010 18:10:56.109413 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-678599687f-dlr8w" event={"ID":"b254e109-c7f5-4d7f-811d-21072cc98789","Type":"ContainerDied","Data":"6fd98b23f1bf8cec3be260e9ad24605c27e6874412edffa8bb19490570768843"} Oct 10 18:10:56 crc kubenswrapper[4799]: I1010 18:10:56.109501 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-678599687f-dlr8w" event={"ID":"b254e109-c7f5-4d7f-811d-21072cc98789","Type":"ContainerDied","Data":"d6490398eec515f7f180b988bb24c261763a9a20ad0aaba95593514a33f7345a"} Oct 10 18:10:56 crc kubenswrapper[4799]: I1010 18:10:56.109529 4799 scope.go:117] "RemoveContainer" containerID="6fd98b23f1bf8cec3be260e9ad24605c27e6874412edffa8bb19490570768843" Oct 10 18:10:56 crc kubenswrapper[4799]: I1010 18:10:56.138860 4799 scope.go:117] "RemoveContainer" containerID="665d445741da79b1262eba80459e8c35d0ceb37c862f9749eede5f12fc67bac5" Oct 10 18:10:56 crc kubenswrapper[4799]: I1010 18:10:56.144560 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-image-upload-678599687f-dlr8w"] Oct 10 18:10:56 crc kubenswrapper[4799]: I1010 18:10:56.146308 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-7lfxc" Oct 10 18:10:56 crc kubenswrapper[4799]: I1010 18:10:56.146342 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-7lfxc" Oct 10 18:10:56 crc kubenswrapper[4799]: I1010 18:10:56.154469 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-image-upload-678599687f-dlr8w"] Oct 10 18:10:56 crc kubenswrapper[4799]: I1010 18:10:56.165875 4799 scope.go:117] "RemoveContainer" containerID="6fd98b23f1bf8cec3be260e9ad24605c27e6874412edffa8bb19490570768843" Oct 10 18:10:56 crc kubenswrapper[4799]: E1010 18:10:56.166352 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6fd98b23f1bf8cec3be260e9ad24605c27e6874412edffa8bb19490570768843\": container with ID starting with 6fd98b23f1bf8cec3be260e9ad24605c27e6874412edffa8bb19490570768843 not found: ID does not exist" containerID="6fd98b23f1bf8cec3be260e9ad24605c27e6874412edffa8bb19490570768843" Oct 10 18:10:56 crc kubenswrapper[4799]: I1010 18:10:56.166397 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6fd98b23f1bf8cec3be260e9ad24605c27e6874412edffa8bb19490570768843"} err="failed to get container status \"6fd98b23f1bf8cec3be260e9ad24605c27e6874412edffa8bb19490570768843\": rpc error: code = NotFound desc = could not find container \"6fd98b23f1bf8cec3be260e9ad24605c27e6874412edffa8bb19490570768843\": container with ID starting with 6fd98b23f1bf8cec3be260e9ad24605c27e6874412edffa8bb19490570768843 not found: ID does not exist" Oct 10 18:10:56 crc kubenswrapper[4799]: I1010 18:10:56.166427 4799 scope.go:117] "RemoveContainer" containerID="665d445741da79b1262eba80459e8c35d0ceb37c862f9749eede5f12fc67bac5" Oct 10 18:10:56 crc kubenswrapper[4799]: E1010 18:10:56.166792 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"665d445741da79b1262eba80459e8c35d0ceb37c862f9749eede5f12fc67bac5\": container with ID starting with 
665d445741da79b1262eba80459e8c35d0ceb37c862f9749eede5f12fc67bac5 not found: ID does not exist" containerID="665d445741da79b1262eba80459e8c35d0ceb37c862f9749eede5f12fc67bac5" Oct 10 18:10:56 crc kubenswrapper[4799]: I1010 18:10:56.166827 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"665d445741da79b1262eba80459e8c35d0ceb37c862f9749eede5f12fc67bac5"} err="failed to get container status \"665d445741da79b1262eba80459e8c35d0ceb37c862f9749eede5f12fc67bac5\": rpc error: code = NotFound desc = could not find container \"665d445741da79b1262eba80459e8c35d0ceb37c862f9749eede5f12fc67bac5\": container with ID starting with 665d445741da79b1262eba80459e8c35d0ceb37c862f9749eede5f12fc67bac5 not found: ID does not exist" Oct 10 18:10:56 crc kubenswrapper[4799]: I1010 18:10:56.191053 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-7lfxc" Oct 10 18:10:57 crc kubenswrapper[4799]: I1010 18:10:57.210521 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-7lfxc" Oct 10 18:10:57 crc kubenswrapper[4799]: I1010 18:10:57.269047 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7lfxc"] Oct 10 18:10:57 crc kubenswrapper[4799]: I1010 18:10:57.419266 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b254e109-c7f5-4d7f-811d-21072cc98789" path="/var/lib/kubelet/pods/b254e109-c7f5-4d7f-811d-21072cc98789/volumes" Oct 10 18:10:59 crc kubenswrapper[4799]: I1010 18:10:59.147014 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-7lfxc" podUID="16fbf240-7cd8-496d-b0de-0772571849d3" containerName="registry-server" containerID="cri-o://f26457a4f2e23b8a692f1717db2eb4e02620eedeb4cbc1bdf03cfec422b1776d" gracePeriod=2 Oct 10 18:10:59 crc kubenswrapper[4799]: I1010 18:10:59.727025 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7lfxc" Oct 10 18:10:59 crc kubenswrapper[4799]: I1010 18:10:59.811949 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/16fbf240-7cd8-496d-b0de-0772571849d3-utilities\") pod \"16fbf240-7cd8-496d-b0de-0772571849d3\" (UID: \"16fbf240-7cd8-496d-b0de-0772571849d3\") " Oct 10 18:10:59 crc kubenswrapper[4799]: I1010 18:10:59.812128 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7ll6f\" (UniqueName: \"kubernetes.io/projected/16fbf240-7cd8-496d-b0de-0772571849d3-kube-api-access-7ll6f\") pod \"16fbf240-7cd8-496d-b0de-0772571849d3\" (UID: \"16fbf240-7cd8-496d-b0de-0772571849d3\") " Oct 10 18:10:59 crc kubenswrapper[4799]: I1010 18:10:59.812494 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/16fbf240-7cd8-496d-b0de-0772571849d3-catalog-content\") pod \"16fbf240-7cd8-496d-b0de-0772571849d3\" (UID: \"16fbf240-7cd8-496d-b0de-0772571849d3\") " Oct 10 18:10:59 crc kubenswrapper[4799]: I1010 18:10:59.812935 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/16fbf240-7cd8-496d-b0de-0772571849d3-utilities" (OuterVolumeSpecName: "utilities") pod "16fbf240-7cd8-496d-b0de-0772571849d3" (UID: "16fbf240-7cd8-496d-b0de-0772571849d3"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:10:59 crc kubenswrapper[4799]: I1010 18:10:59.813518 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/16fbf240-7cd8-496d-b0de-0772571849d3-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 18:10:59 crc kubenswrapper[4799]: I1010 18:10:59.820130 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16fbf240-7cd8-496d-b0de-0772571849d3-kube-api-access-7ll6f" (OuterVolumeSpecName: "kube-api-access-7ll6f") pod "16fbf240-7cd8-496d-b0de-0772571849d3" (UID: "16fbf240-7cd8-496d-b0de-0772571849d3"). InnerVolumeSpecName "kube-api-access-7ll6f". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:10:59 crc kubenswrapper[4799]: I1010 18:10:59.870912 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/16fbf240-7cd8-496d-b0de-0772571849d3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "16fbf240-7cd8-496d-b0de-0772571849d3" (UID: "16fbf240-7cd8-496d-b0de-0772571849d3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:10:59 crc kubenswrapper[4799]: I1010 18:10:59.915213 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7ll6f\" (UniqueName: \"kubernetes.io/projected/16fbf240-7cd8-496d-b0de-0772571849d3-kube-api-access-7ll6f\") on node \"crc\" DevicePath \"\"" Oct 10 18:10:59 crc kubenswrapper[4799]: I1010 18:10:59.915251 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/16fbf240-7cd8-496d-b0de-0772571849d3-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 18:11:00 crc kubenswrapper[4799]: I1010 18:11:00.162546 4799 generic.go:334] "Generic (PLEG): container finished" podID="16fbf240-7cd8-496d-b0de-0772571849d3" containerID="f26457a4f2e23b8a692f1717db2eb4e02620eedeb4cbc1bdf03cfec422b1776d" exitCode=0 Oct 10 18:11:00 crc kubenswrapper[4799]: I1010 18:11:00.162607 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7lfxc" event={"ID":"16fbf240-7cd8-496d-b0de-0772571849d3","Type":"ContainerDied","Data":"f26457a4f2e23b8a692f1717db2eb4e02620eedeb4cbc1bdf03cfec422b1776d"} Oct 10 18:11:00 crc kubenswrapper[4799]: I1010 18:11:00.162657 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7lfxc" event={"ID":"16fbf240-7cd8-496d-b0de-0772571849d3","Type":"ContainerDied","Data":"af86584cd2816df3f4de712c8cb8fdb123aece7df0edcf160fe958cb90677bf0"} Oct 10 18:11:00 crc kubenswrapper[4799]: I1010 18:11:00.162686 4799 scope.go:117] "RemoveContainer" containerID="f26457a4f2e23b8a692f1717db2eb4e02620eedeb4cbc1bdf03cfec422b1776d" Oct 10 18:11:00 crc kubenswrapper[4799]: I1010 18:11:00.162704 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-7lfxc" Oct 10 18:11:00 crc kubenswrapper[4799]: I1010 18:11:00.217025 4799 scope.go:117] "RemoveContainer" containerID="883557c6681cd280d6ba5cc8ed361f8f20a9fe44e2196a3a36732871bba068b8" Oct 10 18:11:00 crc kubenswrapper[4799]: I1010 18:11:00.229475 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7lfxc"] Oct 10 18:11:00 crc kubenswrapper[4799]: I1010 18:11:00.243813 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-7lfxc"] Oct 10 18:11:00 crc kubenswrapper[4799]: I1010 18:11:00.253399 4799 scope.go:117] "RemoveContainer" containerID="0f3f7bcfeb4a1a3b98a599be3ddb750e47bec10a00359c18b69bc1759458cb94" Oct 10 18:11:00 crc kubenswrapper[4799]: I1010 18:11:00.310236 4799 scope.go:117] "RemoveContainer" containerID="f26457a4f2e23b8a692f1717db2eb4e02620eedeb4cbc1bdf03cfec422b1776d" Oct 10 18:11:00 crc kubenswrapper[4799]: E1010 18:11:00.310801 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f26457a4f2e23b8a692f1717db2eb4e02620eedeb4cbc1bdf03cfec422b1776d\": container with ID starting with f26457a4f2e23b8a692f1717db2eb4e02620eedeb4cbc1bdf03cfec422b1776d not found: ID does not exist" containerID="f26457a4f2e23b8a692f1717db2eb4e02620eedeb4cbc1bdf03cfec422b1776d" Oct 10 18:11:00 crc kubenswrapper[4799]: I1010 18:11:00.310845 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f26457a4f2e23b8a692f1717db2eb4e02620eedeb4cbc1bdf03cfec422b1776d"} err="failed to get container status \"f26457a4f2e23b8a692f1717db2eb4e02620eedeb4cbc1bdf03cfec422b1776d\": rpc error: code = NotFound desc = could not find container \"f26457a4f2e23b8a692f1717db2eb4e02620eedeb4cbc1bdf03cfec422b1776d\": container with ID starting with f26457a4f2e23b8a692f1717db2eb4e02620eedeb4cbc1bdf03cfec422b1776d not found: ID does not exist" Oct 10 18:11:00 crc kubenswrapper[4799]: I1010 18:11:00.310878 4799 scope.go:117] "RemoveContainer" containerID="883557c6681cd280d6ba5cc8ed361f8f20a9fe44e2196a3a36732871bba068b8" Oct 10 18:11:00 crc kubenswrapper[4799]: E1010 18:11:00.311379 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"883557c6681cd280d6ba5cc8ed361f8f20a9fe44e2196a3a36732871bba068b8\": container with ID starting with 883557c6681cd280d6ba5cc8ed361f8f20a9fe44e2196a3a36732871bba068b8 not found: ID does not exist" containerID="883557c6681cd280d6ba5cc8ed361f8f20a9fe44e2196a3a36732871bba068b8" Oct 10 18:11:00 crc kubenswrapper[4799]: I1010 18:11:00.311429 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"883557c6681cd280d6ba5cc8ed361f8f20a9fe44e2196a3a36732871bba068b8"} err="failed to get container status \"883557c6681cd280d6ba5cc8ed361f8f20a9fe44e2196a3a36732871bba068b8\": rpc error: code = NotFound desc = could not find container \"883557c6681cd280d6ba5cc8ed361f8f20a9fe44e2196a3a36732871bba068b8\": container with ID starting with 883557c6681cd280d6ba5cc8ed361f8f20a9fe44e2196a3a36732871bba068b8 not found: ID does not exist" Oct 10 18:11:00 crc kubenswrapper[4799]: I1010 18:11:00.311483 4799 scope.go:117] "RemoveContainer" containerID="0f3f7bcfeb4a1a3b98a599be3ddb750e47bec10a00359c18b69bc1759458cb94" Oct 10 18:11:00 crc kubenswrapper[4799]: E1010 18:11:00.312037 4799 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"0f3f7bcfeb4a1a3b98a599be3ddb750e47bec10a00359c18b69bc1759458cb94\": container with ID starting with 0f3f7bcfeb4a1a3b98a599be3ddb750e47bec10a00359c18b69bc1759458cb94 not found: ID does not exist" containerID="0f3f7bcfeb4a1a3b98a599be3ddb750e47bec10a00359c18b69bc1759458cb94" Oct 10 18:11:00 crc kubenswrapper[4799]: I1010 18:11:00.312088 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f3f7bcfeb4a1a3b98a599be3ddb750e47bec10a00359c18b69bc1759458cb94"} err="failed to get container status \"0f3f7bcfeb4a1a3b98a599be3ddb750e47bec10a00359c18b69bc1759458cb94\": rpc error: code = NotFound desc = could not find container \"0f3f7bcfeb4a1a3b98a599be3ddb750e47bec10a00359c18b69bc1759458cb94\": container with ID starting with 0f3f7bcfeb4a1a3b98a599be3ddb750e47bec10a00359c18b69bc1759458cb94 not found: ID does not exist" Oct 10 18:11:00 crc kubenswrapper[4799]: I1010 18:11:00.502802 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-image-upload-678599687f-dlbhf"] Oct 10 18:11:00 crc kubenswrapper[4799]: E1010 18:11:00.503356 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b254e109-c7f5-4d7f-811d-21072cc98789" containerName="octavia-amphora-httpd" Oct 10 18:11:00 crc kubenswrapper[4799]: I1010 18:11:00.503379 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="b254e109-c7f5-4d7f-811d-21072cc98789" containerName="octavia-amphora-httpd" Oct 10 18:11:00 crc kubenswrapper[4799]: E1010 18:11:00.503400 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16fbf240-7cd8-496d-b0de-0772571849d3" containerName="extract-utilities" Oct 10 18:11:00 crc kubenswrapper[4799]: I1010 18:11:00.503408 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="16fbf240-7cd8-496d-b0de-0772571849d3" containerName="extract-utilities" Oct 10 18:11:00 crc kubenswrapper[4799]: E1010 18:11:00.503426 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b254e109-c7f5-4d7f-811d-21072cc98789" containerName="init" Oct 10 18:11:00 crc kubenswrapper[4799]: I1010 18:11:00.503436 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="b254e109-c7f5-4d7f-811d-21072cc98789" containerName="init" Oct 10 18:11:00 crc kubenswrapper[4799]: E1010 18:11:00.503456 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16fbf240-7cd8-496d-b0de-0772571849d3" containerName="extract-content" Oct 10 18:11:00 crc kubenswrapper[4799]: I1010 18:11:00.503465 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="16fbf240-7cd8-496d-b0de-0772571849d3" containerName="extract-content" Oct 10 18:11:00 crc kubenswrapper[4799]: E1010 18:11:00.503493 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16fbf240-7cd8-496d-b0de-0772571849d3" containerName="registry-server" Oct 10 18:11:00 crc kubenswrapper[4799]: I1010 18:11:00.503500 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="16fbf240-7cd8-496d-b0de-0772571849d3" containerName="registry-server" Oct 10 18:11:00 crc kubenswrapper[4799]: I1010 18:11:00.503805 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="16fbf240-7cd8-496d-b0de-0772571849d3" containerName="registry-server" Oct 10 18:11:00 crc kubenswrapper[4799]: I1010 18:11:00.503832 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="b254e109-c7f5-4d7f-811d-21072cc98789" containerName="octavia-amphora-httpd" Oct 10 18:11:00 crc kubenswrapper[4799]: I1010 
18:11:00.505159 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-image-upload-678599687f-dlbhf" Oct 10 18:11:00 crc kubenswrapper[4799]: I1010 18:11:00.507997 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-config-data" Oct 10 18:11:00 crc kubenswrapper[4799]: I1010 18:11:00.513404 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-678599687f-dlbhf"] Oct 10 18:11:00 crc kubenswrapper[4799]: I1010 18:11:00.646878 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/b3d27d6a-4cd4-4955-9cfb-7a4d92f3af7a-amphora-image\") pod \"octavia-image-upload-678599687f-dlbhf\" (UID: \"b3d27d6a-4cd4-4955-9cfb-7a4d92f3af7a\") " pod="openstack/octavia-image-upload-678599687f-dlbhf" Oct 10 18:11:00 crc kubenswrapper[4799]: I1010 18:11:00.647001 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b3d27d6a-4cd4-4955-9cfb-7a4d92f3af7a-httpd-config\") pod \"octavia-image-upload-678599687f-dlbhf\" (UID: \"b3d27d6a-4cd4-4955-9cfb-7a4d92f3af7a\") " pod="openstack/octavia-image-upload-678599687f-dlbhf" Oct 10 18:11:00 crc kubenswrapper[4799]: I1010 18:11:00.748712 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/b3d27d6a-4cd4-4955-9cfb-7a4d92f3af7a-amphora-image\") pod \"octavia-image-upload-678599687f-dlbhf\" (UID: \"b3d27d6a-4cd4-4955-9cfb-7a4d92f3af7a\") " pod="openstack/octavia-image-upload-678599687f-dlbhf" Oct 10 18:11:00 crc kubenswrapper[4799]: I1010 18:11:00.749170 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b3d27d6a-4cd4-4955-9cfb-7a4d92f3af7a-httpd-config\") pod \"octavia-image-upload-678599687f-dlbhf\" (UID: \"b3d27d6a-4cd4-4955-9cfb-7a4d92f3af7a\") " pod="openstack/octavia-image-upload-678599687f-dlbhf" Oct 10 18:11:00 crc kubenswrapper[4799]: I1010 18:11:00.749174 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/b3d27d6a-4cd4-4955-9cfb-7a4d92f3af7a-amphora-image\") pod \"octavia-image-upload-678599687f-dlbhf\" (UID: \"b3d27d6a-4cd4-4955-9cfb-7a4d92f3af7a\") " pod="openstack/octavia-image-upload-678599687f-dlbhf" Oct 10 18:11:00 crc kubenswrapper[4799]: I1010 18:11:00.753883 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b3d27d6a-4cd4-4955-9cfb-7a4d92f3af7a-httpd-config\") pod \"octavia-image-upload-678599687f-dlbhf\" (UID: \"b3d27d6a-4cd4-4955-9cfb-7a4d92f3af7a\") " pod="openstack/octavia-image-upload-678599687f-dlbhf" Oct 10 18:11:00 crc kubenswrapper[4799]: I1010 18:11:00.875926 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-image-upload-678599687f-dlbhf" Oct 10 18:11:01 crc kubenswrapper[4799]: I1010 18:11:01.314711 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-678599687f-dlbhf"] Oct 10 18:11:01 crc kubenswrapper[4799]: W1010 18:11:01.317727 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb3d27d6a_4cd4_4955_9cfb_7a4d92f3af7a.slice/crio-3c2ab152023554cc7d0a7a773a20cee01263a341667dacb7110a2af9230ac470 WatchSource:0}: Error finding container 3c2ab152023554cc7d0a7a773a20cee01263a341667dacb7110a2af9230ac470: Status 404 returned error can't find the container with id 3c2ab152023554cc7d0a7a773a20cee01263a341667dacb7110a2af9230ac470 Oct 10 18:11:01 crc kubenswrapper[4799]: I1010 18:11:01.412139 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16fbf240-7cd8-496d-b0de-0772571849d3" path="/var/lib/kubelet/pods/16fbf240-7cd8-496d-b0de-0772571849d3/volumes" Oct 10 18:11:02 crc kubenswrapper[4799]: I1010 18:11:02.188906 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-678599687f-dlbhf" event={"ID":"b3d27d6a-4cd4-4955-9cfb-7a4d92f3af7a","Type":"ContainerStarted","Data":"81ebce50d4fa44963b4c1a5344afd27604e7dc8f5a96587324d15ec9e1243f3d"} Oct 10 18:11:02 crc kubenswrapper[4799]: I1010 18:11:02.189309 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-678599687f-dlbhf" event={"ID":"b3d27d6a-4cd4-4955-9cfb-7a4d92f3af7a","Type":"ContainerStarted","Data":"3c2ab152023554cc7d0a7a773a20cee01263a341667dacb7110a2af9230ac470"} Oct 10 18:11:03 crc kubenswrapper[4799]: I1010 18:11:03.203171 4799 generic.go:334] "Generic (PLEG): container finished" podID="b3d27d6a-4cd4-4955-9cfb-7a4d92f3af7a" containerID="81ebce50d4fa44963b4c1a5344afd27604e7dc8f5a96587324d15ec9e1243f3d" exitCode=0 Oct 10 18:11:03 crc kubenswrapper[4799]: I1010 18:11:03.203237 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-678599687f-dlbhf" event={"ID":"b3d27d6a-4cd4-4955-9cfb-7a4d92f3af7a","Type":"ContainerDied","Data":"81ebce50d4fa44963b4c1a5344afd27604e7dc8f5a96587324d15ec9e1243f3d"} Oct 10 18:11:04 crc kubenswrapper[4799]: I1010 18:11:04.223150 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-678599687f-dlbhf" event={"ID":"b3d27d6a-4cd4-4955-9cfb-7a4d92f3af7a","Type":"ContainerStarted","Data":"6c33f560bc9259c751d53c435a5a8ec81bc0d5445a75ce501bb040e77cff4d34"} Oct 10 18:11:04 crc kubenswrapper[4799]: I1010 18:11:04.260098 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-image-upload-678599687f-dlbhf" podStartSLOduration=3.659582416 podStartE2EDuration="4.260070382s" podCreationTimestamp="2025-10-10 18:11:00 +0000 UTC" firstStartedPulling="2025-10-10 18:11:01.32116275 +0000 UTC m=+5954.829486865" lastFinishedPulling="2025-10-10 18:11:01.921650676 +0000 UTC m=+5955.429974831" observedRunningTime="2025-10-10 18:11:04.243739224 +0000 UTC m=+5957.752063349" watchObservedRunningTime="2025-10-10 18:11:04.260070382 +0000 UTC m=+5957.768394517" Oct 10 18:11:08 crc kubenswrapper[4799]: I1010 18:11:08.581110 4799 scope.go:117] "RemoveContainer" containerID="c5855479684612e5c3ccbb7f6671a4969a9bf0cff9ccea33b4843ee8c5baef6c" Oct 10 18:11:08 crc kubenswrapper[4799]: I1010 18:11:08.615155 4799 scope.go:117] "RemoveContainer" 
containerID="965948bd2fdcb674ea5c9f30d8aa5523ac6ca7b995603cfcf1e0370703ae0769" Oct 10 18:11:08 crc kubenswrapper[4799]: I1010 18:11:08.693416 4799 scope.go:117] "RemoveContainer" containerID="a52044ad423ee32529e72e713a84176f092a7d23535e74c5d6c641fe4d5b550d" Oct 10 18:11:08 crc kubenswrapper[4799]: I1010 18:11:08.726739 4799 scope.go:117] "RemoveContainer" containerID="670226d31cdc3ffda0f0631a1acdd07742a6dd714a8d0468414e3727487ec1ce" Oct 10 18:11:08 crc kubenswrapper[4799]: I1010 18:11:08.745845 4799 scope.go:117] "RemoveContainer" containerID="df2a19a9c85cf31c0420e54e60b29aa4ff7853062bd390bbcee8763c5b47d818" Oct 10 18:11:21 crc kubenswrapper[4799]: I1010 18:11:21.784525 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-healthmanager-rsn7b"] Oct 10 18:11:21 crc kubenswrapper[4799]: I1010 18:11:21.788540 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-healthmanager-rsn7b" Oct 10 18:11:21 crc kubenswrapper[4799]: I1010 18:11:21.794983 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-certs-secret" Oct 10 18:11:21 crc kubenswrapper[4799]: I1010 18:11:21.794993 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-healthmanager-scripts" Oct 10 18:11:21 crc kubenswrapper[4799]: I1010 18:11:21.795607 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-healthmanager-config-data" Oct 10 18:11:21 crc kubenswrapper[4799]: I1010 18:11:21.823742 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-healthmanager-rsn7b"] Oct 10 18:11:21 crc kubenswrapper[4799]: I1010 18:11:21.957025 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/feecd9e6-4f59-495f-85ab-0067ed38a79c-config-data\") pod \"octavia-healthmanager-rsn7b\" (UID: \"feecd9e6-4f59-495f-85ab-0067ed38a79c\") " pod="openstack/octavia-healthmanager-rsn7b" Oct 10 18:11:21 crc kubenswrapper[4799]: I1010 18:11:21.957093 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feecd9e6-4f59-495f-85ab-0067ed38a79c-combined-ca-bundle\") pod \"octavia-healthmanager-rsn7b\" (UID: \"feecd9e6-4f59-495f-85ab-0067ed38a79c\") " pod="openstack/octavia-healthmanager-rsn7b" Oct 10 18:11:21 crc kubenswrapper[4799]: I1010 18:11:21.957379 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/feecd9e6-4f59-495f-85ab-0067ed38a79c-scripts\") pod \"octavia-healthmanager-rsn7b\" (UID: \"feecd9e6-4f59-495f-85ab-0067ed38a79c\") " pod="openstack/octavia-healthmanager-rsn7b" Oct 10 18:11:21 crc kubenswrapper[4799]: I1010 18:11:21.957440 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/feecd9e6-4f59-495f-85ab-0067ed38a79c-amphora-certs\") pod \"octavia-healthmanager-rsn7b\" (UID: \"feecd9e6-4f59-495f-85ab-0067ed38a79c\") " pod="openstack/octavia-healthmanager-rsn7b" Oct 10 18:11:21 crc kubenswrapper[4799]: I1010 18:11:21.957472 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/feecd9e6-4f59-495f-85ab-0067ed38a79c-hm-ports\") pod \"octavia-healthmanager-rsn7b\" (UID: 
\"feecd9e6-4f59-495f-85ab-0067ed38a79c\") " pod="openstack/octavia-healthmanager-rsn7b" Oct 10 18:11:21 crc kubenswrapper[4799]: I1010 18:11:21.957492 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/feecd9e6-4f59-495f-85ab-0067ed38a79c-config-data-merged\") pod \"octavia-healthmanager-rsn7b\" (UID: \"feecd9e6-4f59-495f-85ab-0067ed38a79c\") " pod="openstack/octavia-healthmanager-rsn7b" Oct 10 18:11:22 crc kubenswrapper[4799]: I1010 18:11:22.059149 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/feecd9e6-4f59-495f-85ab-0067ed38a79c-scripts\") pod \"octavia-healthmanager-rsn7b\" (UID: \"feecd9e6-4f59-495f-85ab-0067ed38a79c\") " pod="openstack/octavia-healthmanager-rsn7b" Oct 10 18:11:22 crc kubenswrapper[4799]: I1010 18:11:22.059224 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/feecd9e6-4f59-495f-85ab-0067ed38a79c-amphora-certs\") pod \"octavia-healthmanager-rsn7b\" (UID: \"feecd9e6-4f59-495f-85ab-0067ed38a79c\") " pod="openstack/octavia-healthmanager-rsn7b" Oct 10 18:11:22 crc kubenswrapper[4799]: I1010 18:11:22.059263 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/feecd9e6-4f59-495f-85ab-0067ed38a79c-hm-ports\") pod \"octavia-healthmanager-rsn7b\" (UID: \"feecd9e6-4f59-495f-85ab-0067ed38a79c\") " pod="openstack/octavia-healthmanager-rsn7b" Oct 10 18:11:22 crc kubenswrapper[4799]: I1010 18:11:22.059294 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/feecd9e6-4f59-495f-85ab-0067ed38a79c-config-data-merged\") pod \"octavia-healthmanager-rsn7b\" (UID: \"feecd9e6-4f59-495f-85ab-0067ed38a79c\") " pod="openstack/octavia-healthmanager-rsn7b" Oct 10 18:11:22 crc kubenswrapper[4799]: I1010 18:11:22.059478 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/feecd9e6-4f59-495f-85ab-0067ed38a79c-config-data\") pod \"octavia-healthmanager-rsn7b\" (UID: \"feecd9e6-4f59-495f-85ab-0067ed38a79c\") " pod="openstack/octavia-healthmanager-rsn7b" Oct 10 18:11:22 crc kubenswrapper[4799]: I1010 18:11:22.059521 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feecd9e6-4f59-495f-85ab-0067ed38a79c-combined-ca-bundle\") pod \"octavia-healthmanager-rsn7b\" (UID: \"feecd9e6-4f59-495f-85ab-0067ed38a79c\") " pod="openstack/octavia-healthmanager-rsn7b" Oct 10 18:11:22 crc kubenswrapper[4799]: I1010 18:11:22.060432 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/feecd9e6-4f59-495f-85ab-0067ed38a79c-hm-ports\") pod \"octavia-healthmanager-rsn7b\" (UID: \"feecd9e6-4f59-495f-85ab-0067ed38a79c\") " pod="openstack/octavia-healthmanager-rsn7b" Oct 10 18:11:22 crc kubenswrapper[4799]: I1010 18:11:22.060591 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/feecd9e6-4f59-495f-85ab-0067ed38a79c-config-data-merged\") pod \"octavia-healthmanager-rsn7b\" (UID: \"feecd9e6-4f59-495f-85ab-0067ed38a79c\") " pod="openstack/octavia-healthmanager-rsn7b" Oct 10 
18:11:22 crc kubenswrapper[4799]: I1010 18:11:22.066875 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feecd9e6-4f59-495f-85ab-0067ed38a79c-combined-ca-bundle\") pod \"octavia-healthmanager-rsn7b\" (UID: \"feecd9e6-4f59-495f-85ab-0067ed38a79c\") " pod="openstack/octavia-healthmanager-rsn7b" Oct 10 18:11:22 crc kubenswrapper[4799]: I1010 18:11:22.067291 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/feecd9e6-4f59-495f-85ab-0067ed38a79c-scripts\") pod \"octavia-healthmanager-rsn7b\" (UID: \"feecd9e6-4f59-495f-85ab-0067ed38a79c\") " pod="openstack/octavia-healthmanager-rsn7b" Oct 10 18:11:22 crc kubenswrapper[4799]: I1010 18:11:22.068725 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/feecd9e6-4f59-495f-85ab-0067ed38a79c-config-data\") pod \"octavia-healthmanager-rsn7b\" (UID: \"feecd9e6-4f59-495f-85ab-0067ed38a79c\") " pod="openstack/octavia-healthmanager-rsn7b" Oct 10 18:11:22 crc kubenswrapper[4799]: I1010 18:11:22.083739 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/feecd9e6-4f59-495f-85ab-0067ed38a79c-amphora-certs\") pod \"octavia-healthmanager-rsn7b\" (UID: \"feecd9e6-4f59-495f-85ab-0067ed38a79c\") " pod="openstack/octavia-healthmanager-rsn7b" Oct 10 18:11:22 crc kubenswrapper[4799]: I1010 18:11:22.124024 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-healthmanager-rsn7b" Oct 10 18:11:22 crc kubenswrapper[4799]: I1010 18:11:22.744059 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-healthmanager-rsn7b"] Oct 10 18:11:22 crc kubenswrapper[4799]: W1010 18:11:22.746446 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfeecd9e6_4f59_495f_85ab_0067ed38a79c.slice/crio-bab19df466433c655c7d540bf2a5c68fa815e1ad6a221ff07db17551dd2c99b7 WatchSource:0}: Error finding container bab19df466433c655c7d540bf2a5c68fa815e1ad6a221ff07db17551dd2c99b7: Status 404 returned error can't find the container with id bab19df466433c655c7d540bf2a5c68fa815e1ad6a221ff07db17551dd2c99b7 Oct 10 18:11:23 crc kubenswrapper[4799]: I1010 18:11:23.036506 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-rljmt"] Oct 10 18:11:23 crc kubenswrapper[4799]: I1010 18:11:23.060437 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-rljmt"] Oct 10 18:11:23 crc kubenswrapper[4799]: I1010 18:11:23.425720 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0bb189b2-d065-4e80-921f-f9fd38382e9f" path="/var/lib/kubelet/pods/0bb189b2-d065-4e80-921f-f9fd38382e9f/volumes" Oct 10 18:11:23 crc kubenswrapper[4799]: I1010 18:11:23.486914 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-rsn7b" event={"ID":"feecd9e6-4f59-495f-85ab-0067ed38a79c","Type":"ContainerStarted","Data":"27a9295dceda245233a46855669b48c9423e5b54bd4c06621a7be3c32089cf3c"} Oct 10 18:11:23 crc kubenswrapper[4799]: I1010 18:11:23.486988 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-rsn7b" 
event={"ID":"feecd9e6-4f59-495f-85ab-0067ed38a79c","Type":"ContainerStarted","Data":"bab19df466433c655c7d540bf2a5c68fa815e1ad6a221ff07db17551dd2c99b7"} Oct 10 18:11:23 crc kubenswrapper[4799]: I1010 18:11:23.779539 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-housekeeping-8fpkg"] Oct 10 18:11:23 crc kubenswrapper[4799]: I1010 18:11:23.810447 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-housekeeping-8fpkg" Oct 10 18:11:23 crc kubenswrapper[4799]: I1010 18:11:23.814182 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-housekeeping-scripts" Oct 10 18:11:23 crc kubenswrapper[4799]: I1010 18:11:23.814535 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-housekeeping-config-data" Oct 10 18:11:23 crc kubenswrapper[4799]: I1010 18:11:23.836088 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-housekeeping-8fpkg"] Oct 10 18:11:23 crc kubenswrapper[4799]: I1010 18:11:23.901849 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f287658-c2be-401e-89a1-89203fadb380-combined-ca-bundle\") pod \"octavia-housekeeping-8fpkg\" (UID: \"7f287658-c2be-401e-89a1-89203fadb380\") " pod="openstack/octavia-housekeeping-8fpkg" Oct 10 18:11:23 crc kubenswrapper[4799]: I1010 18:11:23.902036 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/7f287658-c2be-401e-89a1-89203fadb380-config-data-merged\") pod \"octavia-housekeeping-8fpkg\" (UID: \"7f287658-c2be-401e-89a1-89203fadb380\") " pod="openstack/octavia-housekeeping-8fpkg" Oct 10 18:11:23 crc kubenswrapper[4799]: I1010 18:11:23.902256 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/7f287658-c2be-401e-89a1-89203fadb380-amphora-certs\") pod \"octavia-housekeeping-8fpkg\" (UID: \"7f287658-c2be-401e-89a1-89203fadb380\") " pod="openstack/octavia-housekeeping-8fpkg" Oct 10 18:11:23 crc kubenswrapper[4799]: I1010 18:11:23.902326 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f287658-c2be-401e-89a1-89203fadb380-config-data\") pod \"octavia-housekeeping-8fpkg\" (UID: \"7f287658-c2be-401e-89a1-89203fadb380\") " pod="openstack/octavia-housekeeping-8fpkg" Oct 10 18:11:23 crc kubenswrapper[4799]: I1010 18:11:23.902392 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f287658-c2be-401e-89a1-89203fadb380-scripts\") pod \"octavia-housekeeping-8fpkg\" (UID: \"7f287658-c2be-401e-89a1-89203fadb380\") " pod="openstack/octavia-housekeeping-8fpkg" Oct 10 18:11:23 crc kubenswrapper[4799]: I1010 18:11:23.902538 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/7f287658-c2be-401e-89a1-89203fadb380-hm-ports\") pod \"octavia-housekeeping-8fpkg\" (UID: \"7f287658-c2be-401e-89a1-89203fadb380\") " pod="openstack/octavia-housekeeping-8fpkg" Oct 10 18:11:24 crc kubenswrapper[4799]: I1010 18:11:24.003868 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"hm-ports\" (UniqueName: \"kubernetes.io/configmap/7f287658-c2be-401e-89a1-89203fadb380-hm-ports\") pod \"octavia-housekeeping-8fpkg\" (UID: \"7f287658-c2be-401e-89a1-89203fadb380\") " pod="openstack/octavia-housekeeping-8fpkg" Oct 10 18:11:24 crc kubenswrapper[4799]: I1010 18:11:24.003939 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f287658-c2be-401e-89a1-89203fadb380-combined-ca-bundle\") pod \"octavia-housekeeping-8fpkg\" (UID: \"7f287658-c2be-401e-89a1-89203fadb380\") " pod="openstack/octavia-housekeeping-8fpkg" Oct 10 18:11:24 crc kubenswrapper[4799]: I1010 18:11:24.003989 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/7f287658-c2be-401e-89a1-89203fadb380-config-data-merged\") pod \"octavia-housekeeping-8fpkg\" (UID: \"7f287658-c2be-401e-89a1-89203fadb380\") " pod="openstack/octavia-housekeeping-8fpkg" Oct 10 18:11:24 crc kubenswrapper[4799]: I1010 18:11:24.004080 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/7f287658-c2be-401e-89a1-89203fadb380-amphora-certs\") pod \"octavia-housekeeping-8fpkg\" (UID: \"7f287658-c2be-401e-89a1-89203fadb380\") " pod="openstack/octavia-housekeeping-8fpkg" Oct 10 18:11:24 crc kubenswrapper[4799]: I1010 18:11:24.004116 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f287658-c2be-401e-89a1-89203fadb380-config-data\") pod \"octavia-housekeeping-8fpkg\" (UID: \"7f287658-c2be-401e-89a1-89203fadb380\") " pod="openstack/octavia-housekeeping-8fpkg" Oct 10 18:11:24 crc kubenswrapper[4799]: I1010 18:11:24.004145 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f287658-c2be-401e-89a1-89203fadb380-scripts\") pod \"octavia-housekeeping-8fpkg\" (UID: \"7f287658-c2be-401e-89a1-89203fadb380\") " pod="openstack/octavia-housekeeping-8fpkg" Oct 10 18:11:24 crc kubenswrapper[4799]: I1010 18:11:24.005644 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/7f287658-c2be-401e-89a1-89203fadb380-config-data-merged\") pod \"octavia-housekeeping-8fpkg\" (UID: \"7f287658-c2be-401e-89a1-89203fadb380\") " pod="openstack/octavia-housekeeping-8fpkg" Oct 10 18:11:24 crc kubenswrapper[4799]: I1010 18:11:24.006591 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/7f287658-c2be-401e-89a1-89203fadb380-hm-ports\") pod \"octavia-housekeeping-8fpkg\" (UID: \"7f287658-c2be-401e-89a1-89203fadb380\") " pod="openstack/octavia-housekeeping-8fpkg" Oct 10 18:11:24 crc kubenswrapper[4799]: I1010 18:11:24.012369 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f287658-c2be-401e-89a1-89203fadb380-scripts\") pod \"octavia-housekeeping-8fpkg\" (UID: \"7f287658-c2be-401e-89a1-89203fadb380\") " pod="openstack/octavia-housekeeping-8fpkg" Oct 10 18:11:24 crc kubenswrapper[4799]: I1010 18:11:24.014057 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/7f287658-c2be-401e-89a1-89203fadb380-amphora-certs\") pod \"octavia-housekeeping-8fpkg\" (UID: 
\"7f287658-c2be-401e-89a1-89203fadb380\") " pod="openstack/octavia-housekeeping-8fpkg" Oct 10 18:11:24 crc kubenswrapper[4799]: I1010 18:11:24.014388 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f287658-c2be-401e-89a1-89203fadb380-combined-ca-bundle\") pod \"octavia-housekeeping-8fpkg\" (UID: \"7f287658-c2be-401e-89a1-89203fadb380\") " pod="openstack/octavia-housekeeping-8fpkg" Oct 10 18:11:24 crc kubenswrapper[4799]: I1010 18:11:24.015471 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f287658-c2be-401e-89a1-89203fadb380-config-data\") pod \"octavia-housekeeping-8fpkg\" (UID: \"7f287658-c2be-401e-89a1-89203fadb380\") " pod="openstack/octavia-housekeeping-8fpkg" Oct 10 18:11:24 crc kubenswrapper[4799]: I1010 18:11:24.136494 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-housekeeping-8fpkg" Oct 10 18:11:24 crc kubenswrapper[4799]: I1010 18:11:24.718681 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-housekeeping-8fpkg"] Oct 10 18:11:25 crc kubenswrapper[4799]: I1010 18:11:25.348158 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-worker-5fwqt"] Oct 10 18:11:25 crc kubenswrapper[4799]: I1010 18:11:25.351520 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-worker-5fwqt" Oct 10 18:11:25 crc kubenswrapper[4799]: I1010 18:11:25.354536 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-worker-config-data" Oct 10 18:11:25 crc kubenswrapper[4799]: I1010 18:11:25.355093 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-worker-scripts" Oct 10 18:11:25 crc kubenswrapper[4799]: I1010 18:11:25.363434 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-worker-5fwqt"] Oct 10 18:11:25 crc kubenswrapper[4799]: I1010 18:11:25.512062 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-8fpkg" event={"ID":"7f287658-c2be-401e-89a1-89203fadb380","Type":"ContainerStarted","Data":"ed0699f1b18ed45d506facf4f0e279a1721176b31ec9128c9922a03cbafa0e8c"} Oct 10 18:11:25 crc kubenswrapper[4799]: I1010 18:11:25.515093 4799 generic.go:334] "Generic (PLEG): container finished" podID="feecd9e6-4f59-495f-85ab-0067ed38a79c" containerID="27a9295dceda245233a46855669b48c9423e5b54bd4c06621a7be3c32089cf3c" exitCode=0 Oct 10 18:11:25 crc kubenswrapper[4799]: I1010 18:11:25.515159 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-rsn7b" event={"ID":"feecd9e6-4f59-495f-85ab-0067ed38a79c","Type":"ContainerDied","Data":"27a9295dceda245233a46855669b48c9423e5b54bd4c06621a7be3c32089cf3c"} Oct 10 18:11:25 crc kubenswrapper[4799]: I1010 18:11:25.544416 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0908b7ab-2ec4-4514-a38c-0595d3554396-combined-ca-bundle\") pod \"octavia-worker-5fwqt\" (UID: \"0908b7ab-2ec4-4514-a38c-0595d3554396\") " pod="openstack/octavia-worker-5fwqt" Oct 10 18:11:25 crc kubenswrapper[4799]: I1010 18:11:25.544660 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: 
\"kubernetes.io/empty-dir/0908b7ab-2ec4-4514-a38c-0595d3554396-config-data-merged\") pod \"octavia-worker-5fwqt\" (UID: \"0908b7ab-2ec4-4514-a38c-0595d3554396\") " pod="openstack/octavia-worker-5fwqt" Oct 10 18:11:25 crc kubenswrapper[4799]: I1010 18:11:25.544898 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0908b7ab-2ec4-4514-a38c-0595d3554396-scripts\") pod \"octavia-worker-5fwqt\" (UID: \"0908b7ab-2ec4-4514-a38c-0595d3554396\") " pod="openstack/octavia-worker-5fwqt" Oct 10 18:11:25 crc kubenswrapper[4799]: I1010 18:11:25.545273 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/0908b7ab-2ec4-4514-a38c-0595d3554396-hm-ports\") pod \"octavia-worker-5fwqt\" (UID: \"0908b7ab-2ec4-4514-a38c-0595d3554396\") " pod="openstack/octavia-worker-5fwqt" Oct 10 18:11:25 crc kubenswrapper[4799]: I1010 18:11:25.545749 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0908b7ab-2ec4-4514-a38c-0595d3554396-config-data\") pod \"octavia-worker-5fwqt\" (UID: \"0908b7ab-2ec4-4514-a38c-0595d3554396\") " pod="openstack/octavia-worker-5fwqt" Oct 10 18:11:25 crc kubenswrapper[4799]: I1010 18:11:25.547595 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/0908b7ab-2ec4-4514-a38c-0595d3554396-amphora-certs\") pod \"octavia-worker-5fwqt\" (UID: \"0908b7ab-2ec4-4514-a38c-0595d3554396\") " pod="openstack/octavia-worker-5fwqt" Oct 10 18:11:25 crc kubenswrapper[4799]: I1010 18:11:25.649627 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/0908b7ab-2ec4-4514-a38c-0595d3554396-amphora-certs\") pod \"octavia-worker-5fwqt\" (UID: \"0908b7ab-2ec4-4514-a38c-0595d3554396\") " pod="openstack/octavia-worker-5fwqt" Oct 10 18:11:25 crc kubenswrapper[4799]: I1010 18:11:25.649807 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0908b7ab-2ec4-4514-a38c-0595d3554396-combined-ca-bundle\") pod \"octavia-worker-5fwqt\" (UID: \"0908b7ab-2ec4-4514-a38c-0595d3554396\") " pod="openstack/octavia-worker-5fwqt" Oct 10 18:11:25 crc kubenswrapper[4799]: I1010 18:11:25.649874 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/0908b7ab-2ec4-4514-a38c-0595d3554396-config-data-merged\") pod \"octavia-worker-5fwqt\" (UID: \"0908b7ab-2ec4-4514-a38c-0595d3554396\") " pod="openstack/octavia-worker-5fwqt" Oct 10 18:11:25 crc kubenswrapper[4799]: I1010 18:11:25.649976 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0908b7ab-2ec4-4514-a38c-0595d3554396-scripts\") pod \"octavia-worker-5fwqt\" (UID: \"0908b7ab-2ec4-4514-a38c-0595d3554396\") " pod="openstack/octavia-worker-5fwqt" Oct 10 18:11:25 crc kubenswrapper[4799]: I1010 18:11:25.650794 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/0908b7ab-2ec4-4514-a38c-0595d3554396-hm-ports\") pod \"octavia-worker-5fwqt\" (UID: \"0908b7ab-2ec4-4514-a38c-0595d3554396\") " 
pod="openstack/octavia-worker-5fwqt" Oct 10 18:11:25 crc kubenswrapper[4799]: I1010 18:11:25.650904 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0908b7ab-2ec4-4514-a38c-0595d3554396-config-data\") pod \"octavia-worker-5fwqt\" (UID: \"0908b7ab-2ec4-4514-a38c-0595d3554396\") " pod="openstack/octavia-worker-5fwqt" Oct 10 18:11:25 crc kubenswrapper[4799]: I1010 18:11:25.652231 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/0908b7ab-2ec4-4514-a38c-0595d3554396-config-data-merged\") pod \"octavia-worker-5fwqt\" (UID: \"0908b7ab-2ec4-4514-a38c-0595d3554396\") " pod="openstack/octavia-worker-5fwqt" Oct 10 18:11:25 crc kubenswrapper[4799]: I1010 18:11:25.653671 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/0908b7ab-2ec4-4514-a38c-0595d3554396-hm-ports\") pod \"octavia-worker-5fwqt\" (UID: \"0908b7ab-2ec4-4514-a38c-0595d3554396\") " pod="openstack/octavia-worker-5fwqt" Oct 10 18:11:25 crc kubenswrapper[4799]: I1010 18:11:25.655126 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/0908b7ab-2ec4-4514-a38c-0595d3554396-amphora-certs\") pod \"octavia-worker-5fwqt\" (UID: \"0908b7ab-2ec4-4514-a38c-0595d3554396\") " pod="openstack/octavia-worker-5fwqt" Oct 10 18:11:25 crc kubenswrapper[4799]: I1010 18:11:25.655777 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0908b7ab-2ec4-4514-a38c-0595d3554396-combined-ca-bundle\") pod \"octavia-worker-5fwqt\" (UID: \"0908b7ab-2ec4-4514-a38c-0595d3554396\") " pod="openstack/octavia-worker-5fwqt" Oct 10 18:11:25 crc kubenswrapper[4799]: I1010 18:11:25.656059 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0908b7ab-2ec4-4514-a38c-0595d3554396-config-data\") pod \"octavia-worker-5fwqt\" (UID: \"0908b7ab-2ec4-4514-a38c-0595d3554396\") " pod="openstack/octavia-worker-5fwqt" Oct 10 18:11:25 crc kubenswrapper[4799]: I1010 18:11:25.656687 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0908b7ab-2ec4-4514-a38c-0595d3554396-scripts\") pod \"octavia-worker-5fwqt\" (UID: \"0908b7ab-2ec4-4514-a38c-0595d3554396\") " pod="openstack/octavia-worker-5fwqt" Oct 10 18:11:25 crc kubenswrapper[4799]: I1010 18:11:25.675259 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-worker-5fwqt" Oct 10 18:11:26 crc kubenswrapper[4799]: I1010 18:11:26.229547 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-worker-5fwqt"] Oct 10 18:11:26 crc kubenswrapper[4799]: W1010 18:11:26.410440 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0908b7ab_2ec4_4514_a38c_0595d3554396.slice/crio-5cfe6b0d34a80d4ba45cb7acfc39785fc1b08c763c194d85d1f845893125d611 WatchSource:0}: Error finding container 5cfe6b0d34a80d4ba45cb7acfc39785fc1b08c763c194d85d1f845893125d611: Status 404 returned error can't find the container with id 5cfe6b0d34a80d4ba45cb7acfc39785fc1b08c763c194d85d1f845893125d611 Oct 10 18:11:26 crc kubenswrapper[4799]: I1010 18:11:26.546589 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-rsn7b" event={"ID":"feecd9e6-4f59-495f-85ab-0067ed38a79c","Type":"ContainerStarted","Data":"beece37e49d94e7ca265cd0982a245e8163b0d0f8892092a45d33d8292540282"} Oct 10 18:11:26 crc kubenswrapper[4799]: I1010 18:11:26.548165 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-healthmanager-rsn7b" Oct 10 18:11:26 crc kubenswrapper[4799]: I1010 18:11:26.558134 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-5fwqt" event={"ID":"0908b7ab-2ec4-4514-a38c-0595d3554396","Type":"ContainerStarted","Data":"5cfe6b0d34a80d4ba45cb7acfc39785fc1b08c763c194d85d1f845893125d611"} Oct 10 18:11:26 crc kubenswrapper[4799]: I1010 18:11:26.593074 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-healthmanager-rsn7b" podStartSLOduration=5.593048298 podStartE2EDuration="5.593048298s" podCreationTimestamp="2025-10-10 18:11:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:11:26.585225108 +0000 UTC m=+5980.093549233" watchObservedRunningTime="2025-10-10 18:11:26.593048298 +0000 UTC m=+5980.101372423" Oct 10 18:11:27 crc kubenswrapper[4799]: I1010 18:11:27.568891 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-8fpkg" event={"ID":"7f287658-c2be-401e-89a1-89203fadb380","Type":"ContainerStarted","Data":"10297f7b5436c863a6297fefe31f54f6356d2bead91f7352f1101b0723ff7b0f"} Oct 10 18:11:28 crc kubenswrapper[4799]: I1010 18:11:28.588166 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-5fwqt" event={"ID":"0908b7ab-2ec4-4514-a38c-0595d3554396","Type":"ContainerStarted","Data":"9aebacc5948d8524ca5ec30263133350878418a552c5fd9ac3500c276f7c96e5"} Oct 10 18:11:28 crc kubenswrapper[4799]: I1010 18:11:28.591660 4799 generic.go:334] "Generic (PLEG): container finished" podID="7f287658-c2be-401e-89a1-89203fadb380" containerID="10297f7b5436c863a6297fefe31f54f6356d2bead91f7352f1101b0723ff7b0f" exitCode=0 Oct 10 18:11:28 crc kubenswrapper[4799]: I1010 18:11:28.592031 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-8fpkg" event={"ID":"7f287658-c2be-401e-89a1-89203fadb380","Type":"ContainerDied","Data":"10297f7b5436c863a6297fefe31f54f6356d2bead91f7352f1101b0723ff7b0f"} Oct 10 18:11:29 crc kubenswrapper[4799]: I1010 18:11:29.600844 4799 generic.go:334] "Generic (PLEG): container finished" podID="0908b7ab-2ec4-4514-a38c-0595d3554396" 
containerID="9aebacc5948d8524ca5ec30263133350878418a552c5fd9ac3500c276f7c96e5" exitCode=0 Oct 10 18:11:29 crc kubenswrapper[4799]: I1010 18:11:29.600945 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-5fwqt" event={"ID":"0908b7ab-2ec4-4514-a38c-0595d3554396","Type":"ContainerDied","Data":"9aebacc5948d8524ca5ec30263133350878418a552c5fd9ac3500c276f7c96e5"} Oct 10 18:11:29 crc kubenswrapper[4799]: I1010 18:11:29.603740 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-8fpkg" event={"ID":"7f287658-c2be-401e-89a1-89203fadb380","Type":"ContainerStarted","Data":"b230678b12813b4982019214ec73f75da6f995b20a71a1df06e270d0a5033383"} Oct 10 18:11:29 crc kubenswrapper[4799]: I1010 18:11:29.604862 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-housekeeping-8fpkg" Oct 10 18:11:29 crc kubenswrapper[4799]: I1010 18:11:29.664620 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-housekeeping-8fpkg" podStartSLOduration=4.928513619 podStartE2EDuration="6.664598563s" podCreationTimestamp="2025-10-10 18:11:23 +0000 UTC" firstStartedPulling="2025-10-10 18:11:24.722936167 +0000 UTC m=+5978.231260302" lastFinishedPulling="2025-10-10 18:11:26.459021121 +0000 UTC m=+5979.967345246" observedRunningTime="2025-10-10 18:11:29.658587027 +0000 UTC m=+5983.166911152" watchObservedRunningTime="2025-10-10 18:11:29.664598563 +0000 UTC m=+5983.172922678" Oct 10 18:11:30 crc kubenswrapper[4799]: I1010 18:11:30.617558 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-5fwqt" event={"ID":"0908b7ab-2ec4-4514-a38c-0595d3554396","Type":"ContainerStarted","Data":"13b621c4b31f059cc9c4ad7b69c91aaa907e226c7cf470e1b415a1973bdf98e0"} Oct 10 18:11:30 crc kubenswrapper[4799]: I1010 18:11:30.654116 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-worker-5fwqt" podStartSLOduration=4.194866504 podStartE2EDuration="5.654085321s" podCreationTimestamp="2025-10-10 18:11:25 +0000 UTC" firstStartedPulling="2025-10-10 18:11:26.420952584 +0000 UTC m=+5979.929276699" lastFinishedPulling="2025-10-10 18:11:27.880171401 +0000 UTC m=+5981.388495516" observedRunningTime="2025-10-10 18:11:30.650463733 +0000 UTC m=+5984.158787858" watchObservedRunningTime="2025-10-10 18:11:30.654085321 +0000 UTC m=+5984.162409466" Oct 10 18:11:31 crc kubenswrapper[4799]: I1010 18:11:31.631619 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-worker-5fwqt" Oct 10 18:11:33 crc kubenswrapper[4799]: I1010 18:11:33.045086 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5ea6-account-create-6fwkh"] Oct 10 18:11:33 crc kubenswrapper[4799]: I1010 18:11:33.060873 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-5ea6-account-create-6fwkh"] Oct 10 18:11:33 crc kubenswrapper[4799]: I1010 18:11:33.416672 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b0bb367f-695f-493a-8cc2-04c336682c1f" path="/var/lib/kubelet/pods/b0bb367f-695f-493a-8cc2-04c336682c1f/volumes" Oct 10 18:11:37 crc kubenswrapper[4799]: I1010 18:11:37.178110 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-healthmanager-rsn7b" Oct 10 18:11:39 crc kubenswrapper[4799]: I1010 18:11:39.175709 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-housekeeping-8fpkg" Oct 
Oct 10 18:11:40 crc kubenswrapper[4799]: I1010 18:11:40.712589 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-worker-5fwqt"
Oct 10 18:11:42 crc kubenswrapper[4799]: I1010 18:11:42.056860 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-hq6ns"]
Oct 10 18:11:42 crc kubenswrapper[4799]: I1010 18:11:42.068647 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-hq6ns"]
Oct 10 18:11:43 crc kubenswrapper[4799]: I1010 18:11:43.421353 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a61c870f-1ead-4ccb-b226-7326e146af42" path="/var/lib/kubelet/pods/a61c870f-1ead-4ccb-b226-7326e146af42/volumes"
Oct 10 18:12:08 crc kubenswrapper[4799]: I1010 18:12:08.940278 4799 scope.go:117] "RemoveContainer" containerID="9df01443b9762abb687a9d8999266dc191394bd7b698a1a0506582d6606d989f"
Oct 10 18:12:08 crc kubenswrapper[4799]: I1010 18:12:08.980164 4799 scope.go:117] "RemoveContainer" containerID="6f3c40b2e02fe491d7a8cb2cd3cd4e974eb3094ffd182dc796407938e39fc38c"
Oct 10 18:12:09 crc kubenswrapper[4799]: I1010 18:12:09.076160 4799 scope.go:117] "RemoveContainer" containerID="36d3ee12f53c9548c92cbe67328cdde1224408eb4d01d226e182cb955ade830e"
Oct 10 18:12:09 crc kubenswrapper[4799]: I1010 18:12:09.115948 4799 scope.go:117] "RemoveContainer" containerID="445fba41263834141280af4408a74bb8f56e3cae6baa7003c2d82f0939b59ca0"
Oct 10 18:12:25 crc kubenswrapper[4799]: I1010 18:12:25.045304 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-w9z7v"]
Oct 10 18:12:25 crc kubenswrapper[4799]: I1010 18:12:25.055131 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-w9z7v"]
Oct 10 18:12:25 crc kubenswrapper[4799]: I1010 18:12:25.421568 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b682ece-9af6-4f90-ab20-0251369b6791" path="/var/lib/kubelet/pods/4b682ece-9af6-4f90-ab20-0251369b6791/volumes"
Oct 10 18:12:28 crc kubenswrapper[4799]: E1010 18:12:28.199256 4799 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.145:50524->38.102.83.145:34753: write tcp 38.102.83.145:50524->38.102.83.145:34753: write: connection reset by peer
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.796726 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-5c878c4b95-t5jdl"]
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.799359 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5c878c4b95-t5jdl"
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.815920 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts"
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.815934 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon"
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.815999 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-j2z7d"
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.815920 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data"
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.827019 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljl8n\" (UniqueName: \"kubernetes.io/projected/75ca2da4-fcc5-4b58-9e57-5555df0b3ab7-kube-api-access-ljl8n\") pod \"horizon-5c878c4b95-t5jdl\" (UID: \"75ca2da4-fcc5-4b58-9e57-5555df0b3ab7\") " pod="openstack/horizon-5c878c4b95-t5jdl"
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.827097 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/75ca2da4-fcc5-4b58-9e57-5555df0b3ab7-horizon-secret-key\") pod \"horizon-5c878c4b95-t5jdl\" (UID: \"75ca2da4-fcc5-4b58-9e57-5555df0b3ab7\") " pod="openstack/horizon-5c878c4b95-t5jdl"
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.827134 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75ca2da4-fcc5-4b58-9e57-5555df0b3ab7-logs\") pod \"horizon-5c878c4b95-t5jdl\" (UID: \"75ca2da4-fcc5-4b58-9e57-5555df0b3ab7\") " pod="openstack/horizon-5c878c4b95-t5jdl"
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.827180 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/75ca2da4-fcc5-4b58-9e57-5555df0b3ab7-scripts\") pod \"horizon-5c878c4b95-t5jdl\" (UID: \"75ca2da4-fcc5-4b58-9e57-5555df0b3ab7\") " pod="openstack/horizon-5c878c4b95-t5jdl"
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.827251 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/75ca2da4-fcc5-4b58-9e57-5555df0b3ab7-config-data\") pod \"horizon-5c878c4b95-t5jdl\" (UID: \"75ca2da4-fcc5-4b58-9e57-5555df0b3ab7\") " pod="openstack/horizon-5c878c4b95-t5jdl"
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.828484 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5c878c4b95-t5jdl"]
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.852403 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.852679 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="4d6ca553-e264-4abd-a853-bf86bf3b22bd" containerName="glance-log" containerID="cri-o://6ef17f87538e7d672932f8e4b33ace3b8c80f4997198bcb997f948e0f6a49ef3" gracePeriod=30
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.853175 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="4d6ca553-e264-4abd-a853-bf86bf3b22bd" containerName="glance-httpd" containerID="cri-o://ecd2dd18bc296c70097d8ef6fc6df5fb035af072ab286ceadfd05c8b44dbf0ff" gracePeriod=30
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.884094 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6bf9dd6ff7-gfnlb"]
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.898794 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6bf9dd6ff7-gfnlb"
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.912609 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6bf9dd6ff7-gfnlb"]
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.928774 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bf3c5fcf-3145-40c0-b93c-ba5eac936b43-config-data\") pod \"horizon-6bf9dd6ff7-gfnlb\" (UID: \"bf3c5fcf-3145-40c0-b93c-ba5eac936b43\") " pod="openstack/horizon-6bf9dd6ff7-gfnlb"
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.928831 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/75ca2da4-fcc5-4b58-9e57-5555df0b3ab7-config-data\") pod \"horizon-5c878c4b95-t5jdl\" (UID: \"75ca2da4-fcc5-4b58-9e57-5555df0b3ab7\") " pod="openstack/horizon-5c878c4b95-t5jdl"
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.928869 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljl8n\" (UniqueName: \"kubernetes.io/projected/75ca2da4-fcc5-4b58-9e57-5555df0b3ab7-kube-api-access-ljl8n\") pod \"horizon-5c878c4b95-t5jdl\" (UID: \"75ca2da4-fcc5-4b58-9e57-5555df0b3ab7\") " pod="openstack/horizon-5c878c4b95-t5jdl"
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.928888 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/bf3c5fcf-3145-40c0-b93c-ba5eac936b43-horizon-secret-key\") pod \"horizon-6bf9dd6ff7-gfnlb\" (UID: \"bf3c5fcf-3145-40c0-b93c-ba5eac936b43\") " pod="openstack/horizon-6bf9dd6ff7-gfnlb"
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.928919 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf3c5fcf-3145-40c0-b93c-ba5eac936b43-logs\") pod \"horizon-6bf9dd6ff7-gfnlb\" (UID: \"bf3c5fcf-3145-40c0-b93c-ba5eac936b43\") " pod="openstack/horizon-6bf9dd6ff7-gfnlb"
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.928952 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/75ca2da4-fcc5-4b58-9e57-5555df0b3ab7-horizon-secret-key\") pod \"horizon-5c878c4b95-t5jdl\" (UID: \"75ca2da4-fcc5-4b58-9e57-5555df0b3ab7\") " pod="openstack/horizon-5c878c4b95-t5jdl"
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.928972 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bf3c5fcf-3145-40c0-b93c-ba5eac936b43-scripts\") pod \"horizon-6bf9dd6ff7-gfnlb\" (UID: \"bf3c5fcf-3145-40c0-b93c-ba5eac936b43\") " pod="openstack/horizon-6bf9dd6ff7-gfnlb"
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.929007 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75ca2da4-fcc5-4b58-9e57-5555df0b3ab7-logs\") pod \"horizon-5c878c4b95-t5jdl\" (UID: \"75ca2da4-fcc5-4b58-9e57-5555df0b3ab7\") " pod="openstack/horizon-5c878c4b95-t5jdl"
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.929034 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r9sd6\" (UniqueName: \"kubernetes.io/projected/bf3c5fcf-3145-40c0-b93c-ba5eac936b43-kube-api-access-r9sd6\") pod \"horizon-6bf9dd6ff7-gfnlb\" (UID: \"bf3c5fcf-3145-40c0-b93c-ba5eac936b43\") " pod="openstack/horizon-6bf9dd6ff7-gfnlb"
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.929069 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/75ca2da4-fcc5-4b58-9e57-5555df0b3ab7-scripts\") pod \"horizon-5c878c4b95-t5jdl\" (UID: \"75ca2da4-fcc5-4b58-9e57-5555df0b3ab7\") " pod="openstack/horizon-5c878c4b95-t5jdl"
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.929787 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.929852 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/75ca2da4-fcc5-4b58-9e57-5555df0b3ab7-scripts\") pod \"horizon-5c878c4b95-t5jdl\" (UID: \"75ca2da4-fcc5-4b58-9e57-5555df0b3ab7\") " pod="openstack/horizon-5c878c4b95-t5jdl"
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.930013 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="7619fca7-982b-4e59-aa11-127f345ffbc0" containerName="glance-log" containerID="cri-o://0f2fa7258bd37688952d053abceccaf84fc26464448073ec999397d92f6bb5ac" gracePeriod=30
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.930441 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="7619fca7-982b-4e59-aa11-127f345ffbc0" containerName="glance-httpd" containerID="cri-o://f16021709cb91c7869706e26dfc2ee021e3931ad08061ea2ce72ea1a23302c37" gracePeriod=30
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.930654 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/75ca2da4-fcc5-4b58-9e57-5555df0b3ab7-config-data\") pod \"horizon-5c878c4b95-t5jdl\" (UID: \"75ca2da4-fcc5-4b58-9e57-5555df0b3ab7\") " pod="openstack/horizon-5c878c4b95-t5jdl"
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.930911 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75ca2da4-fcc5-4b58-9e57-5555df0b3ab7-logs\") pod \"horizon-5c878c4b95-t5jdl\" (UID: \"75ca2da4-fcc5-4b58-9e57-5555df0b3ab7\") " pod="openstack/horizon-5c878c4b95-t5jdl"
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.944333 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/75ca2da4-fcc5-4b58-9e57-5555df0b3ab7-horizon-secret-key\") pod \"horizon-5c878c4b95-t5jdl\" (UID: \"75ca2da4-fcc5-4b58-9e57-5555df0b3ab7\") " pod="openstack/horizon-5c878c4b95-t5jdl"
Oct 10 18:12:32 crc kubenswrapper[4799]: I1010 18:12:32.948057 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ljl8n\" (UniqueName: \"kubernetes.io/projected/75ca2da4-fcc5-4b58-9e57-5555df0b3ab7-kube-api-access-ljl8n\") pod \"horizon-5c878c4b95-t5jdl\" (UID: \"75ca2da4-fcc5-4b58-9e57-5555df0b3ab7\") " pod="openstack/horizon-5c878c4b95-t5jdl"
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.030967 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bf3c5fcf-3145-40c0-b93c-ba5eac936b43-config-data\") pod \"horizon-6bf9dd6ff7-gfnlb\" (UID: \"bf3c5fcf-3145-40c0-b93c-ba5eac936b43\") " pod="openstack/horizon-6bf9dd6ff7-gfnlb"
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.031053 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/bf3c5fcf-3145-40c0-b93c-ba5eac936b43-horizon-secret-key\") pod \"horizon-6bf9dd6ff7-gfnlb\" (UID: \"bf3c5fcf-3145-40c0-b93c-ba5eac936b43\") " pod="openstack/horizon-6bf9dd6ff7-gfnlb"
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.031088 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf3c5fcf-3145-40c0-b93c-ba5eac936b43-logs\") pod \"horizon-6bf9dd6ff7-gfnlb\" (UID: \"bf3c5fcf-3145-40c0-b93c-ba5eac936b43\") " pod="openstack/horizon-6bf9dd6ff7-gfnlb"
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.031122 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bf3c5fcf-3145-40c0-b93c-ba5eac936b43-scripts\") pod \"horizon-6bf9dd6ff7-gfnlb\" (UID: \"bf3c5fcf-3145-40c0-b93c-ba5eac936b43\") " pod="openstack/horizon-6bf9dd6ff7-gfnlb"
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.031160 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r9sd6\" (UniqueName: \"kubernetes.io/projected/bf3c5fcf-3145-40c0-b93c-ba5eac936b43-kube-api-access-r9sd6\") pod \"horizon-6bf9dd6ff7-gfnlb\" (UID: \"bf3c5fcf-3145-40c0-b93c-ba5eac936b43\") " pod="openstack/horizon-6bf9dd6ff7-gfnlb"
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.031897 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf3c5fcf-3145-40c0-b93c-ba5eac936b43-logs\") pod \"horizon-6bf9dd6ff7-gfnlb\" (UID: \"bf3c5fcf-3145-40c0-b93c-ba5eac936b43\") " pod="openstack/horizon-6bf9dd6ff7-gfnlb"
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.032322 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bf3c5fcf-3145-40c0-b93c-ba5eac936b43-scripts\") pod \"horizon-6bf9dd6ff7-gfnlb\" (UID: \"bf3c5fcf-3145-40c0-b93c-ba5eac936b43\") " pod="openstack/horizon-6bf9dd6ff7-gfnlb"
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.032838 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bf3c5fcf-3145-40c0-b93c-ba5eac936b43-config-data\") pod \"horizon-6bf9dd6ff7-gfnlb\" (UID: \"bf3c5fcf-3145-40c0-b93c-ba5eac936b43\") " pod="openstack/horizon-6bf9dd6ff7-gfnlb"
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.036116 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/bf3c5fcf-3145-40c0-b93c-ba5eac936b43-horizon-secret-key\") pod \"horizon-6bf9dd6ff7-gfnlb\" (UID: \"bf3c5fcf-3145-40c0-b93c-ba5eac936b43\") " pod="openstack/horizon-6bf9dd6ff7-gfnlb"
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.050774 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r9sd6\" (UniqueName: \"kubernetes.io/projected/bf3c5fcf-3145-40c0-b93c-ba5eac936b43-kube-api-access-r9sd6\") pod \"horizon-6bf9dd6ff7-gfnlb\" (UID: \"bf3c5fcf-3145-40c0-b93c-ba5eac936b43\") " pod="openstack/horizon-6bf9dd6ff7-gfnlb"
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.120658 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5c878c4b95-t5jdl"
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.219807 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6bf9dd6ff7-gfnlb"
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.451375 4799 generic.go:334] "Generic (PLEG): container finished" podID="7619fca7-982b-4e59-aa11-127f345ffbc0" containerID="0f2fa7258bd37688952d053abceccaf84fc26464448073ec999397d92f6bb5ac" exitCode=143
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.455345 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5c878c4b95-t5jdl"]
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.455380 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"7619fca7-982b-4e59-aa11-127f345ffbc0","Type":"ContainerDied","Data":"0f2fa7258bd37688952d053abceccaf84fc26464448073ec999397d92f6bb5ac"}
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.457077 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7ccb584f4f-z4j4m"]
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.459238 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7ccb584f4f-z4j4m"
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.460201 4799 generic.go:334] "Generic (PLEG): container finished" podID="4d6ca553-e264-4abd-a853-bf86bf3b22bd" containerID="6ef17f87538e7d672932f8e4b33ace3b8c80f4997198bcb997f948e0f6a49ef3" exitCode=143
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.460227 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4d6ca553-e264-4abd-a853-bf86bf3b22bd","Type":"ContainerDied","Data":"6ef17f87538e7d672932f8e4b33ace3b8c80f4997198bcb997f948e0f6a49ef3"}
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.480565 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7ccb584f4f-z4j4m"]
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.536897 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5c878c4b95-t5jdl"]
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.541185 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5eefa605-65ba-438e-9d40-58c6225ff2ff-logs\") pod \"horizon-7ccb584f4f-z4j4m\" (UID: \"5eefa605-65ba-438e-9d40-58c6225ff2ff\") " pod="openstack/horizon-7ccb584f4f-z4j4m"
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.541253 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5eefa605-65ba-438e-9d40-58c6225ff2ff-config-data\") pod \"horizon-7ccb584f4f-z4j4m\" (UID: \"5eefa605-65ba-438e-9d40-58c6225ff2ff\") " pod="openstack/horizon-7ccb584f4f-z4j4m"
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.541284 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5eefa605-65ba-438e-9d40-58c6225ff2ff-scripts\") pod \"horizon-7ccb584f4f-z4j4m\" (UID: \"5eefa605-65ba-438e-9d40-58c6225ff2ff\") " pod="openstack/horizon-7ccb584f4f-z4j4m"
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.541351 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tjjn9\" (UniqueName: \"kubernetes.io/projected/5eefa605-65ba-438e-9d40-58c6225ff2ff-kube-api-access-tjjn9\") pod \"horizon-7ccb584f4f-z4j4m\" (UID: \"5eefa605-65ba-438e-9d40-58c6225ff2ff\") " pod="openstack/horizon-7ccb584f4f-z4j4m"
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.541378 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5eefa605-65ba-438e-9d40-58c6225ff2ff-horizon-secret-key\") pod \"horizon-7ccb584f4f-z4j4m\" (UID: \"5eefa605-65ba-438e-9d40-58c6225ff2ff\") " pod="openstack/horizon-7ccb584f4f-z4j4m"
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.643912 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5eefa605-65ba-438e-9d40-58c6225ff2ff-logs\") pod \"horizon-7ccb584f4f-z4j4m\" (UID: \"5eefa605-65ba-438e-9d40-58c6225ff2ff\") " pod="openstack/horizon-7ccb584f4f-z4j4m"
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.644011 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5eefa605-65ba-438e-9d40-58c6225ff2ff-config-data\") pod \"horizon-7ccb584f4f-z4j4m\" (UID: \"5eefa605-65ba-438e-9d40-58c6225ff2ff\") " pod="openstack/horizon-7ccb584f4f-z4j4m"
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.644051 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5eefa605-65ba-438e-9d40-58c6225ff2ff-scripts\") pod \"horizon-7ccb584f4f-z4j4m\" (UID: \"5eefa605-65ba-438e-9d40-58c6225ff2ff\") " pod="openstack/horizon-7ccb584f4f-z4j4m"
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.644121 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tjjn9\" (UniqueName: \"kubernetes.io/projected/5eefa605-65ba-438e-9d40-58c6225ff2ff-kube-api-access-tjjn9\") pod \"horizon-7ccb584f4f-z4j4m\" (UID: \"5eefa605-65ba-438e-9d40-58c6225ff2ff\") " pod="openstack/horizon-7ccb584f4f-z4j4m"
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.644147 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5eefa605-65ba-438e-9d40-58c6225ff2ff-horizon-secret-key\") pod \"horizon-7ccb584f4f-z4j4m\" (UID: \"5eefa605-65ba-438e-9d40-58c6225ff2ff\") " pod="openstack/horizon-7ccb584f4f-z4j4m"
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.644456 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5eefa605-65ba-438e-9d40-58c6225ff2ff-logs\") pod \"horizon-7ccb584f4f-z4j4m\" (UID: \"5eefa605-65ba-438e-9d40-58c6225ff2ff\") " pod="openstack/horizon-7ccb584f4f-z4j4m"
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.645035 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5eefa605-65ba-438e-9d40-58c6225ff2ff-scripts\") pod \"horizon-7ccb584f4f-z4j4m\" (UID: \"5eefa605-65ba-438e-9d40-58c6225ff2ff\") " pod="openstack/horizon-7ccb584f4f-z4j4m"
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.646411 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5eefa605-65ba-438e-9d40-58c6225ff2ff-config-data\") pod \"horizon-7ccb584f4f-z4j4m\" (UID: \"5eefa605-65ba-438e-9d40-58c6225ff2ff\") " pod="openstack/horizon-7ccb584f4f-z4j4m"
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.652012 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5eefa605-65ba-438e-9d40-58c6225ff2ff-horizon-secret-key\") pod \"horizon-7ccb584f4f-z4j4m\" (UID: \"5eefa605-65ba-438e-9d40-58c6225ff2ff\") " pod="openstack/horizon-7ccb584f4f-z4j4m"
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.660005 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tjjn9\" (UniqueName: \"kubernetes.io/projected/5eefa605-65ba-438e-9d40-58c6225ff2ff-kube-api-access-tjjn9\") pod \"horizon-7ccb584f4f-z4j4m\" (UID: \"5eefa605-65ba-438e-9d40-58c6225ff2ff\") " pod="openstack/horizon-7ccb584f4f-z4j4m"
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.785710 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7ccb584f4f-z4j4m"
Oct 10 18:12:33 crc kubenswrapper[4799]: I1010 18:12:33.792242 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6bf9dd6ff7-gfnlb"]
Oct 10 18:12:34 crc kubenswrapper[4799]: I1010 18:12:34.266393 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7ccb584f4f-z4j4m"]
Oct 10 18:12:34 crc kubenswrapper[4799]: W1010 18:12:34.278587 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5eefa605_65ba_438e_9d40_58c6225ff2ff.slice/crio-92d0b4b759b3b1dd90c574f38597afc90e388b4d31bb6e9846c0c8a48ade150a WatchSource:0}: Error finding container 92d0b4b759b3b1dd90c574f38597afc90e388b4d31bb6e9846c0c8a48ade150a: Status 404 returned error can't find the container with id 92d0b4b759b3b1dd90c574f38597afc90e388b4d31bb6e9846c0c8a48ade150a
Oct 10 18:12:34 crc kubenswrapper[4799]: I1010 18:12:34.472472 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7ccb584f4f-z4j4m" event={"ID":"5eefa605-65ba-438e-9d40-58c6225ff2ff","Type":"ContainerStarted","Data":"92d0b4b759b3b1dd90c574f38597afc90e388b4d31bb6e9846c0c8a48ade150a"}
Oct 10 18:12:34 crc kubenswrapper[4799]: I1010 18:12:34.474142 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5c878c4b95-t5jdl" event={"ID":"75ca2da4-fcc5-4b58-9e57-5555df0b3ab7","Type":"ContainerStarted","Data":"b5c491c61e7627329c1cc77b97a75860165b115a22b8b058ec525351efb32ad4"}
Oct 10 18:12:34 crc kubenswrapper[4799]: I1010 18:12:34.475240 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6bf9dd6ff7-gfnlb" event={"ID":"bf3c5fcf-3145-40c0-b93c-ba5eac936b43","Type":"ContainerStarted","Data":"12ea61ba5e5ecf31743b11e071ad4ed7c670835c867d4cee32af2dabfdc93896"}
Oct 10 18:12:35 crc kubenswrapper[4799]: I1010 18:12:35.033058 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-f6c2-account-create-6ngzk"]
Oct 10 18:12:35 crc kubenswrapper[4799]: I1010 18:12:35.040078 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-f6c2-account-create-6ngzk"]
Oct 10 18:12:35 crc kubenswrapper[4799]: I1010 18:12:35.417158 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af686f10-f6a4-4f7b-96f0-1dc80aa26f65" path="/var/lib/kubelet/pods/af686f10-f6a4-4f7b-96f0-1dc80aa26f65/volumes"
Oct 10 18:12:36 crc kubenswrapper[4799]: I1010 18:12:36.503917 4799 generic.go:334] "Generic (PLEG): container finished" podID="4d6ca553-e264-4abd-a853-bf86bf3b22bd" containerID="ecd2dd18bc296c70097d8ef6fc6df5fb035af072ab286ceadfd05c8b44dbf0ff" exitCode=0
Oct 10 18:12:36 crc kubenswrapper[4799]: I1010 18:12:36.504011 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4d6ca553-e264-4abd-a853-bf86bf3b22bd","Type":"ContainerDied","Data":"ecd2dd18bc296c70097d8ef6fc6df5fb035af072ab286ceadfd05c8b44dbf0ff"}
Oct 10 18:12:36 crc kubenswrapper[4799]: I1010 18:12:36.508345 4799 generic.go:334] "Generic (PLEG): container finished" podID="7619fca7-982b-4e59-aa11-127f345ffbc0" containerID="f16021709cb91c7869706e26dfc2ee021e3931ad08061ea2ce72ea1a23302c37" exitCode=0
Oct 10 18:12:36 crc kubenswrapper[4799]: I1010 18:12:36.508418 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"7619fca7-982b-4e59-aa11-127f345ffbc0","Type":"ContainerDied","Data":"f16021709cb91c7869706e26dfc2ee021e3931ad08061ea2ce72ea1a23302c37"}
Oct 10 18:12:40 crc kubenswrapper[4799]: I1010 18:12:40.825493 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Oct 10 18:12:40 crc kubenswrapper[4799]: I1010 18:12:40.899311 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 10 18:12:40 crc kubenswrapper[4799]: I1010 18:12:40.940305 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fq5sj\" (UniqueName: \"kubernetes.io/projected/7619fca7-982b-4e59-aa11-127f345ffbc0-kube-api-access-fq5sj\") pod \"7619fca7-982b-4e59-aa11-127f345ffbc0\" (UID: \"7619fca7-982b-4e59-aa11-127f345ffbc0\") "
Oct 10 18:12:40 crc kubenswrapper[4799]: I1010 18:12:40.940372 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7619fca7-982b-4e59-aa11-127f345ffbc0-combined-ca-bundle\") pod \"7619fca7-982b-4e59-aa11-127f345ffbc0\" (UID: \"7619fca7-982b-4e59-aa11-127f345ffbc0\") "
Oct 10 18:12:40 crc kubenswrapper[4799]: I1010 18:12:40.940485 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7619fca7-982b-4e59-aa11-127f345ffbc0-config-data\") pod \"7619fca7-982b-4e59-aa11-127f345ffbc0\" (UID: \"7619fca7-982b-4e59-aa11-127f345ffbc0\") "
Oct 10 18:12:40 crc kubenswrapper[4799]: I1010 18:12:40.940517 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7619fca7-982b-4e59-aa11-127f345ffbc0-logs\") pod \"7619fca7-982b-4e59-aa11-127f345ffbc0\" (UID: \"7619fca7-982b-4e59-aa11-127f345ffbc0\") "
Oct 10 18:12:40 crc kubenswrapper[4799]: I1010 18:12:40.940534 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7619fca7-982b-4e59-aa11-127f345ffbc0-scripts\") pod \"7619fca7-982b-4e59-aa11-127f345ffbc0\" (UID: \"7619fca7-982b-4e59-aa11-127f345ffbc0\") "
Oct 10 18:12:40 crc kubenswrapper[4799]: I1010 18:12:40.940581 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7619fca7-982b-4e59-aa11-127f345ffbc0-httpd-run\") pod \"7619fca7-982b-4e59-aa11-127f345ffbc0\" (UID: \"7619fca7-982b-4e59-aa11-127f345ffbc0\") "
Oct 10 18:12:40 crc kubenswrapper[4799]: I1010 18:12:40.940637 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7619fca7-982b-4e59-aa11-127f345ffbc0-ceph\") pod \"7619fca7-982b-4e59-aa11-127f345ffbc0\" (UID: \"7619fca7-982b-4e59-aa11-127f345ffbc0\") "
Oct 10 18:12:40 crc kubenswrapper[4799]: I1010 18:12:40.946602 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7619fca7-982b-4e59-aa11-127f345ffbc0-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "7619fca7-982b-4e59-aa11-127f345ffbc0" (UID: "7619fca7-982b-4e59-aa11-127f345ffbc0"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 18:12:40 crc kubenswrapper[4799]: I1010 18:12:40.946900 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7619fca7-982b-4e59-aa11-127f345ffbc0-ceph" (OuterVolumeSpecName: "ceph") pod "7619fca7-982b-4e59-aa11-127f345ffbc0" (UID: "7619fca7-982b-4e59-aa11-127f345ffbc0"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 18:12:40 crc kubenswrapper[4799]: I1010 18:12:40.947972 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7619fca7-982b-4e59-aa11-127f345ffbc0-logs" (OuterVolumeSpecName: "logs") pod "7619fca7-982b-4e59-aa11-127f345ffbc0" (UID: "7619fca7-982b-4e59-aa11-127f345ffbc0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 18:12:40 crc kubenswrapper[4799]: I1010 18:12:40.949163 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7619fca7-982b-4e59-aa11-127f345ffbc0-scripts" (OuterVolumeSpecName: "scripts") pod "7619fca7-982b-4e59-aa11-127f345ffbc0" (UID: "7619fca7-982b-4e59-aa11-127f345ffbc0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:12:40 crc kubenswrapper[4799]: I1010 18:12:40.950241 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7619fca7-982b-4e59-aa11-127f345ffbc0-kube-api-access-fq5sj" (OuterVolumeSpecName: "kube-api-access-fq5sj") pod "7619fca7-982b-4e59-aa11-127f345ffbc0" (UID: "7619fca7-982b-4e59-aa11-127f345ffbc0"). InnerVolumeSpecName "kube-api-access-fq5sj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 18:12:40 crc kubenswrapper[4799]: I1010 18:12:40.992709 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7619fca7-982b-4e59-aa11-127f345ffbc0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7619fca7-982b-4e59-aa11-127f345ffbc0" (UID: "7619fca7-982b-4e59-aa11-127f345ffbc0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.020960 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7619fca7-982b-4e59-aa11-127f345ffbc0-config-data" (OuterVolumeSpecName: "config-data") pod "7619fca7-982b-4e59-aa11-127f345ffbc0" (UID: "7619fca7-982b-4e59-aa11-127f345ffbc0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.051053 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4d6ca553-e264-4abd-a853-bf86bf3b22bd-ceph\") pod \"4d6ca553-e264-4abd-a853-bf86bf3b22bd\" (UID: \"4d6ca553-e264-4abd-a853-bf86bf3b22bd\") "
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.051164 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4d6ca553-e264-4abd-a853-bf86bf3b22bd-scripts\") pod \"4d6ca553-e264-4abd-a853-bf86bf3b22bd\" (UID: \"4d6ca553-e264-4abd-a853-bf86bf3b22bd\") "
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.051323 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d6ca553-e264-4abd-a853-bf86bf3b22bd-combined-ca-bundle\") pod \"4d6ca553-e264-4abd-a853-bf86bf3b22bd\" (UID: \"4d6ca553-e264-4abd-a853-bf86bf3b22bd\") "
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.051383 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d6ca553-e264-4abd-a853-bf86bf3b22bd-config-data\") pod \"4d6ca553-e264-4abd-a853-bf86bf3b22bd\" (UID: \"4d6ca553-e264-4abd-a853-bf86bf3b22bd\") "
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.051487 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4d6ca553-e264-4abd-a853-bf86bf3b22bd-httpd-run\") pod \"4d6ca553-e264-4abd-a853-bf86bf3b22bd\" (UID: \"4d6ca553-e264-4abd-a853-bf86bf3b22bd\") "
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.051572 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4cdkw\" (UniqueName: \"kubernetes.io/projected/4d6ca553-e264-4abd-a853-bf86bf3b22bd-kube-api-access-4cdkw\") pod \"4d6ca553-e264-4abd-a853-bf86bf3b22bd\" (UID: \"4d6ca553-e264-4abd-a853-bf86bf3b22bd\") "
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.051591 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4d6ca553-e264-4abd-a853-bf86bf3b22bd-logs\") pod \"4d6ca553-e264-4abd-a853-bf86bf3b22bd\" (UID: \"4d6ca553-e264-4abd-a853-bf86bf3b22bd\") "
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.053179 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7619fca7-982b-4e59-aa11-127f345ffbc0-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.053214 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7619fca7-982b-4e59-aa11-127f345ffbc0-config-data\") on node \"crc\" DevicePath \"\""
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.053224 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7619fca7-982b-4e59-aa11-127f345ffbc0-logs\") on node \"crc\" DevicePath \"\""
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.053233 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7619fca7-982b-4e59-aa11-127f345ffbc0-scripts\") on node \"crc\" DevicePath \"\""
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.053242 4799 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7619fca7-982b-4e59-aa11-127f345ffbc0-httpd-run\") on node \"crc\" DevicePath \"\""
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.053249 4799 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7619fca7-982b-4e59-aa11-127f345ffbc0-ceph\") on node \"crc\" DevicePath \"\""
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.053257 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fq5sj\" (UniqueName: \"kubernetes.io/projected/7619fca7-982b-4e59-aa11-127f345ffbc0-kube-api-access-fq5sj\") on node \"crc\" DevicePath \"\""
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.056056 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d6ca553-e264-4abd-a853-bf86bf3b22bd-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "4d6ca553-e264-4abd-a853-bf86bf3b22bd" (UID: "4d6ca553-e264-4abd-a853-bf86bf3b22bd"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.059303 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d6ca553-e264-4abd-a853-bf86bf3b22bd-logs" (OuterVolumeSpecName: "logs") pod "4d6ca553-e264-4abd-a853-bf86bf3b22bd" (UID: "4d6ca553-e264-4abd-a853-bf86bf3b22bd"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.063043 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d6ca553-e264-4abd-a853-bf86bf3b22bd-kube-api-access-4cdkw" (OuterVolumeSpecName: "kube-api-access-4cdkw") pod "4d6ca553-e264-4abd-a853-bf86bf3b22bd" (UID: "4d6ca553-e264-4abd-a853-bf86bf3b22bd"). InnerVolumeSpecName "kube-api-access-4cdkw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.068131 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d6ca553-e264-4abd-a853-bf86bf3b22bd-ceph" (OuterVolumeSpecName: "ceph") pod "4d6ca553-e264-4abd-a853-bf86bf3b22bd" (UID: "4d6ca553-e264-4abd-a853-bf86bf3b22bd"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.068942 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d6ca553-e264-4abd-a853-bf86bf3b22bd-scripts" (OuterVolumeSpecName: "scripts") pod "4d6ca553-e264-4abd-a853-bf86bf3b22bd" (UID: "4d6ca553-e264-4abd-a853-bf86bf3b22bd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.101485 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d6ca553-e264-4abd-a853-bf86bf3b22bd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4d6ca553-e264-4abd-a853-bf86bf3b22bd" (UID: "4d6ca553-e264-4abd-a853-bf86bf3b22bd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.127957 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d6ca553-e264-4abd-a853-bf86bf3b22bd-config-data" (OuterVolumeSpecName: "config-data") pod "4d6ca553-e264-4abd-a853-bf86bf3b22bd" (UID: "4d6ca553-e264-4abd-a853-bf86bf3b22bd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.155073 4799 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4d6ca553-e264-4abd-a853-bf86bf3b22bd-httpd-run\") on node \"crc\" DevicePath \"\""
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.155262 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4cdkw\" (UniqueName: \"kubernetes.io/projected/4d6ca553-e264-4abd-a853-bf86bf3b22bd-kube-api-access-4cdkw\") on node \"crc\" DevicePath \"\""
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.155318 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4d6ca553-e264-4abd-a853-bf86bf3b22bd-logs\") on node \"crc\" DevicePath \"\""
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.155393 4799 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4d6ca553-e264-4abd-a853-bf86bf3b22bd-ceph\") on node \"crc\" DevicePath \"\""
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.155444 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4d6ca553-e264-4abd-a853-bf86bf3b22bd-scripts\") on node \"crc\" DevicePath \"\""
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.155492 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d6ca553-e264-4abd-a853-bf86bf3b22bd-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.155540 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d6ca553-e264-4abd-a853-bf86bf3b22bd-config-data\") on node \"crc\" DevicePath \"\""
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.564909 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6bf9dd6ff7-gfnlb" event={"ID":"bf3c5fcf-3145-40c0-b93c-ba5eac936b43","Type":"ContainerStarted","Data":"8c70c62a434b2bf7a5e0f029253bc8c75e1b8f500a3f7025df5a8fecd7c64408"}
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.565298 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6bf9dd6ff7-gfnlb" event={"ID":"bf3c5fcf-3145-40c0-b93c-ba5eac936b43","Type":"ContainerStarted","Data":"3e5e7d54699c3f096b3ac31e44e7e67f30ebc5eed9584ccd462783c3502dd417"}
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.567328 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"7619fca7-982b-4e59-aa11-127f345ffbc0","Type":"ContainerDied","Data":"b93bbd4c9b121f2ad5c5d57db563219222ea92e2980b07de1cab4a9f9d80171d"}
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.567383 4799 scope.go:117] "RemoveContainer" containerID="f16021709cb91c7869706e26dfc2ee021e3931ad08061ea2ce72ea1a23302c37"
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.567475 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.570259 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4d6ca553-e264-4abd-a853-bf86bf3b22bd","Type":"ContainerDied","Data":"5c97c7875787b1cec111a567c9df395a0598152fa580cfaf04c0062dc7a850d6"}
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.570330 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.576786 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7ccb584f4f-z4j4m" event={"ID":"5eefa605-65ba-438e-9d40-58c6225ff2ff","Type":"ContainerStarted","Data":"d80062c40fe28666e86970019eb6e0ea8482f96a9581f3c179dd9de92d6e9969"}
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.576833 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7ccb584f4f-z4j4m" event={"ID":"5eefa605-65ba-438e-9d40-58c6225ff2ff","Type":"ContainerStarted","Data":"f8ac406c405deb39da7cf2c72457034ef28402867d0205f2f9ccc0ab1dc32509"}
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.580396 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5c878c4b95-t5jdl" event={"ID":"75ca2da4-fcc5-4b58-9e57-5555df0b3ab7","Type":"ContainerStarted","Data":"ee9fc7fc3e78ef13cd3247e11f7f009113d4fec2f9f8f35f0e2024b069163e39"}
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.580568 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5c878c4b95-t5jdl" event={"ID":"75ca2da4-fcc5-4b58-9e57-5555df0b3ab7","Type":"ContainerStarted","Data":"6326bb1d1e25431d162328177b4897e073738f078e156cda7b3b08ab999f811f"}
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.580819 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5c878c4b95-t5jdl" podUID="75ca2da4-fcc5-4b58-9e57-5555df0b3ab7" containerName="horizon-log" containerID="cri-o://6326bb1d1e25431d162328177b4897e073738f078e156cda7b3b08ab999f811f" gracePeriod=30
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.581027 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5c878c4b95-t5jdl" podUID="75ca2da4-fcc5-4b58-9e57-5555df0b3ab7" containerName="horizon" containerID="cri-o://ee9fc7fc3e78ef13cd3247e11f7f009113d4fec2f9f8f35f0e2024b069163e39" gracePeriod=30
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.592748 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-6bf9dd6ff7-gfnlb" podStartSLOduration=2.94768291 podStartE2EDuration="9.592663704s" podCreationTimestamp="2025-10-10 18:12:32 +0000 UTC" firstStartedPulling="2025-10-10 18:12:33.800704664 +0000 UTC m=+6047.309028819" lastFinishedPulling="2025-10-10 18:12:40.445685498 +0000 UTC m=+6053.954009613" observedRunningTime="2025-10-10 18:12:41.579341309 +0000 UTC m=+6055.087665434" watchObservedRunningTime="2025-10-10 18:12:41.592663704 +0000 UTC m=+6055.100987819"
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.604268 4799 scope.go:117] "RemoveContainer" containerID="0f2fa7258bd37688952d053abceccaf84fc26464448073ec999397d92f6bb5ac"
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.631998 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.648275 4799 scope.go:117] "RemoveContainer" containerID="ecd2dd18bc296c70097d8ef6fc6df5fb035af072ab286ceadfd05c8b44dbf0ff"
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.649771 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.665351 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.678682 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 10 18:12:41 crc kubenswrapper[4799]: E1010 18:12:41.679078 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7619fca7-982b-4e59-aa11-127f345ffbc0" containerName="glance-httpd"
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.679090 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7619fca7-982b-4e59-aa11-127f345ffbc0" containerName="glance-httpd"
Oct 10 18:12:41 crc kubenswrapper[4799]: E1010 18:12:41.679106 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7619fca7-982b-4e59-aa11-127f345ffbc0" containerName="glance-log"
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.679112 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7619fca7-982b-4e59-aa11-127f345ffbc0" containerName="glance-log"
Oct 10 18:12:41 crc kubenswrapper[4799]: E1010 18:12:41.679129 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d6ca553-e264-4abd-a853-bf86bf3b22bd" containerName="glance-log"
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.679135 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d6ca553-e264-4abd-a853-bf86bf3b22bd" containerName="glance-log"
Oct 10 18:12:41 crc kubenswrapper[4799]: E1010 18:12:41.679158 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d6ca553-e264-4abd-a853-bf86bf3b22bd" containerName="glance-httpd"
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.679163 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d6ca553-e264-4abd-a853-bf86bf3b22bd" containerName="glance-httpd"
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.679338 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d6ca553-e264-4abd-a853-bf86bf3b22bd" containerName="glance-log"
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.679354 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="7619fca7-982b-4e59-aa11-127f345ffbc0" containerName="glance-log"
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.679363 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="7619fca7-982b-4e59-aa11-127f345ffbc0" containerName="glance-httpd"
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.679374 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d6ca553-e264-4abd-a853-bf86bf3b22bd" containerName="glance-httpd"
Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.680517 4799 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.684497 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.684902 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.688646 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-4l9wb" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.692191 4799 scope.go:117] "RemoveContainer" containerID="6ef17f87538e7d672932f8e4b33ace3b8c80f4997198bcb997f948e0f6a49ef3" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.711925 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.719689 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.726429 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.734365 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.736831 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7ccb584f4f-z4j4m" podStartSLOduration=2.600627662 podStartE2EDuration="8.736780193s" podCreationTimestamp="2025-10-10 18:12:33 +0000 UTC" firstStartedPulling="2025-10-10 18:12:34.282283972 +0000 UTC m=+6047.790608097" lastFinishedPulling="2025-10-10 18:12:40.418436473 +0000 UTC m=+6053.926760628" observedRunningTime="2025-10-10 18:12:41.650683441 +0000 UTC m=+6055.159007566" watchObservedRunningTime="2025-10-10 18:12:41.736780193 +0000 UTC m=+6055.245104308" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.739816 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.765778 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.770373 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0aeab9ba-576d-4959-9244-459a574bbfe4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0aeab9ba-576d-4959-9244-459a574bbfe4\") " pod="openstack/glance-default-external-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.770430 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.770491 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6djxj\" (UniqueName: \"kubernetes.io/projected/0aeab9ba-576d-4959-9244-459a574bbfe4-kube-api-access-6djxj\") pod \"glance-default-external-api-0\" 
(UID: \"0aeab9ba-576d-4959-9244-459a574bbfe4\") " pod="openstack/glance-default-external-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.770514 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.770537 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.770560 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2-logs\") pod \"glance-default-internal-api-0\" (UID: \"ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.770577 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0aeab9ba-576d-4959-9244-459a574bbfe4-logs\") pod \"glance-default-external-api-0\" (UID: \"0aeab9ba-576d-4959-9244-459a574bbfe4\") " pod="openstack/glance-default-external-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.770614 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.770644 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0aeab9ba-576d-4959-9244-459a574bbfe4-scripts\") pod \"glance-default-external-api-0\" (UID: \"0aeab9ba-576d-4959-9244-459a574bbfe4\") " pod="openstack/glance-default-external-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.770660 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2-ceph\") pod \"glance-default-internal-api-0\" (UID: \"ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.770712 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0aeab9ba-576d-4959-9244-459a574bbfe4-ceph\") pod \"glance-default-external-api-0\" (UID: \"0aeab9ba-576d-4959-9244-459a574bbfe4\") " pod="openstack/glance-default-external-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.770730 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0aeab9ba-576d-4959-9244-459a574bbfe4-config-data\") pod \"glance-default-external-api-0\" (UID: 
\"0aeab9ba-576d-4959-9244-459a574bbfe4\") " pod="openstack/glance-default-external-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.770987 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aeab9ba-576d-4959-9244-459a574bbfe4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0aeab9ba-576d-4959-9244-459a574bbfe4\") " pod="openstack/glance-default-external-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.771010 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8vps\" (UniqueName: \"kubernetes.io/projected/ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2-kube-api-access-b8vps\") pod \"glance-default-internal-api-0\" (UID: \"ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.772463 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-5c878c4b95-t5jdl" podStartSLOduration=2.909892196 podStartE2EDuration="9.772441063s" podCreationTimestamp="2025-10-10 18:12:32 +0000 UTC" firstStartedPulling="2025-10-10 18:12:33.550338461 +0000 UTC m=+6047.058662576" lastFinishedPulling="2025-10-10 18:12:40.412887288 +0000 UTC m=+6053.921211443" observedRunningTime="2025-10-10 18:12:41.678656783 +0000 UTC m=+6055.186980918" watchObservedRunningTime="2025-10-10 18:12:41.772441063 +0000 UTC m=+6055.280765178" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.873096 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0aeab9ba-576d-4959-9244-459a574bbfe4-ceph\") pod \"glance-default-external-api-0\" (UID: \"0aeab9ba-576d-4959-9244-459a574bbfe4\") " pod="openstack/glance-default-external-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.873135 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0aeab9ba-576d-4959-9244-459a574bbfe4-config-data\") pod \"glance-default-external-api-0\" (UID: \"0aeab9ba-576d-4959-9244-459a574bbfe4\") " pod="openstack/glance-default-external-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.873177 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aeab9ba-576d-4959-9244-459a574bbfe4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0aeab9ba-576d-4959-9244-459a574bbfe4\") " pod="openstack/glance-default-external-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.873202 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8vps\" (UniqueName: \"kubernetes.io/projected/ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2-kube-api-access-b8vps\") pod \"glance-default-internal-api-0\" (UID: \"ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.873232 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0aeab9ba-576d-4959-9244-459a574bbfe4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0aeab9ba-576d-4959-9244-459a574bbfe4\") " pod="openstack/glance-default-external-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.873256 4799 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.873727 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0aeab9ba-576d-4959-9244-459a574bbfe4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0aeab9ba-576d-4959-9244-459a574bbfe4\") " pod="openstack/glance-default-external-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.873794 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6djxj\" (UniqueName: \"kubernetes.io/projected/0aeab9ba-576d-4959-9244-459a574bbfe4-kube-api-access-6djxj\") pod \"glance-default-external-api-0\" (UID: \"0aeab9ba-576d-4959-9244-459a574bbfe4\") " pod="openstack/glance-default-external-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.873819 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.873839 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.873861 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2-logs\") pod \"glance-default-internal-api-0\" (UID: \"ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.873878 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0aeab9ba-576d-4959-9244-459a574bbfe4-logs\") pod \"glance-default-external-api-0\" (UID: \"0aeab9ba-576d-4959-9244-459a574bbfe4\") " pod="openstack/glance-default-external-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.873912 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.873937 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0aeab9ba-576d-4959-9244-459a574bbfe4-scripts\") pod \"glance-default-external-api-0\" (UID: \"0aeab9ba-576d-4959-9244-459a574bbfe4\") " pod="openstack/glance-default-external-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.873952 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: 
\"kubernetes.io/projected/ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2-ceph\") pod \"glance-default-internal-api-0\" (UID: \"ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.874291 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.874510 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2-logs\") pod \"glance-default-internal-api-0\" (UID: \"ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.874593 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0aeab9ba-576d-4959-9244-459a574bbfe4-logs\") pod \"glance-default-external-api-0\" (UID: \"0aeab9ba-576d-4959-9244-459a574bbfe4\") " pod="openstack/glance-default-external-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.877497 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0aeab9ba-576d-4959-9244-459a574bbfe4-ceph\") pod \"glance-default-external-api-0\" (UID: \"0aeab9ba-576d-4959-9244-459a574bbfe4\") " pod="openstack/glance-default-external-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.878878 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0aeab9ba-576d-4959-9244-459a574bbfe4-config-data\") pod \"glance-default-external-api-0\" (UID: \"0aeab9ba-576d-4959-9244-459a574bbfe4\") " pod="openstack/glance-default-external-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.880694 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0aeab9ba-576d-4959-9244-459a574bbfe4-scripts\") pod \"glance-default-external-api-0\" (UID: \"0aeab9ba-576d-4959-9244-459a574bbfe4\") " pod="openstack/glance-default-external-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.884078 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aeab9ba-576d-4959-9244-459a574bbfe4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0aeab9ba-576d-4959-9244-459a574bbfe4\") " pod="openstack/glance-default-external-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.884649 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.886126 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2-ceph\") pod \"glance-default-internal-api-0\" (UID: \"ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:12:41 crc 
kubenswrapper[4799]: I1010 18:12:41.896264 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8vps\" (UniqueName: \"kubernetes.io/projected/ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2-kube-api-access-b8vps\") pod \"glance-default-internal-api-0\" (UID: \"ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.897682 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.901214 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2\") " pod="openstack/glance-default-internal-api-0" Oct 10 18:12:41 crc kubenswrapper[4799]: I1010 18:12:41.912871 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6djxj\" (UniqueName: \"kubernetes.io/projected/0aeab9ba-576d-4959-9244-459a574bbfe4-kube-api-access-6djxj\") pod \"glance-default-external-api-0\" (UID: \"0aeab9ba-576d-4959-9244-459a574bbfe4\") " pod="openstack/glance-default-external-api-0" Oct 10 18:12:42 crc kubenswrapper[4799]: I1010 18:12:42.002377 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 10 18:12:42 crc kubenswrapper[4799]: I1010 18:12:42.061546 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 10 18:12:42 crc kubenswrapper[4799]: I1010 18:12:42.645244 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 10 18:12:42 crc kubenswrapper[4799]: I1010 18:12:42.711403 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 10 18:12:43 crc kubenswrapper[4799]: I1010 18:12:43.121625 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5c878c4b95-t5jdl" Oct 10 18:12:43 crc kubenswrapper[4799]: I1010 18:12:43.220203 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-6bf9dd6ff7-gfnlb" Oct 10 18:12:43 crc kubenswrapper[4799]: I1010 18:12:43.220527 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6bf9dd6ff7-gfnlb" Oct 10 18:12:43 crc kubenswrapper[4799]: I1010 18:12:43.473485 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d6ca553-e264-4abd-a853-bf86bf3b22bd" path="/var/lib/kubelet/pods/4d6ca553-e264-4abd-a853-bf86bf3b22bd/volumes" Oct 10 18:12:43 crc kubenswrapper[4799]: I1010 18:12:43.475255 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7619fca7-982b-4e59-aa11-127f345ffbc0" path="/var/lib/kubelet/pods/7619fca7-982b-4e59-aa11-127f345ffbc0/volumes" Oct 10 18:12:43 crc kubenswrapper[4799]: I1010 18:12:43.613965 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2","Type":"ContainerStarted","Data":"31670ce6547a36f64fa18182f865fb2205cf2240211fae386dbc6a4cfbc1135f"} Oct 10 18:12:43 crc kubenswrapper[4799]: I1010 18:12:43.614298 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2","Type":"ContainerStarted","Data":"424156778db2bb978258b53d6f2903bacb98b724010ae810edace404522d3656"} Oct 10 18:12:43 crc kubenswrapper[4799]: I1010 18:12:43.616091 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0aeab9ba-576d-4959-9244-459a574bbfe4","Type":"ContainerStarted","Data":"5600a3b88202670dc0eb9a3417cf1273b018cf7de17466e924160ee716c2a540"} Oct 10 18:12:43 crc kubenswrapper[4799]: I1010 18:12:43.616170 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0aeab9ba-576d-4959-9244-459a574bbfe4","Type":"ContainerStarted","Data":"c8f163a646be64393164998d47b858d08557f8372e54b3b1e44ceee2dc8a96bf"} Oct 10 18:12:43 crc kubenswrapper[4799]: I1010 18:12:43.788612 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7ccb584f4f-z4j4m" Oct 10 18:12:43 crc kubenswrapper[4799]: I1010 18:12:43.789488 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7ccb584f4f-z4j4m" Oct 10 18:12:44 crc kubenswrapper[4799]: I1010 18:12:44.059380 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-jq7rj"] Oct 10 18:12:44 crc kubenswrapper[4799]: I1010 18:12:44.067002 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-jq7rj"] Oct 10 18:12:44 crc kubenswrapper[4799]: I1010 18:12:44.630422 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2","Type":"ContainerStarted","Data":"f950971cc05b09b649fe10e0aece9cbb304702ef81dcb93f301250ffcc095cef"} Oct 10 18:12:44 crc kubenswrapper[4799]: I1010 18:12:44.632566 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0aeab9ba-576d-4959-9244-459a574bbfe4","Type":"ContainerStarted","Data":"dce11a875b70af7013e9af345641a10329dd4f620b864dfb009487ac12197b04"} Oct 10 18:12:44 crc kubenswrapper[4799]: I1010 18:12:44.648895 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.648863505 podStartE2EDuration="3.648863505s" podCreationTimestamp="2025-10-10 18:12:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:12:44.648482976 +0000 UTC m=+6058.156807091" watchObservedRunningTime="2025-10-10 18:12:44.648863505 +0000 UTC m=+6058.157187660" Oct 10 18:12:44 crc kubenswrapper[4799]: I1010 18:12:44.678303 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.678277933 podStartE2EDuration="3.678277933s" podCreationTimestamp="2025-10-10 18:12:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:12:44.675543397 +0000 UTC m=+6058.183867512" watchObservedRunningTime="2025-10-10 18:12:44.678277933 +0000 UTC m=+6058.186602058" Oct 10 18:12:45 crc kubenswrapper[4799]: I1010 18:12:45.416881 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bfbf2401-767f-4a5c-a4a3-c12ac0833631" path="/var/lib/kubelet/pods/bfbf2401-767f-4a5c-a4a3-c12ac0833631/volumes" Oct 10 18:12:52 crc kubenswrapper[4799]: I1010 18:12:52.003603 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 10 18:12:52 crc kubenswrapper[4799]: I1010 18:12:52.004217 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 10 18:12:52 crc kubenswrapper[4799]: I1010 18:12:52.051536 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 10 18:12:52 crc kubenswrapper[4799]: I1010 18:12:52.061937 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 10 18:12:52 crc kubenswrapper[4799]: I1010 18:12:52.063971 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 10 18:12:52 crc kubenswrapper[4799]: I1010 18:12:52.074782 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 10 18:12:52 crc kubenswrapper[4799]: I1010 18:12:52.121416 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 10 18:12:52 crc kubenswrapper[4799]: I1010 18:12:52.131175 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 10 18:12:52 crc kubenswrapper[4799]: I1010 18:12:52.739707 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 10 18:12:52 crc kubenswrapper[4799]: 
I1010 18:12:52.740156 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 10 18:12:52 crc kubenswrapper[4799]: I1010 18:12:52.740179 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 10 18:12:52 crc kubenswrapper[4799]: I1010 18:12:52.740196 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 10 18:12:53 crc kubenswrapper[4799]: I1010 18:12:53.229175 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-6bf9dd6ff7-gfnlb" podUID="bf3c5fcf-3145-40c0-b93c-ba5eac936b43" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.124:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.124:8080: connect: connection refused" Oct 10 18:12:53 crc kubenswrapper[4799]: I1010 18:12:53.791078 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7ccb584f4f-z4j4m" podUID="5eefa605-65ba-438e-9d40-58c6225ff2ff" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.125:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.125:8080: connect: connection refused" Oct 10 18:12:54 crc kubenswrapper[4799]: I1010 18:12:54.764354 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 10 18:12:54 crc kubenswrapper[4799]: I1010 18:12:54.784300 4799 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 10 18:12:54 crc kubenswrapper[4799]: I1010 18:12:54.788949 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 10 18:12:54 crc kubenswrapper[4799]: I1010 18:12:54.789024 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 10 18:12:54 crc kubenswrapper[4799]: I1010 18:12:54.789100 4799 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 10 18:12:54 crc kubenswrapper[4799]: I1010 18:12:54.813663 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 10 18:13:04 crc kubenswrapper[4799]: I1010 18:13:04.986392 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-6bf9dd6ff7-gfnlb" Oct 10 18:13:05 crc kubenswrapper[4799]: I1010 18:13:05.460213 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-7ccb584f4f-z4j4m" Oct 10 18:13:06 crc kubenswrapper[4799]: I1010 18:13:06.513648 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-6bf9dd6ff7-gfnlb" Oct 10 18:13:07 crc kubenswrapper[4799]: I1010 18:13:07.161679 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-7ccb584f4f-z4j4m" Oct 10 18:13:07 crc kubenswrapper[4799]: I1010 18:13:07.248197 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6bf9dd6ff7-gfnlb"] Oct 10 18:13:07 crc kubenswrapper[4799]: I1010 18:13:07.248776 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6bf9dd6ff7-gfnlb" podUID="bf3c5fcf-3145-40c0-b93c-ba5eac936b43" containerName="horizon-log" containerID="cri-o://3e5e7d54699c3f096b3ac31e44e7e67f30ebc5eed9584ccd462783c3502dd417" gracePeriod=30 Oct 10 18:13:07 crc kubenswrapper[4799]: 
I1010 18:13:07.248935 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6bf9dd6ff7-gfnlb" podUID="bf3c5fcf-3145-40c0-b93c-ba5eac936b43" containerName="horizon" containerID="cri-o://8c70c62a434b2bf7a5e0f029253bc8c75e1b8f500a3f7025df5a8fecd7c64408" gracePeriod=30 Oct 10 18:13:09 crc kubenswrapper[4799]: I1010 18:13:09.250247 4799 scope.go:117] "RemoveContainer" containerID="ebafbe89a2a75b4b3527b5913fd4e2a1fc5d7a4886ac2a3f624e74497e2056df" Oct 10 18:13:09 crc kubenswrapper[4799]: I1010 18:13:09.280954 4799 scope.go:117] "RemoveContainer" containerID="78947021d40eb34394d54abd0f689020e16e60125938395ad6afee925ac82930" Oct 10 18:13:09 crc kubenswrapper[4799]: I1010 18:13:09.379871 4799 scope.go:117] "RemoveContainer" containerID="a72a10290936cdfe6e78e4f33f1f5c5189c99ba95b2770b0072d0dd686258f10" Oct 10 18:13:11 crc kubenswrapper[4799]: I1010 18:13:11.060965 4799 generic.go:334] "Generic (PLEG): container finished" podID="bf3c5fcf-3145-40c0-b93c-ba5eac936b43" containerID="8c70c62a434b2bf7a5e0f029253bc8c75e1b8f500a3f7025df5a8fecd7c64408" exitCode=0 Oct 10 18:13:11 crc kubenswrapper[4799]: I1010 18:13:11.061049 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6bf9dd6ff7-gfnlb" event={"ID":"bf3c5fcf-3145-40c0-b93c-ba5eac936b43","Type":"ContainerDied","Data":"8c70c62a434b2bf7a5e0f029253bc8c75e1b8f500a3f7025df5a8fecd7c64408"} Oct 10 18:13:12 crc kubenswrapper[4799]: I1010 18:13:12.082900 4799 generic.go:334] "Generic (PLEG): container finished" podID="75ca2da4-fcc5-4b58-9e57-5555df0b3ab7" containerID="ee9fc7fc3e78ef13cd3247e11f7f009113d4fec2f9f8f35f0e2024b069163e39" exitCode=137 Oct 10 18:13:12 crc kubenswrapper[4799]: I1010 18:13:12.083323 4799 generic.go:334] "Generic (PLEG): container finished" podID="75ca2da4-fcc5-4b58-9e57-5555df0b3ab7" containerID="6326bb1d1e25431d162328177b4897e073738f078e156cda7b3b08ab999f811f" exitCode=137 Oct 10 18:13:12 crc kubenswrapper[4799]: I1010 18:13:12.082962 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5c878c4b95-t5jdl" event={"ID":"75ca2da4-fcc5-4b58-9e57-5555df0b3ab7","Type":"ContainerDied","Data":"ee9fc7fc3e78ef13cd3247e11f7f009113d4fec2f9f8f35f0e2024b069163e39"} Oct 10 18:13:12 crc kubenswrapper[4799]: I1010 18:13:12.083359 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5c878c4b95-t5jdl" event={"ID":"75ca2da4-fcc5-4b58-9e57-5555df0b3ab7","Type":"ContainerDied","Data":"6326bb1d1e25431d162328177b4897e073738f078e156cda7b3b08ab999f811f"} Oct 10 18:13:12 crc kubenswrapper[4799]: I1010 18:13:12.083370 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5c878c4b95-t5jdl" event={"ID":"75ca2da4-fcc5-4b58-9e57-5555df0b3ab7","Type":"ContainerDied","Data":"b5c491c61e7627329c1cc77b97a75860165b115a22b8b058ec525351efb32ad4"} Oct 10 18:13:12 crc kubenswrapper[4799]: I1010 18:13:12.083381 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b5c491c61e7627329c1cc77b97a75860165b115a22b8b058ec525351efb32ad4" Oct 10 18:13:12 crc kubenswrapper[4799]: I1010 18:13:12.139378 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5c878c4b95-t5jdl" Oct 10 18:13:12 crc kubenswrapper[4799]: I1010 18:13:12.231399 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ljl8n\" (UniqueName: \"kubernetes.io/projected/75ca2da4-fcc5-4b58-9e57-5555df0b3ab7-kube-api-access-ljl8n\") pod \"75ca2da4-fcc5-4b58-9e57-5555df0b3ab7\" (UID: \"75ca2da4-fcc5-4b58-9e57-5555df0b3ab7\") " Oct 10 18:13:12 crc kubenswrapper[4799]: I1010 18:13:12.231442 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/75ca2da4-fcc5-4b58-9e57-5555df0b3ab7-config-data\") pod \"75ca2da4-fcc5-4b58-9e57-5555df0b3ab7\" (UID: \"75ca2da4-fcc5-4b58-9e57-5555df0b3ab7\") " Oct 10 18:13:12 crc kubenswrapper[4799]: I1010 18:13:12.231499 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/75ca2da4-fcc5-4b58-9e57-5555df0b3ab7-scripts\") pod \"75ca2da4-fcc5-4b58-9e57-5555df0b3ab7\" (UID: \"75ca2da4-fcc5-4b58-9e57-5555df0b3ab7\") " Oct 10 18:13:12 crc kubenswrapper[4799]: I1010 18:13:12.231522 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75ca2da4-fcc5-4b58-9e57-5555df0b3ab7-logs\") pod \"75ca2da4-fcc5-4b58-9e57-5555df0b3ab7\" (UID: \"75ca2da4-fcc5-4b58-9e57-5555df0b3ab7\") " Oct 10 18:13:12 crc kubenswrapper[4799]: I1010 18:13:12.232061 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/75ca2da4-fcc5-4b58-9e57-5555df0b3ab7-logs" (OuterVolumeSpecName: "logs") pod "75ca2da4-fcc5-4b58-9e57-5555df0b3ab7" (UID: "75ca2da4-fcc5-4b58-9e57-5555df0b3ab7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:13:12 crc kubenswrapper[4799]: I1010 18:13:12.232116 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/75ca2da4-fcc5-4b58-9e57-5555df0b3ab7-horizon-secret-key\") pod \"75ca2da4-fcc5-4b58-9e57-5555df0b3ab7\" (UID: \"75ca2da4-fcc5-4b58-9e57-5555df0b3ab7\") " Oct 10 18:13:12 crc kubenswrapper[4799]: I1010 18:13:12.232766 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75ca2da4-fcc5-4b58-9e57-5555df0b3ab7-logs\") on node \"crc\" DevicePath \"\"" Oct 10 18:13:12 crc kubenswrapper[4799]: I1010 18:13:12.242968 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75ca2da4-fcc5-4b58-9e57-5555df0b3ab7-kube-api-access-ljl8n" (OuterVolumeSpecName: "kube-api-access-ljl8n") pod "75ca2da4-fcc5-4b58-9e57-5555df0b3ab7" (UID: "75ca2da4-fcc5-4b58-9e57-5555df0b3ab7"). InnerVolumeSpecName "kube-api-access-ljl8n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:13:12 crc kubenswrapper[4799]: I1010 18:13:12.243002 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75ca2da4-fcc5-4b58-9e57-5555df0b3ab7-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "75ca2da4-fcc5-4b58-9e57-5555df0b3ab7" (UID: "75ca2da4-fcc5-4b58-9e57-5555df0b3ab7"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:13:12 crc kubenswrapper[4799]: I1010 18:13:12.263716 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/75ca2da4-fcc5-4b58-9e57-5555df0b3ab7-scripts" (OuterVolumeSpecName: "scripts") pod "75ca2da4-fcc5-4b58-9e57-5555df0b3ab7" (UID: "75ca2da4-fcc5-4b58-9e57-5555df0b3ab7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:13:12 crc kubenswrapper[4799]: I1010 18:13:12.282845 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/75ca2da4-fcc5-4b58-9e57-5555df0b3ab7-config-data" (OuterVolumeSpecName: "config-data") pod "75ca2da4-fcc5-4b58-9e57-5555df0b3ab7" (UID: "75ca2da4-fcc5-4b58-9e57-5555df0b3ab7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:13:12 crc kubenswrapper[4799]: I1010 18:13:12.339177 4799 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/75ca2da4-fcc5-4b58-9e57-5555df0b3ab7-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 10 18:13:12 crc kubenswrapper[4799]: I1010 18:13:12.340138 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ljl8n\" (UniqueName: \"kubernetes.io/projected/75ca2da4-fcc5-4b58-9e57-5555df0b3ab7-kube-api-access-ljl8n\") on node \"crc\" DevicePath \"\"" Oct 10 18:13:12 crc kubenswrapper[4799]: I1010 18:13:12.340234 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/75ca2da4-fcc5-4b58-9e57-5555df0b3ab7-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 18:13:12 crc kubenswrapper[4799]: I1010 18:13:12.340258 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/75ca2da4-fcc5-4b58-9e57-5555df0b3ab7-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 18:13:13 crc kubenswrapper[4799]: I1010 18:13:13.095937 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5c878c4b95-t5jdl" Oct 10 18:13:13 crc kubenswrapper[4799]: I1010 18:13:13.149598 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5c878c4b95-t5jdl"] Oct 10 18:13:13 crc kubenswrapper[4799]: I1010 18:13:13.163320 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-5c878c4b95-t5jdl"] Oct 10 18:13:13 crc kubenswrapper[4799]: I1010 18:13:13.221115 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-6bf9dd6ff7-gfnlb" podUID="bf3c5fcf-3145-40c0-b93c-ba5eac936b43" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.124:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.124:8080: connect: connection refused" Oct 10 18:13:13 crc kubenswrapper[4799]: I1010 18:13:13.426633 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="75ca2da4-fcc5-4b58-9e57-5555df0b3ab7" path="/var/lib/kubelet/pods/75ca2da4-fcc5-4b58-9e57-5555df0b3ab7/volumes" Oct 10 18:13:14 crc kubenswrapper[4799]: I1010 18:13:14.042095 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-g5czp"] Oct 10 18:13:14 crc kubenswrapper[4799]: I1010 18:13:14.054190 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-g5czp"] Oct 10 18:13:15 crc kubenswrapper[4799]: I1010 18:13:15.248597 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 18:13:15 crc kubenswrapper[4799]: I1010 18:13:15.248967 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 18:13:15 crc kubenswrapper[4799]: I1010 18:13:15.427005 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80bb2405-1710-46ed-9414-e92c883f1e49" path="/var/lib/kubelet/pods/80bb2405-1710-46ed-9414-e92c883f1e49/volumes" Oct 10 18:13:15 crc kubenswrapper[4799]: I1010 18:13:15.550339 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-5c94bc8f8f-92blf"] Oct 10 18:13:15 crc kubenswrapper[4799]: E1010 18:13:15.550694 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75ca2da4-fcc5-4b58-9e57-5555df0b3ab7" containerName="horizon" Oct 10 18:13:15 crc kubenswrapper[4799]: I1010 18:13:15.550710 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="75ca2da4-fcc5-4b58-9e57-5555df0b3ab7" containerName="horizon" Oct 10 18:13:15 crc kubenswrapper[4799]: E1010 18:13:15.550731 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75ca2da4-fcc5-4b58-9e57-5555df0b3ab7" containerName="horizon-log" Oct 10 18:13:15 crc kubenswrapper[4799]: I1010 18:13:15.550738 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="75ca2da4-fcc5-4b58-9e57-5555df0b3ab7" containerName="horizon-log" Oct 10 18:13:15 crc kubenswrapper[4799]: I1010 18:13:15.550943 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="75ca2da4-fcc5-4b58-9e57-5555df0b3ab7" containerName="horizon" Oct 10 18:13:15 crc kubenswrapper[4799]: I1010 18:13:15.550957 4799 
memory_manager.go:354] "RemoveStaleState removing state" podUID="75ca2da4-fcc5-4b58-9e57-5555df0b3ab7" containerName="horizon-log" Oct 10 18:13:15 crc kubenswrapper[4799]: I1010 18:13:15.552440 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5c94bc8f8f-92blf" Oct 10 18:13:15 crc kubenswrapper[4799]: I1010 18:13:15.579901 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5c94bc8f8f-92blf"] Oct 10 18:13:15 crc kubenswrapper[4799]: I1010 18:13:15.629011 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6cf64fd1-1e8a-4fb5-becf-b52d1629e093-logs\") pod \"horizon-5c94bc8f8f-92blf\" (UID: \"6cf64fd1-1e8a-4fb5-becf-b52d1629e093\") " pod="openstack/horizon-5c94bc8f8f-92blf" Oct 10 18:13:15 crc kubenswrapper[4799]: I1010 18:13:15.629078 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6cf64fd1-1e8a-4fb5-becf-b52d1629e093-config-data\") pod \"horizon-5c94bc8f8f-92blf\" (UID: \"6cf64fd1-1e8a-4fb5-becf-b52d1629e093\") " pod="openstack/horizon-5c94bc8f8f-92blf" Oct 10 18:13:15 crc kubenswrapper[4799]: I1010 18:13:15.629140 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6cf64fd1-1e8a-4fb5-becf-b52d1629e093-horizon-secret-key\") pod \"horizon-5c94bc8f8f-92blf\" (UID: \"6cf64fd1-1e8a-4fb5-becf-b52d1629e093\") " pod="openstack/horizon-5c94bc8f8f-92blf" Oct 10 18:13:15 crc kubenswrapper[4799]: I1010 18:13:15.629232 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dnpvl\" (UniqueName: \"kubernetes.io/projected/6cf64fd1-1e8a-4fb5-becf-b52d1629e093-kube-api-access-dnpvl\") pod \"horizon-5c94bc8f8f-92blf\" (UID: \"6cf64fd1-1e8a-4fb5-becf-b52d1629e093\") " pod="openstack/horizon-5c94bc8f8f-92blf" Oct 10 18:13:15 crc kubenswrapper[4799]: I1010 18:13:15.629277 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6cf64fd1-1e8a-4fb5-becf-b52d1629e093-scripts\") pod \"horizon-5c94bc8f8f-92blf\" (UID: \"6cf64fd1-1e8a-4fb5-becf-b52d1629e093\") " pod="openstack/horizon-5c94bc8f8f-92blf" Oct 10 18:13:15 crc kubenswrapper[4799]: I1010 18:13:15.731236 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6cf64fd1-1e8a-4fb5-becf-b52d1629e093-logs\") pod \"horizon-5c94bc8f8f-92blf\" (UID: \"6cf64fd1-1e8a-4fb5-becf-b52d1629e093\") " pod="openstack/horizon-5c94bc8f8f-92blf" Oct 10 18:13:15 crc kubenswrapper[4799]: I1010 18:13:15.731300 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6cf64fd1-1e8a-4fb5-becf-b52d1629e093-config-data\") pod \"horizon-5c94bc8f8f-92blf\" (UID: \"6cf64fd1-1e8a-4fb5-becf-b52d1629e093\") " pod="openstack/horizon-5c94bc8f8f-92blf" Oct 10 18:13:15 crc kubenswrapper[4799]: I1010 18:13:15.731352 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6cf64fd1-1e8a-4fb5-becf-b52d1629e093-horizon-secret-key\") pod \"horizon-5c94bc8f8f-92blf\" (UID: \"6cf64fd1-1e8a-4fb5-becf-b52d1629e093\") " 
pod="openstack/horizon-5c94bc8f8f-92blf" Oct 10 18:13:15 crc kubenswrapper[4799]: I1010 18:13:15.731405 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dnpvl\" (UniqueName: \"kubernetes.io/projected/6cf64fd1-1e8a-4fb5-becf-b52d1629e093-kube-api-access-dnpvl\") pod \"horizon-5c94bc8f8f-92blf\" (UID: \"6cf64fd1-1e8a-4fb5-becf-b52d1629e093\") " pod="openstack/horizon-5c94bc8f8f-92blf" Oct 10 18:13:15 crc kubenswrapper[4799]: I1010 18:13:15.731447 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6cf64fd1-1e8a-4fb5-becf-b52d1629e093-scripts\") pod \"horizon-5c94bc8f8f-92blf\" (UID: \"6cf64fd1-1e8a-4fb5-becf-b52d1629e093\") " pod="openstack/horizon-5c94bc8f8f-92blf" Oct 10 18:13:15 crc kubenswrapper[4799]: I1010 18:13:15.731735 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6cf64fd1-1e8a-4fb5-becf-b52d1629e093-logs\") pod \"horizon-5c94bc8f8f-92blf\" (UID: \"6cf64fd1-1e8a-4fb5-becf-b52d1629e093\") " pod="openstack/horizon-5c94bc8f8f-92blf" Oct 10 18:13:15 crc kubenswrapper[4799]: I1010 18:13:15.732129 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6cf64fd1-1e8a-4fb5-becf-b52d1629e093-scripts\") pod \"horizon-5c94bc8f8f-92blf\" (UID: \"6cf64fd1-1e8a-4fb5-becf-b52d1629e093\") " pod="openstack/horizon-5c94bc8f8f-92blf" Oct 10 18:13:15 crc kubenswrapper[4799]: I1010 18:13:15.734041 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6cf64fd1-1e8a-4fb5-becf-b52d1629e093-config-data\") pod \"horizon-5c94bc8f8f-92blf\" (UID: \"6cf64fd1-1e8a-4fb5-becf-b52d1629e093\") " pod="openstack/horizon-5c94bc8f8f-92blf" Oct 10 18:13:15 crc kubenswrapper[4799]: I1010 18:13:15.759664 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dnpvl\" (UniqueName: \"kubernetes.io/projected/6cf64fd1-1e8a-4fb5-becf-b52d1629e093-kube-api-access-dnpvl\") pod \"horizon-5c94bc8f8f-92blf\" (UID: \"6cf64fd1-1e8a-4fb5-becf-b52d1629e093\") " pod="openstack/horizon-5c94bc8f8f-92blf" Oct 10 18:13:15 crc kubenswrapper[4799]: I1010 18:13:15.760799 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6cf64fd1-1e8a-4fb5-becf-b52d1629e093-horizon-secret-key\") pod \"horizon-5c94bc8f8f-92blf\" (UID: \"6cf64fd1-1e8a-4fb5-becf-b52d1629e093\") " pod="openstack/horizon-5c94bc8f8f-92blf" Oct 10 18:13:15 crc kubenswrapper[4799]: I1010 18:13:15.879409 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5c94bc8f8f-92blf" Oct 10 18:13:16 crc kubenswrapper[4799]: W1010 18:13:16.446442 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6cf64fd1_1e8a_4fb5_becf_b52d1629e093.slice/crio-f21ec416c22d48ce949b0356988646b270cfeaa211a60e11758cd019b0d03b71 WatchSource:0}: Error finding container f21ec416c22d48ce949b0356988646b270cfeaa211a60e11758cd019b0d03b71: Status 404 returned error can't find the container with id f21ec416c22d48ce949b0356988646b270cfeaa211a60e11758cd019b0d03b71 Oct 10 18:13:16 crc kubenswrapper[4799]: I1010 18:13:16.448310 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5c94bc8f8f-92blf"] Oct 10 18:13:16 crc kubenswrapper[4799]: I1010 18:13:16.972247 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-create-b8wvd"] Oct 10 18:13:16 crc kubenswrapper[4799]: I1010 18:13:16.974205 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-b8wvd" Oct 10 18:13:16 crc kubenswrapper[4799]: I1010 18:13:16.981348 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-b8wvd"] Oct 10 18:13:17 crc kubenswrapper[4799]: I1010 18:13:17.065909 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cchqv\" (UniqueName: \"kubernetes.io/projected/49cea7a4-c56f-4c1f-bd9a-99c3e9945625-kube-api-access-cchqv\") pod \"heat-db-create-b8wvd\" (UID: \"49cea7a4-c56f-4c1f-bd9a-99c3e9945625\") " pod="openstack/heat-db-create-b8wvd" Oct 10 18:13:17 crc kubenswrapper[4799]: I1010 18:13:17.144990 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5c94bc8f8f-92blf" event={"ID":"6cf64fd1-1e8a-4fb5-becf-b52d1629e093","Type":"ContainerStarted","Data":"69557588c5dec63e2d019b2a57e8d2640f4cb0f78aa1dd1bd231be0b4fe3b17d"} Oct 10 18:13:17 crc kubenswrapper[4799]: I1010 18:13:17.145040 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5c94bc8f8f-92blf" event={"ID":"6cf64fd1-1e8a-4fb5-becf-b52d1629e093","Type":"ContainerStarted","Data":"ca5c6f4ddfd9521e5dc463e713e560efa9dac5126ed8ad8dfc12b727c1998b92"} Oct 10 18:13:17 crc kubenswrapper[4799]: I1010 18:13:17.145050 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5c94bc8f8f-92blf" event={"ID":"6cf64fd1-1e8a-4fb5-becf-b52d1629e093","Type":"ContainerStarted","Data":"f21ec416c22d48ce949b0356988646b270cfeaa211a60e11758cd019b0d03b71"} Oct 10 18:13:17 crc kubenswrapper[4799]: I1010 18:13:17.169338 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cchqv\" (UniqueName: \"kubernetes.io/projected/49cea7a4-c56f-4c1f-bd9a-99c3e9945625-kube-api-access-cchqv\") pod \"heat-db-create-b8wvd\" (UID: \"49cea7a4-c56f-4c1f-bd9a-99c3e9945625\") " pod="openstack/heat-db-create-b8wvd" Oct 10 18:13:17 crc kubenswrapper[4799]: I1010 18:13:17.193441 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cchqv\" (UniqueName: \"kubernetes.io/projected/49cea7a4-c56f-4c1f-bd9a-99c3e9945625-kube-api-access-cchqv\") pod \"heat-db-create-b8wvd\" (UID: \"49cea7a4-c56f-4c1f-bd9a-99c3e9945625\") " pod="openstack/heat-db-create-b8wvd" Oct 10 18:13:17 crc kubenswrapper[4799]: I1010 18:13:17.297027 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-create-b8wvd" Oct 10 18:13:17 crc kubenswrapper[4799]: I1010 18:13:17.802219 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-5c94bc8f8f-92blf" podStartSLOduration=2.80219263 podStartE2EDuration="2.80219263s" podCreationTimestamp="2025-10-10 18:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:13:17.168299922 +0000 UTC m=+6090.676624047" watchObservedRunningTime="2025-10-10 18:13:17.80219263 +0000 UTC m=+6091.310516785" Oct 10 18:13:17 crc kubenswrapper[4799]: I1010 18:13:17.819722 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-b8wvd"] Oct 10 18:13:17 crc kubenswrapper[4799]: W1010 18:13:17.825476 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod49cea7a4_c56f_4c1f_bd9a_99c3e9945625.slice/crio-67286b172c032078370bdedfc6fd0d888a05e8d38769758debf90a34f67b6ef0 WatchSource:0}: Error finding container 67286b172c032078370bdedfc6fd0d888a05e8d38769758debf90a34f67b6ef0: Status 404 returned error can't find the container with id 67286b172c032078370bdedfc6fd0d888a05e8d38769758debf90a34f67b6ef0 Oct 10 18:13:18 crc kubenswrapper[4799]: I1010 18:13:18.158945 4799 generic.go:334] "Generic (PLEG): container finished" podID="49cea7a4-c56f-4c1f-bd9a-99c3e9945625" containerID="6441e6290c896e3e4381f62eeb031cd865a2075abd055f223c24cd8b070a254a" exitCode=0 Oct 10 18:13:18 crc kubenswrapper[4799]: I1010 18:13:18.160101 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-b8wvd" event={"ID":"49cea7a4-c56f-4c1f-bd9a-99c3e9945625","Type":"ContainerDied","Data":"6441e6290c896e3e4381f62eeb031cd865a2075abd055f223c24cd8b070a254a"} Oct 10 18:13:18 crc kubenswrapper[4799]: I1010 18:13:18.160200 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-b8wvd" event={"ID":"49cea7a4-c56f-4c1f-bd9a-99c3e9945625","Type":"ContainerStarted","Data":"67286b172c032078370bdedfc6fd0d888a05e8d38769758debf90a34f67b6ef0"} Oct 10 18:13:19 crc kubenswrapper[4799]: I1010 18:13:19.650879 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-b8wvd" Oct 10 18:13:19 crc kubenswrapper[4799]: I1010 18:13:19.828272 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cchqv\" (UniqueName: \"kubernetes.io/projected/49cea7a4-c56f-4c1f-bd9a-99c3e9945625-kube-api-access-cchqv\") pod \"49cea7a4-c56f-4c1f-bd9a-99c3e9945625\" (UID: \"49cea7a4-c56f-4c1f-bd9a-99c3e9945625\") " Oct 10 18:13:19 crc kubenswrapper[4799]: I1010 18:13:19.839063 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49cea7a4-c56f-4c1f-bd9a-99c3e9945625-kube-api-access-cchqv" (OuterVolumeSpecName: "kube-api-access-cchqv") pod "49cea7a4-c56f-4c1f-bd9a-99c3e9945625" (UID: "49cea7a4-c56f-4c1f-bd9a-99c3e9945625"). InnerVolumeSpecName "kube-api-access-cchqv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:13:19 crc kubenswrapper[4799]: I1010 18:13:19.931504 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cchqv\" (UniqueName: \"kubernetes.io/projected/49cea7a4-c56f-4c1f-bd9a-99c3e9945625-kube-api-access-cchqv\") on node \"crc\" DevicePath \"\"" Oct 10 18:13:20 crc kubenswrapper[4799]: I1010 18:13:20.187121 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-b8wvd" event={"ID":"49cea7a4-c56f-4c1f-bd9a-99c3e9945625","Type":"ContainerDied","Data":"67286b172c032078370bdedfc6fd0d888a05e8d38769758debf90a34f67b6ef0"} Oct 10 18:13:20 crc kubenswrapper[4799]: I1010 18:13:20.187191 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="67286b172c032078370bdedfc6fd0d888a05e8d38769758debf90a34f67b6ef0" Oct 10 18:13:20 crc kubenswrapper[4799]: I1010 18:13:20.187279 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-b8wvd" Oct 10 18:13:23 crc kubenswrapper[4799]: I1010 18:13:23.221798 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-6bf9dd6ff7-gfnlb" podUID="bf3c5fcf-3145-40c0-b93c-ba5eac936b43" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.124:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.124:8080: connect: connection refused" Oct 10 18:13:24 crc kubenswrapper[4799]: I1010 18:13:24.045838 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-8aed-account-create-bj7jg"] Oct 10 18:13:24 crc kubenswrapper[4799]: I1010 18:13:24.058949 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-8aed-account-create-bj7jg"] Oct 10 18:13:25 crc kubenswrapper[4799]: I1010 18:13:25.420011 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06167591-831b-4d1c-950f-60158682fc9b" path="/var/lib/kubelet/pods/06167591-831b-4d1c-950f-60158682fc9b/volumes" Oct 10 18:13:25 crc kubenswrapper[4799]: I1010 18:13:25.880425 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-5c94bc8f8f-92blf" Oct 10 18:13:25 crc kubenswrapper[4799]: I1010 18:13:25.880641 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5c94bc8f8f-92blf" Oct 10 18:13:27 crc kubenswrapper[4799]: I1010 18:13:27.100594 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-c9f2-account-create-7vv8z"] Oct 10 18:13:27 crc kubenswrapper[4799]: E1010 18:13:27.101557 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49cea7a4-c56f-4c1f-bd9a-99c3e9945625" containerName="mariadb-database-create" Oct 10 18:13:27 crc kubenswrapper[4799]: I1010 18:13:27.101573 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="49cea7a4-c56f-4c1f-bd9a-99c3e9945625" containerName="mariadb-database-create" Oct 10 18:13:27 crc kubenswrapper[4799]: I1010 18:13:27.101821 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="49cea7a4-c56f-4c1f-bd9a-99c3e9945625" containerName="mariadb-database-create" Oct 10 18:13:27 crc kubenswrapper[4799]: I1010 18:13:27.102738 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-c9f2-account-create-7vv8z" Oct 10 18:13:27 crc kubenswrapper[4799]: I1010 18:13:27.106999 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-db-secret" Oct 10 18:13:27 crc kubenswrapper[4799]: I1010 18:13:27.122126 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-st8cf\" (UniqueName: \"kubernetes.io/projected/0c7f4a5a-92e2-471c-a606-e71c722a026d-kube-api-access-st8cf\") pod \"heat-c9f2-account-create-7vv8z\" (UID: \"0c7f4a5a-92e2-471c-a606-e71c722a026d\") " pod="openstack/heat-c9f2-account-create-7vv8z" Oct 10 18:13:27 crc kubenswrapper[4799]: I1010 18:13:27.130226 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-c9f2-account-create-7vv8z"] Oct 10 18:13:27 crc kubenswrapper[4799]: I1010 18:13:27.223578 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-st8cf\" (UniqueName: \"kubernetes.io/projected/0c7f4a5a-92e2-471c-a606-e71c722a026d-kube-api-access-st8cf\") pod \"heat-c9f2-account-create-7vv8z\" (UID: \"0c7f4a5a-92e2-471c-a606-e71c722a026d\") " pod="openstack/heat-c9f2-account-create-7vv8z" Oct 10 18:13:27 crc kubenswrapper[4799]: I1010 18:13:27.256943 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-st8cf\" (UniqueName: \"kubernetes.io/projected/0c7f4a5a-92e2-471c-a606-e71c722a026d-kube-api-access-st8cf\") pod \"heat-c9f2-account-create-7vv8z\" (UID: \"0c7f4a5a-92e2-471c-a606-e71c722a026d\") " pod="openstack/heat-c9f2-account-create-7vv8z" Oct 10 18:13:27 crc kubenswrapper[4799]: I1010 18:13:27.434545 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-c9f2-account-create-7vv8z" Oct 10 18:13:27 crc kubenswrapper[4799]: I1010 18:13:27.989500 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-c9f2-account-create-7vv8z"] Oct 10 18:13:28 crc kubenswrapper[4799]: I1010 18:13:28.299244 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-c9f2-account-create-7vv8z" event={"ID":"0c7f4a5a-92e2-471c-a606-e71c722a026d","Type":"ContainerStarted","Data":"fa2c5567ac3491e3b5b60d5ff7582729052655b3bb66662b9e445c5bdb38aa7a"} Oct 10 18:13:28 crc kubenswrapper[4799]: I1010 18:13:28.299339 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-c9f2-account-create-7vv8z" event={"ID":"0c7f4a5a-92e2-471c-a606-e71c722a026d","Type":"ContainerStarted","Data":"be6fb04353889643ef7c6858d864901288b0cafffa91de35cdb6f3e567c20127"} Oct 10 18:13:28 crc kubenswrapper[4799]: I1010 18:13:28.324464 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-c9f2-account-create-7vv8z" podStartSLOduration=1.324437755 podStartE2EDuration="1.324437755s" podCreationTimestamp="2025-10-10 18:13:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:13:28.319412742 +0000 UTC m=+6101.827736897" watchObservedRunningTime="2025-10-10 18:13:28.324437755 +0000 UTC m=+6101.832761910" Oct 10 18:13:29 crc kubenswrapper[4799]: I1010 18:13:29.317812 4799 generic.go:334] "Generic (PLEG): container finished" podID="0c7f4a5a-92e2-471c-a606-e71c722a026d" containerID="fa2c5567ac3491e3b5b60d5ff7582729052655b3bb66662b9e445c5bdb38aa7a" exitCode=0 Oct 10 18:13:29 crc kubenswrapper[4799]: I1010 18:13:29.317953 4799 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/heat-c9f2-account-create-7vv8z" event={"ID":"0c7f4a5a-92e2-471c-a606-e71c722a026d","Type":"ContainerDied","Data":"fa2c5567ac3491e3b5b60d5ff7582729052655b3bb66662b9e445c5bdb38aa7a"} Oct 10 18:13:30 crc kubenswrapper[4799]: I1010 18:13:30.048050 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-j8njx"] Oct 10 18:13:30 crc kubenswrapper[4799]: I1010 18:13:30.066881 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-j8njx"] Oct 10 18:13:30 crc kubenswrapper[4799]: I1010 18:13:30.808372 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-c9f2-account-create-7vv8z" Oct 10 18:13:30 crc kubenswrapper[4799]: I1010 18:13:30.905213 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-st8cf\" (UniqueName: \"kubernetes.io/projected/0c7f4a5a-92e2-471c-a606-e71c722a026d-kube-api-access-st8cf\") pod \"0c7f4a5a-92e2-471c-a606-e71c722a026d\" (UID: \"0c7f4a5a-92e2-471c-a606-e71c722a026d\") " Oct 10 18:13:30 crc kubenswrapper[4799]: I1010 18:13:30.911432 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c7f4a5a-92e2-471c-a606-e71c722a026d-kube-api-access-st8cf" (OuterVolumeSpecName: "kube-api-access-st8cf") pod "0c7f4a5a-92e2-471c-a606-e71c722a026d" (UID: "0c7f4a5a-92e2-471c-a606-e71c722a026d"). InnerVolumeSpecName "kube-api-access-st8cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:13:31 crc kubenswrapper[4799]: I1010 18:13:31.007345 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-st8cf\" (UniqueName: \"kubernetes.io/projected/0c7f4a5a-92e2-471c-a606-e71c722a026d-kube-api-access-st8cf\") on node \"crc\" DevicePath \"\"" Oct 10 18:13:31 crc kubenswrapper[4799]: I1010 18:13:31.364660 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-c9f2-account-create-7vv8z" event={"ID":"0c7f4a5a-92e2-471c-a606-e71c722a026d","Type":"ContainerDied","Data":"be6fb04353889643ef7c6858d864901288b0cafffa91de35cdb6f3e567c20127"} Oct 10 18:13:31 crc kubenswrapper[4799]: I1010 18:13:31.364712 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-c9f2-account-create-7vv8z" Oct 10 18:13:31 crc kubenswrapper[4799]: I1010 18:13:31.364716 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="be6fb04353889643ef7c6858d864901288b0cafffa91de35cdb6f3e567c20127" Oct 10 18:13:31 crc kubenswrapper[4799]: I1010 18:13:31.423357 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4638bb90-28e7-4aec-a5fe-7bdb4195d49d" path="/var/lib/kubelet/pods/4638bb90-28e7-4aec-a5fe-7bdb4195d49d/volumes" Oct 10 18:13:32 crc kubenswrapper[4799]: I1010 18:13:32.234035 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-sync-7bz79"] Oct 10 18:13:32 crc kubenswrapper[4799]: E1010 18:13:32.234807 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c7f4a5a-92e2-471c-a606-e71c722a026d" containerName="mariadb-account-create" Oct 10 18:13:32 crc kubenswrapper[4799]: I1010 18:13:32.234832 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c7f4a5a-92e2-471c-a606-e71c722a026d" containerName="mariadb-account-create" Oct 10 18:13:32 crc kubenswrapper[4799]: I1010 18:13:32.235130 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c7f4a5a-92e2-471c-a606-e71c722a026d" containerName="mariadb-account-create" Oct 10 18:13:32 crc kubenswrapper[4799]: I1010 18:13:32.236110 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-7bz79" Oct 10 18:13:32 crc kubenswrapper[4799]: I1010 18:13:32.239926 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-fbd69" Oct 10 18:13:32 crc kubenswrapper[4799]: I1010 18:13:32.245016 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Oct 10 18:13:32 crc kubenswrapper[4799]: I1010 18:13:32.265010 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-7bz79"] Oct 10 18:13:32 crc kubenswrapper[4799]: I1010 18:13:32.333137 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76688729-9f62-43ec-8061-bd200fc10fd0-config-data\") pod \"heat-db-sync-7bz79\" (UID: \"76688729-9f62-43ec-8061-bd200fc10fd0\") " pod="openstack/heat-db-sync-7bz79" Oct 10 18:13:32 crc kubenswrapper[4799]: I1010 18:13:32.333531 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sv255\" (UniqueName: \"kubernetes.io/projected/76688729-9f62-43ec-8061-bd200fc10fd0-kube-api-access-sv255\") pod \"heat-db-sync-7bz79\" (UID: \"76688729-9f62-43ec-8061-bd200fc10fd0\") " pod="openstack/heat-db-sync-7bz79" Oct 10 18:13:32 crc kubenswrapper[4799]: I1010 18:13:32.333669 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76688729-9f62-43ec-8061-bd200fc10fd0-combined-ca-bundle\") pod \"heat-db-sync-7bz79\" (UID: \"76688729-9f62-43ec-8061-bd200fc10fd0\") " pod="openstack/heat-db-sync-7bz79" Oct 10 18:13:32 crc kubenswrapper[4799]: I1010 18:13:32.436180 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76688729-9f62-43ec-8061-bd200fc10fd0-combined-ca-bundle\") pod \"heat-db-sync-7bz79\" (UID: \"76688729-9f62-43ec-8061-bd200fc10fd0\") " pod="openstack/heat-db-sync-7bz79" Oct 10 18:13:32 crc 
kubenswrapper[4799]: I1010 18:13:32.436387 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76688729-9f62-43ec-8061-bd200fc10fd0-config-data\") pod \"heat-db-sync-7bz79\" (UID: \"76688729-9f62-43ec-8061-bd200fc10fd0\") " pod="openstack/heat-db-sync-7bz79" Oct 10 18:13:32 crc kubenswrapper[4799]: I1010 18:13:32.436493 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sv255\" (UniqueName: \"kubernetes.io/projected/76688729-9f62-43ec-8061-bd200fc10fd0-kube-api-access-sv255\") pod \"heat-db-sync-7bz79\" (UID: \"76688729-9f62-43ec-8061-bd200fc10fd0\") " pod="openstack/heat-db-sync-7bz79" Oct 10 18:13:32 crc kubenswrapper[4799]: I1010 18:13:32.447920 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76688729-9f62-43ec-8061-bd200fc10fd0-config-data\") pod \"heat-db-sync-7bz79\" (UID: \"76688729-9f62-43ec-8061-bd200fc10fd0\") " pod="openstack/heat-db-sync-7bz79" Oct 10 18:13:32 crc kubenswrapper[4799]: I1010 18:13:32.448862 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76688729-9f62-43ec-8061-bd200fc10fd0-combined-ca-bundle\") pod \"heat-db-sync-7bz79\" (UID: \"76688729-9f62-43ec-8061-bd200fc10fd0\") " pod="openstack/heat-db-sync-7bz79" Oct 10 18:13:32 crc kubenswrapper[4799]: I1010 18:13:32.460376 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sv255\" (UniqueName: \"kubernetes.io/projected/76688729-9f62-43ec-8061-bd200fc10fd0-kube-api-access-sv255\") pod \"heat-db-sync-7bz79\" (UID: \"76688729-9f62-43ec-8061-bd200fc10fd0\") " pod="openstack/heat-db-sync-7bz79" Oct 10 18:13:32 crc kubenswrapper[4799]: I1010 18:13:32.616537 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-sync-7bz79" Oct 10 18:13:33 crc kubenswrapper[4799]: I1010 18:13:33.107168 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-7bz79"] Oct 10 18:13:33 crc kubenswrapper[4799]: W1010 18:13:33.107624 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod76688729_9f62_43ec_8061_bd200fc10fd0.slice/crio-050b7f881cf6a64c0bfc1d1095f869644fc54dc54bb542aff0600f3b1c401180 WatchSource:0}: Error finding container 050b7f881cf6a64c0bfc1d1095f869644fc54dc54bb542aff0600f3b1c401180: Status 404 returned error can't find the container with id 050b7f881cf6a64c0bfc1d1095f869644fc54dc54bb542aff0600f3b1c401180 Oct 10 18:13:33 crc kubenswrapper[4799]: I1010 18:13:33.220609 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-6bf9dd6ff7-gfnlb" podUID="bf3c5fcf-3145-40c0-b93c-ba5eac936b43" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.124:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.124:8080: connect: connection refused" Oct 10 18:13:33 crc kubenswrapper[4799]: I1010 18:13:33.220859 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6bf9dd6ff7-gfnlb" Oct 10 18:13:33 crc kubenswrapper[4799]: I1010 18:13:33.389439 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-7bz79" event={"ID":"76688729-9f62-43ec-8061-bd200fc10fd0","Type":"ContainerStarted","Data":"050b7f881cf6a64c0bfc1d1095f869644fc54dc54bb542aff0600f3b1c401180"} Oct 10 18:13:37 crc kubenswrapper[4799]: I1010 18:13:37.443498 4799 generic.go:334] "Generic (PLEG): container finished" podID="bf3c5fcf-3145-40c0-b93c-ba5eac936b43" containerID="3e5e7d54699c3f096b3ac31e44e7e67f30ebc5eed9584ccd462783c3502dd417" exitCode=137 Oct 10 18:13:37 crc kubenswrapper[4799]: I1010 18:13:37.443573 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6bf9dd6ff7-gfnlb" event={"ID":"bf3c5fcf-3145-40c0-b93c-ba5eac936b43","Type":"ContainerDied","Data":"3e5e7d54699c3f096b3ac31e44e7e67f30ebc5eed9584ccd462783c3502dd417"} Oct 10 18:13:37 crc kubenswrapper[4799]: I1010 18:13:37.698366 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-5c94bc8f8f-92blf" Oct 10 18:13:39 crc kubenswrapper[4799]: I1010 18:13:39.226818 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-5c94bc8f8f-92blf" Oct 10 18:13:39 crc kubenswrapper[4799]: I1010 18:13:39.308362 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7ccb584f4f-z4j4m"] Oct 10 18:13:39 crc kubenswrapper[4799]: I1010 18:13:39.308649 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7ccb584f4f-z4j4m" podUID="5eefa605-65ba-438e-9d40-58c6225ff2ff" containerName="horizon-log" containerID="cri-o://f8ac406c405deb39da7cf2c72457034ef28402867d0205f2f9ccc0ab1dc32509" gracePeriod=30 Oct 10 18:13:39 crc kubenswrapper[4799]: I1010 18:13:39.309083 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7ccb584f4f-z4j4m" podUID="5eefa605-65ba-438e-9d40-58c6225ff2ff" containerName="horizon" containerID="cri-o://d80062c40fe28666e86970019eb6e0ea8482f96a9581f3c179dd9de92d6e9969" gracePeriod=30 Oct 10 18:13:40 crc kubenswrapper[4799]: I1010 18:13:40.381101 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6bf9dd6ff7-gfnlb" Oct 10 18:13:40 crc kubenswrapper[4799]: I1010 18:13:40.488616 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6bf9dd6ff7-gfnlb" event={"ID":"bf3c5fcf-3145-40c0-b93c-ba5eac936b43","Type":"ContainerDied","Data":"12ea61ba5e5ecf31743b11e071ad4ed7c670835c867d4cee32af2dabfdc93896"} Oct 10 18:13:40 crc kubenswrapper[4799]: I1010 18:13:40.488666 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6bf9dd6ff7-gfnlb" Oct 10 18:13:40 crc kubenswrapper[4799]: I1010 18:13:40.488716 4799 scope.go:117] "RemoveContainer" containerID="8c70c62a434b2bf7a5e0f029253bc8c75e1b8f500a3f7025df5a8fecd7c64408" Oct 10 18:13:40 crc kubenswrapper[4799]: I1010 18:13:40.491464 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-7bz79" event={"ID":"76688729-9f62-43ec-8061-bd200fc10fd0","Type":"ContainerStarted","Data":"3df0bd3443b480f13da760730e3f1da57e42ca8cc8cf342f19793be3fc4a0605"} Oct 10 18:13:40 crc kubenswrapper[4799]: I1010 18:13:40.508443 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-db-sync-7bz79" podStartSLOduration=1.487596341 podStartE2EDuration="8.508425134s" podCreationTimestamp="2025-10-10 18:13:32 +0000 UTC" firstStartedPulling="2025-10-10 18:13:33.110604866 +0000 UTC m=+6106.618928981" lastFinishedPulling="2025-10-10 18:13:40.131433659 +0000 UTC m=+6113.639757774" observedRunningTime="2025-10-10 18:13:40.504821056 +0000 UTC m=+6114.013145231" watchObservedRunningTime="2025-10-10 18:13:40.508425134 +0000 UTC m=+6114.016749249" Oct 10 18:13:40 crc kubenswrapper[4799]: I1010 18:13:40.521648 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/bf3c5fcf-3145-40c0-b93c-ba5eac936b43-horizon-secret-key\") pod \"bf3c5fcf-3145-40c0-b93c-ba5eac936b43\" (UID: \"bf3c5fcf-3145-40c0-b93c-ba5eac936b43\") " Oct 10 18:13:40 crc kubenswrapper[4799]: I1010 18:13:40.521723 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf3c5fcf-3145-40c0-b93c-ba5eac936b43-logs\") pod \"bf3c5fcf-3145-40c0-b93c-ba5eac936b43\" (UID: \"bf3c5fcf-3145-40c0-b93c-ba5eac936b43\") " Oct 10 18:13:40 crc kubenswrapper[4799]: I1010 18:13:40.521818 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bf3c5fcf-3145-40c0-b93c-ba5eac936b43-config-data\") pod \"bf3c5fcf-3145-40c0-b93c-ba5eac936b43\" (UID: \"bf3c5fcf-3145-40c0-b93c-ba5eac936b43\") " Oct 10 18:13:40 crc kubenswrapper[4799]: I1010 18:13:40.521954 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r9sd6\" (UniqueName: \"kubernetes.io/projected/bf3c5fcf-3145-40c0-b93c-ba5eac936b43-kube-api-access-r9sd6\") pod \"bf3c5fcf-3145-40c0-b93c-ba5eac936b43\" (UID: \"bf3c5fcf-3145-40c0-b93c-ba5eac936b43\") " Oct 10 18:13:40 crc kubenswrapper[4799]: I1010 18:13:40.521986 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bf3c5fcf-3145-40c0-b93c-ba5eac936b43-scripts\") pod \"bf3c5fcf-3145-40c0-b93c-ba5eac936b43\" (UID: \"bf3c5fcf-3145-40c0-b93c-ba5eac936b43\") " Oct 10 18:13:40 crc kubenswrapper[4799]: I1010 18:13:40.522491 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/bf3c5fcf-3145-40c0-b93c-ba5eac936b43-logs" (OuterVolumeSpecName: "logs") pod "bf3c5fcf-3145-40c0-b93c-ba5eac936b43" (UID: "bf3c5fcf-3145-40c0-b93c-ba5eac936b43"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:13:40 crc kubenswrapper[4799]: I1010 18:13:40.523599 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf3c5fcf-3145-40c0-b93c-ba5eac936b43-logs\") on node \"crc\" DevicePath \"\"" Oct 10 18:13:40 crc kubenswrapper[4799]: I1010 18:13:40.528438 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf3c5fcf-3145-40c0-b93c-ba5eac936b43-kube-api-access-r9sd6" (OuterVolumeSpecName: "kube-api-access-r9sd6") pod "bf3c5fcf-3145-40c0-b93c-ba5eac936b43" (UID: "bf3c5fcf-3145-40c0-b93c-ba5eac936b43"). InnerVolumeSpecName "kube-api-access-r9sd6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:13:40 crc kubenswrapper[4799]: I1010 18:13:40.529070 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf3c5fcf-3145-40c0-b93c-ba5eac936b43-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "bf3c5fcf-3145-40c0-b93c-ba5eac936b43" (UID: "bf3c5fcf-3145-40c0-b93c-ba5eac936b43"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:13:40 crc kubenswrapper[4799]: I1010 18:13:40.552257 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf3c5fcf-3145-40c0-b93c-ba5eac936b43-scripts" (OuterVolumeSpecName: "scripts") pod "bf3c5fcf-3145-40c0-b93c-ba5eac936b43" (UID: "bf3c5fcf-3145-40c0-b93c-ba5eac936b43"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:13:40 crc kubenswrapper[4799]: I1010 18:13:40.554329 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf3c5fcf-3145-40c0-b93c-ba5eac936b43-config-data" (OuterVolumeSpecName: "config-data") pod "bf3c5fcf-3145-40c0-b93c-ba5eac936b43" (UID: "bf3c5fcf-3145-40c0-b93c-ba5eac936b43"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:13:40 crc kubenswrapper[4799]: I1010 18:13:40.625977 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r9sd6\" (UniqueName: \"kubernetes.io/projected/bf3c5fcf-3145-40c0-b93c-ba5eac936b43-kube-api-access-r9sd6\") on node \"crc\" DevicePath \"\"" Oct 10 18:13:40 crc kubenswrapper[4799]: I1010 18:13:40.626024 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bf3c5fcf-3145-40c0-b93c-ba5eac936b43-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 18:13:40 crc kubenswrapper[4799]: I1010 18:13:40.626041 4799 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/bf3c5fcf-3145-40c0-b93c-ba5eac936b43-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 10 18:13:40 crc kubenswrapper[4799]: I1010 18:13:40.626055 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bf3c5fcf-3145-40c0-b93c-ba5eac936b43-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 18:13:40 crc kubenswrapper[4799]: I1010 18:13:40.702729 4799 scope.go:117] "RemoveContainer" containerID="3e5e7d54699c3f096b3ac31e44e7e67f30ebc5eed9584ccd462783c3502dd417" Oct 10 18:13:40 crc kubenswrapper[4799]: I1010 18:13:40.853966 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6bf9dd6ff7-gfnlb"] Oct 10 18:13:40 crc kubenswrapper[4799]: I1010 18:13:40.869056 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-6bf9dd6ff7-gfnlb"] Oct 10 18:13:41 crc kubenswrapper[4799]: I1010 18:13:41.420331 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf3c5fcf-3145-40c0-b93c-ba5eac936b43" path="/var/lib/kubelet/pods/bf3c5fcf-3145-40c0-b93c-ba5eac936b43/volumes" Oct 10 18:13:42 crc kubenswrapper[4799]: I1010 18:13:42.514966 4799 generic.go:334] "Generic (PLEG): container finished" podID="76688729-9f62-43ec-8061-bd200fc10fd0" containerID="3df0bd3443b480f13da760730e3f1da57e42ca8cc8cf342f19793be3fc4a0605" exitCode=0 Oct 10 18:13:42 crc kubenswrapper[4799]: I1010 18:13:42.515064 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-7bz79" event={"ID":"76688729-9f62-43ec-8061-bd200fc10fd0","Type":"ContainerDied","Data":"3df0bd3443b480f13da760730e3f1da57e42ca8cc8cf342f19793be3fc4a0605"} Oct 10 18:13:43 crc kubenswrapper[4799]: I1010 18:13:43.540288 4799 generic.go:334] "Generic (PLEG): container finished" podID="5eefa605-65ba-438e-9d40-58c6225ff2ff" containerID="d80062c40fe28666e86970019eb6e0ea8482f96a9581f3c179dd9de92d6e9969" exitCode=0 Oct 10 18:13:43 crc kubenswrapper[4799]: I1010 18:13:43.540413 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7ccb584f4f-z4j4m" event={"ID":"5eefa605-65ba-438e-9d40-58c6225ff2ff","Type":"ContainerDied","Data":"d80062c40fe28666e86970019eb6e0ea8482f96a9581f3c179dd9de92d6e9969"} Oct 10 18:13:43 crc kubenswrapper[4799]: I1010 18:13:43.788074 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7ccb584f4f-z4j4m" podUID="5eefa605-65ba-438e-9d40-58c6225ff2ff" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.125:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.125:8080: connect: connection refused" Oct 10 18:13:43 crc kubenswrapper[4799]: I1010 18:13:43.992463 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-sync-7bz79" Oct 10 18:13:44 crc kubenswrapper[4799]: I1010 18:13:44.103381 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sv255\" (UniqueName: \"kubernetes.io/projected/76688729-9f62-43ec-8061-bd200fc10fd0-kube-api-access-sv255\") pod \"76688729-9f62-43ec-8061-bd200fc10fd0\" (UID: \"76688729-9f62-43ec-8061-bd200fc10fd0\") " Oct 10 18:13:44 crc kubenswrapper[4799]: I1010 18:13:44.103791 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76688729-9f62-43ec-8061-bd200fc10fd0-config-data\") pod \"76688729-9f62-43ec-8061-bd200fc10fd0\" (UID: \"76688729-9f62-43ec-8061-bd200fc10fd0\") " Oct 10 18:13:44 crc kubenswrapper[4799]: I1010 18:13:44.103863 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76688729-9f62-43ec-8061-bd200fc10fd0-combined-ca-bundle\") pod \"76688729-9f62-43ec-8061-bd200fc10fd0\" (UID: \"76688729-9f62-43ec-8061-bd200fc10fd0\") " Oct 10 18:13:44 crc kubenswrapper[4799]: I1010 18:13:44.110275 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76688729-9f62-43ec-8061-bd200fc10fd0-kube-api-access-sv255" (OuterVolumeSpecName: "kube-api-access-sv255") pod "76688729-9f62-43ec-8061-bd200fc10fd0" (UID: "76688729-9f62-43ec-8061-bd200fc10fd0"). InnerVolumeSpecName "kube-api-access-sv255". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:13:44 crc kubenswrapper[4799]: I1010 18:13:44.147818 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76688729-9f62-43ec-8061-bd200fc10fd0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "76688729-9f62-43ec-8061-bd200fc10fd0" (UID: "76688729-9f62-43ec-8061-bd200fc10fd0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:13:44 crc kubenswrapper[4799]: I1010 18:13:44.206153 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76688729-9f62-43ec-8061-bd200fc10fd0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:13:44 crc kubenswrapper[4799]: I1010 18:13:44.206195 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sv255\" (UniqueName: \"kubernetes.io/projected/76688729-9f62-43ec-8061-bd200fc10fd0-kube-api-access-sv255\") on node \"crc\" DevicePath \"\"" Oct 10 18:13:44 crc kubenswrapper[4799]: I1010 18:13:44.210676 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76688729-9f62-43ec-8061-bd200fc10fd0-config-data" (OuterVolumeSpecName: "config-data") pod "76688729-9f62-43ec-8061-bd200fc10fd0" (UID: "76688729-9f62-43ec-8061-bd200fc10fd0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:13:44 crc kubenswrapper[4799]: I1010 18:13:44.308907 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76688729-9f62-43ec-8061-bd200fc10fd0-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 18:13:44 crc kubenswrapper[4799]: I1010 18:13:44.557800 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-7bz79" event={"ID":"76688729-9f62-43ec-8061-bd200fc10fd0","Type":"ContainerDied","Data":"050b7f881cf6a64c0bfc1d1095f869644fc54dc54bb542aff0600f3b1c401180"} Oct 10 18:13:44 crc kubenswrapper[4799]: I1010 18:13:44.557868 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="050b7f881cf6a64c0bfc1d1095f869644fc54dc54bb542aff0600f3b1c401180" Oct 10 18:13:44 crc kubenswrapper[4799]: I1010 18:13:44.558015 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-7bz79" Oct 10 18:13:45 crc kubenswrapper[4799]: I1010 18:13:45.248999 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 18:13:45 crc kubenswrapper[4799]: I1010 18:13:45.249336 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.175473 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-engine-78d7fd67f8-xrdlh"] Oct 10 18:13:46 crc kubenswrapper[4799]: E1010 18:13:46.176130 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76688729-9f62-43ec-8061-bd200fc10fd0" containerName="heat-db-sync" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.176145 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="76688729-9f62-43ec-8061-bd200fc10fd0" containerName="heat-db-sync" Oct 10 18:13:46 crc kubenswrapper[4799]: E1010 18:13:46.176191 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf3c5fcf-3145-40c0-b93c-ba5eac936b43" containerName="horizon-log" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.176197 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf3c5fcf-3145-40c0-b93c-ba5eac936b43" containerName="horizon-log" Oct 10 18:13:46 crc kubenswrapper[4799]: E1010 18:13:46.176208 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf3c5fcf-3145-40c0-b93c-ba5eac936b43" containerName="horizon" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.176215 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf3c5fcf-3145-40c0-b93c-ba5eac936b43" containerName="horizon" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.176387 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf3c5fcf-3145-40c0-b93c-ba5eac936b43" containerName="horizon" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.176406 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf3c5fcf-3145-40c0-b93c-ba5eac936b43" containerName="horizon-log" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.176417 4799 
memory_manager.go:354] "RemoveStaleState removing state" podUID="76688729-9f62-43ec-8061-bd200fc10fd0" containerName="heat-db-sync" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.177081 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-78d7fd67f8-xrdlh" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.178629 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-fbd69" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.178981 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-engine-config-data" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.179353 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.207252 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-78d7fd67f8-xrdlh"] Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.257062 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npzrp\" (UniqueName: \"kubernetes.io/projected/6f976566-a6af-40d3-81e1-085366e2b6fe-kube-api-access-npzrp\") pod \"heat-engine-78d7fd67f8-xrdlh\" (UID: \"6f976566-a6af-40d3-81e1-085366e2b6fe\") " pod="openstack/heat-engine-78d7fd67f8-xrdlh" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.257105 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6f976566-a6af-40d3-81e1-085366e2b6fe-config-data-custom\") pod \"heat-engine-78d7fd67f8-xrdlh\" (UID: \"6f976566-a6af-40d3-81e1-085366e2b6fe\") " pod="openstack/heat-engine-78d7fd67f8-xrdlh" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.257155 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f976566-a6af-40d3-81e1-085366e2b6fe-config-data\") pod \"heat-engine-78d7fd67f8-xrdlh\" (UID: \"6f976566-a6af-40d3-81e1-085366e2b6fe\") " pod="openstack/heat-engine-78d7fd67f8-xrdlh" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.257184 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f976566-a6af-40d3-81e1-085366e2b6fe-combined-ca-bundle\") pod \"heat-engine-78d7fd67f8-xrdlh\" (UID: \"6f976566-a6af-40d3-81e1-085366e2b6fe\") " pod="openstack/heat-engine-78d7fd67f8-xrdlh" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.317806 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-cfnapi-699b8f6d77-rp46k"] Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.319202 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-699b8f6d77-rp46k" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.321168 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-cfnapi-config-data" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.330519 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-699b8f6d77-rp46k"] Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.360141 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npzrp\" (UniqueName: \"kubernetes.io/projected/6f976566-a6af-40d3-81e1-085366e2b6fe-kube-api-access-npzrp\") pod \"heat-engine-78d7fd67f8-xrdlh\" (UID: \"6f976566-a6af-40d3-81e1-085366e2b6fe\") " pod="openstack/heat-engine-78d7fd67f8-xrdlh" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.360187 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6f976566-a6af-40d3-81e1-085366e2b6fe-config-data-custom\") pod \"heat-engine-78d7fd67f8-xrdlh\" (UID: \"6f976566-a6af-40d3-81e1-085366e2b6fe\") " pod="openstack/heat-engine-78d7fd67f8-xrdlh" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.360241 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f976566-a6af-40d3-81e1-085366e2b6fe-config-data\") pod \"heat-engine-78d7fd67f8-xrdlh\" (UID: \"6f976566-a6af-40d3-81e1-085366e2b6fe\") " pod="openstack/heat-engine-78d7fd67f8-xrdlh" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.360275 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f976566-a6af-40d3-81e1-085366e2b6fe-combined-ca-bundle\") pod \"heat-engine-78d7fd67f8-xrdlh\" (UID: \"6f976566-a6af-40d3-81e1-085366e2b6fe\") " pod="openstack/heat-engine-78d7fd67f8-xrdlh" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.369691 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f976566-a6af-40d3-81e1-085366e2b6fe-config-data\") pod \"heat-engine-78d7fd67f8-xrdlh\" (UID: \"6f976566-a6af-40d3-81e1-085366e2b6fe\") " pod="openstack/heat-engine-78d7fd67f8-xrdlh" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.379007 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-api-6c6c4696cc-82gmt"] Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.380445 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-api-6c6c4696cc-82gmt" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.385872 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-api-config-data" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.386193 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f976566-a6af-40d3-81e1-085366e2b6fe-combined-ca-bundle\") pod \"heat-engine-78d7fd67f8-xrdlh\" (UID: \"6f976566-a6af-40d3-81e1-085366e2b6fe\") " pod="openstack/heat-engine-78d7fd67f8-xrdlh" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.393062 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6f976566-a6af-40d3-81e1-085366e2b6fe-config-data-custom\") pod \"heat-engine-78d7fd67f8-xrdlh\" (UID: \"6f976566-a6af-40d3-81e1-085366e2b6fe\") " pod="openstack/heat-engine-78d7fd67f8-xrdlh" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.394975 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-npzrp\" (UniqueName: \"kubernetes.io/projected/6f976566-a6af-40d3-81e1-085366e2b6fe-kube-api-access-npzrp\") pod \"heat-engine-78d7fd67f8-xrdlh\" (UID: \"6f976566-a6af-40d3-81e1-085366e2b6fe\") " pod="openstack/heat-engine-78d7fd67f8-xrdlh" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.397632 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-6c6c4696cc-82gmt"] Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.461486 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2e588bc1-697a-43cc-9b80-18937afee8bd-config-data-custom\") pod \"heat-api-6c6c4696cc-82gmt\" (UID: \"2e588bc1-697a-43cc-9b80-18937afee8bd\") " pod="openstack/heat-api-6c6c4696cc-82gmt" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.461878 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/506276c2-7a4c-4603-9ab2-052c409fb136-combined-ca-bundle\") pod \"heat-cfnapi-699b8f6d77-rp46k\" (UID: \"506276c2-7a4c-4603-9ab2-052c409fb136\") " pod="openstack/heat-cfnapi-699b8f6d77-rp46k" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.461931 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fsrlp\" (UniqueName: \"kubernetes.io/projected/2e588bc1-697a-43cc-9b80-18937afee8bd-kube-api-access-fsrlp\") pod \"heat-api-6c6c4696cc-82gmt\" (UID: \"2e588bc1-697a-43cc-9b80-18937afee8bd\") " pod="openstack/heat-api-6c6c4696cc-82gmt" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.461954 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e588bc1-697a-43cc-9b80-18937afee8bd-combined-ca-bundle\") pod \"heat-api-6c6c4696cc-82gmt\" (UID: \"2e588bc1-697a-43cc-9b80-18937afee8bd\") " pod="openstack/heat-api-6c6c4696cc-82gmt" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.461996 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/506276c2-7a4c-4603-9ab2-052c409fb136-config-data-custom\") pod \"heat-cfnapi-699b8f6d77-rp46k\" (UID: 
\"506276c2-7a4c-4603-9ab2-052c409fb136\") " pod="openstack/heat-cfnapi-699b8f6d77-rp46k" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.462027 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/506276c2-7a4c-4603-9ab2-052c409fb136-config-data\") pod \"heat-cfnapi-699b8f6d77-rp46k\" (UID: \"506276c2-7a4c-4603-9ab2-052c409fb136\") " pod="openstack/heat-cfnapi-699b8f6d77-rp46k" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.462047 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e588bc1-697a-43cc-9b80-18937afee8bd-config-data\") pod \"heat-api-6c6c4696cc-82gmt\" (UID: \"2e588bc1-697a-43cc-9b80-18937afee8bd\") " pod="openstack/heat-api-6c6c4696cc-82gmt" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.462125 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldzdm\" (UniqueName: \"kubernetes.io/projected/506276c2-7a4c-4603-9ab2-052c409fb136-kube-api-access-ldzdm\") pod \"heat-cfnapi-699b8f6d77-rp46k\" (UID: \"506276c2-7a4c-4603-9ab2-052c409fb136\") " pod="openstack/heat-cfnapi-699b8f6d77-rp46k" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.501219 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-78d7fd67f8-xrdlh" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.564074 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e588bc1-697a-43cc-9b80-18937afee8bd-combined-ca-bundle\") pod \"heat-api-6c6c4696cc-82gmt\" (UID: \"2e588bc1-697a-43cc-9b80-18937afee8bd\") " pod="openstack/heat-api-6c6c4696cc-82gmt" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.564294 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/506276c2-7a4c-4603-9ab2-052c409fb136-config-data-custom\") pod \"heat-cfnapi-699b8f6d77-rp46k\" (UID: \"506276c2-7a4c-4603-9ab2-052c409fb136\") " pod="openstack/heat-cfnapi-699b8f6d77-rp46k" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.564320 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/506276c2-7a4c-4603-9ab2-052c409fb136-config-data\") pod \"heat-cfnapi-699b8f6d77-rp46k\" (UID: \"506276c2-7a4c-4603-9ab2-052c409fb136\") " pod="openstack/heat-cfnapi-699b8f6d77-rp46k" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.564336 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e588bc1-697a-43cc-9b80-18937afee8bd-config-data\") pod \"heat-api-6c6c4696cc-82gmt\" (UID: \"2e588bc1-697a-43cc-9b80-18937afee8bd\") " pod="openstack/heat-api-6c6c4696cc-82gmt" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.564414 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldzdm\" (UniqueName: \"kubernetes.io/projected/506276c2-7a4c-4603-9ab2-052c409fb136-kube-api-access-ldzdm\") pod \"heat-cfnapi-699b8f6d77-rp46k\" (UID: \"506276c2-7a4c-4603-9ab2-052c409fb136\") " pod="openstack/heat-cfnapi-699b8f6d77-rp46k" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.564470 4799 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2e588bc1-697a-43cc-9b80-18937afee8bd-config-data-custom\") pod \"heat-api-6c6c4696cc-82gmt\" (UID: \"2e588bc1-697a-43cc-9b80-18937afee8bd\") " pod="openstack/heat-api-6c6c4696cc-82gmt" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.564521 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/506276c2-7a4c-4603-9ab2-052c409fb136-combined-ca-bundle\") pod \"heat-cfnapi-699b8f6d77-rp46k\" (UID: \"506276c2-7a4c-4603-9ab2-052c409fb136\") " pod="openstack/heat-cfnapi-699b8f6d77-rp46k" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.564547 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fsrlp\" (UniqueName: \"kubernetes.io/projected/2e588bc1-697a-43cc-9b80-18937afee8bd-kube-api-access-fsrlp\") pod \"heat-api-6c6c4696cc-82gmt\" (UID: \"2e588bc1-697a-43cc-9b80-18937afee8bd\") " pod="openstack/heat-api-6c6c4696cc-82gmt" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.572272 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e588bc1-697a-43cc-9b80-18937afee8bd-combined-ca-bundle\") pod \"heat-api-6c6c4696cc-82gmt\" (UID: \"2e588bc1-697a-43cc-9b80-18937afee8bd\") " pod="openstack/heat-api-6c6c4696cc-82gmt" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.573678 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/506276c2-7a4c-4603-9ab2-052c409fb136-config-data\") pod \"heat-cfnapi-699b8f6d77-rp46k\" (UID: \"506276c2-7a4c-4603-9ab2-052c409fb136\") " pod="openstack/heat-cfnapi-699b8f6d77-rp46k" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.573771 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/506276c2-7a4c-4603-9ab2-052c409fb136-config-data-custom\") pod \"heat-cfnapi-699b8f6d77-rp46k\" (UID: \"506276c2-7a4c-4603-9ab2-052c409fb136\") " pod="openstack/heat-cfnapi-699b8f6d77-rp46k" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.580321 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e588bc1-697a-43cc-9b80-18937afee8bd-config-data\") pod \"heat-api-6c6c4696cc-82gmt\" (UID: \"2e588bc1-697a-43cc-9b80-18937afee8bd\") " pod="openstack/heat-api-6c6c4696cc-82gmt" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.588764 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/506276c2-7a4c-4603-9ab2-052c409fb136-combined-ca-bundle\") pod \"heat-cfnapi-699b8f6d77-rp46k\" (UID: \"506276c2-7a4c-4603-9ab2-052c409fb136\") " pod="openstack/heat-cfnapi-699b8f6d77-rp46k" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.589425 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2e588bc1-697a-43cc-9b80-18937afee8bd-config-data-custom\") pod \"heat-api-6c6c4696cc-82gmt\" (UID: \"2e588bc1-697a-43cc-9b80-18937afee8bd\") " pod="openstack/heat-api-6c6c4696cc-82gmt" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.593694 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fsrlp\" (UniqueName: 
\"kubernetes.io/projected/2e588bc1-697a-43cc-9b80-18937afee8bd-kube-api-access-fsrlp\") pod \"heat-api-6c6c4696cc-82gmt\" (UID: \"2e588bc1-697a-43cc-9b80-18937afee8bd\") " pod="openstack/heat-api-6c6c4696cc-82gmt" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.605584 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldzdm\" (UniqueName: \"kubernetes.io/projected/506276c2-7a4c-4603-9ab2-052c409fb136-kube-api-access-ldzdm\") pod \"heat-cfnapi-699b8f6d77-rp46k\" (UID: \"506276c2-7a4c-4603-9ab2-052c409fb136\") " pod="openstack/heat-cfnapi-699b8f6d77-rp46k" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.639166 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-699b8f6d77-rp46k" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.771939 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-6c6c4696cc-82gmt" Oct 10 18:13:46 crc kubenswrapper[4799]: I1010 18:13:46.784419 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-78d7fd67f8-xrdlh"] Oct 10 18:13:47 crc kubenswrapper[4799]: I1010 18:13:47.091213 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-699b8f6d77-rp46k"] Oct 10 18:13:47 crc kubenswrapper[4799]: W1010 18:13:47.095619 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod506276c2_7a4c_4603_9ab2_052c409fb136.slice/crio-0510b6617f911b6b70a34a5ab7e0d74a8ca1aeb527320f8b95d3d90080ca9731 WatchSource:0}: Error finding container 0510b6617f911b6b70a34a5ab7e0d74a8ca1aeb527320f8b95d3d90080ca9731: Status 404 returned error can't find the container with id 0510b6617f911b6b70a34a5ab7e0d74a8ca1aeb527320f8b95d3d90080ca9731 Oct 10 18:13:47 crc kubenswrapper[4799]: I1010 18:13:47.212773 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-6c6c4696cc-82gmt"] Oct 10 18:13:47 crc kubenswrapper[4799]: W1010 18:13:47.223261 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2e588bc1_697a_43cc_9b80_18937afee8bd.slice/crio-7e2cf68ca661f41a945bd2ebad1051dfba969ebb106b7d3e7dd64f6c88b23618 WatchSource:0}: Error finding container 7e2cf68ca661f41a945bd2ebad1051dfba969ebb106b7d3e7dd64f6c88b23618: Status 404 returned error can't find the container with id 7e2cf68ca661f41a945bd2ebad1051dfba969ebb106b7d3e7dd64f6c88b23618 Oct 10 18:13:47 crc kubenswrapper[4799]: I1010 18:13:47.604197 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-78d7fd67f8-xrdlh" event={"ID":"6f976566-a6af-40d3-81e1-085366e2b6fe","Type":"ContainerStarted","Data":"6ff93e4a0ea92f325ee87cacea51e9bb966933711c15ee97752c5fbdc8afcf72"} Oct 10 18:13:47 crc kubenswrapper[4799]: I1010 18:13:47.604240 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-78d7fd67f8-xrdlh" event={"ID":"6f976566-a6af-40d3-81e1-085366e2b6fe","Type":"ContainerStarted","Data":"81bbed437f1e93898643473dd2df5c6c7deb7b3876f937afc4a28cde14218ab6"} Oct 10 18:13:47 crc kubenswrapper[4799]: I1010 18:13:47.604356 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-engine-78d7fd67f8-xrdlh" Oct 10 18:13:47 crc kubenswrapper[4799]: I1010 18:13:47.605476 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-6c6c4696cc-82gmt" 
event={"ID":"2e588bc1-697a-43cc-9b80-18937afee8bd","Type":"ContainerStarted","Data":"7e2cf68ca661f41a945bd2ebad1051dfba969ebb106b7d3e7dd64f6c88b23618"} Oct 10 18:13:47 crc kubenswrapper[4799]: I1010 18:13:47.606659 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-699b8f6d77-rp46k" event={"ID":"506276c2-7a4c-4603-9ab2-052c409fb136","Type":"ContainerStarted","Data":"0510b6617f911b6b70a34a5ab7e0d74a8ca1aeb527320f8b95d3d90080ca9731"} Oct 10 18:13:49 crc kubenswrapper[4799]: I1010 18:13:49.628906 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-6c6c4696cc-82gmt" event={"ID":"2e588bc1-697a-43cc-9b80-18937afee8bd","Type":"ContainerStarted","Data":"db9303af3542d039066a64323b1b85dd44f52438b27209b0413d8f3346f3ef52"} Oct 10 18:13:49 crc kubenswrapper[4799]: I1010 18:13:49.629432 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-6c6c4696cc-82gmt" Oct 10 18:13:49 crc kubenswrapper[4799]: I1010 18:13:49.631311 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-699b8f6d77-rp46k" event={"ID":"506276c2-7a4c-4603-9ab2-052c409fb136","Type":"ContainerStarted","Data":"3833da56146a7c53e86d01f93420780bb24e26cc36c5d105f75b2d2cf94120aa"} Oct 10 18:13:49 crc kubenswrapper[4799]: I1010 18:13:49.631814 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-699b8f6d77-rp46k" Oct 10 18:13:49 crc kubenswrapper[4799]: I1010 18:13:49.646234 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-engine-78d7fd67f8-xrdlh" podStartSLOduration=3.646217165 podStartE2EDuration="3.646217165s" podCreationTimestamp="2025-10-10 18:13:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:13:47.628231634 +0000 UTC m=+6121.136555749" watchObservedRunningTime="2025-10-10 18:13:49.646217165 +0000 UTC m=+6123.154541280" Oct 10 18:13:49 crc kubenswrapper[4799]: I1010 18:13:49.651553 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-api-6c6c4696cc-82gmt" podStartSLOduration=1.938200452 podStartE2EDuration="3.651542425s" podCreationTimestamp="2025-10-10 18:13:46 +0000 UTC" firstStartedPulling="2025-10-10 18:13:47.226440933 +0000 UTC m=+6120.734765038" lastFinishedPulling="2025-10-10 18:13:48.939782876 +0000 UTC m=+6122.448107011" observedRunningTime="2025-10-10 18:13:49.642508885 +0000 UTC m=+6123.150833000" watchObservedRunningTime="2025-10-10 18:13:49.651542425 +0000 UTC m=+6123.159866540" Oct 10 18:13:49 crc kubenswrapper[4799]: I1010 18:13:49.665444 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-cfnapi-699b8f6d77-rp46k" podStartSLOduration=1.831729003 podStartE2EDuration="3.665398424s" podCreationTimestamp="2025-10-10 18:13:46 +0000 UTC" firstStartedPulling="2025-10-10 18:13:47.098482889 +0000 UTC m=+6120.606807004" lastFinishedPulling="2025-10-10 18:13:48.93215229 +0000 UTC m=+6122.440476425" observedRunningTime="2025-10-10 18:13:49.661900848 +0000 UTC m=+6123.170224983" watchObservedRunningTime="2025-10-10 18:13:49.665398424 +0000 UTC m=+6123.173722549" Oct 10 18:13:53 crc kubenswrapper[4799]: I1010 18:13:53.787659 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7ccb584f4f-z4j4m" podUID="5eefa605-65ba-438e-9d40-58c6225ff2ff" containerName="horizon" probeResult="failure" output="Get 
\"http://10.217.1.125:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.125:8080: connect: connection refused" Oct 10 18:13:57 crc kubenswrapper[4799]: I1010 18:13:57.856451 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-cfnapi-699b8f6d77-rp46k" Oct 10 18:13:58 crc kubenswrapper[4799]: I1010 18:13:58.051966 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-api-6c6c4696cc-82gmt" Oct 10 18:14:03 crc kubenswrapper[4799]: I1010 18:14:03.788327 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7ccb584f4f-z4j4m" podUID="5eefa605-65ba-438e-9d40-58c6225ff2ff" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.125:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.125:8080: connect: connection refused" Oct 10 18:14:03 crc kubenswrapper[4799]: I1010 18:14:03.789185 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7ccb584f4f-z4j4m" Oct 10 18:14:06 crc kubenswrapper[4799]: I1010 18:14:06.605380 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-engine-78d7fd67f8-xrdlh" Oct 10 18:14:09 crc kubenswrapper[4799]: I1010 18:14:09.639108 4799 scope.go:117] "RemoveContainer" containerID="8adbc00f08ae1e0fe2cc01d0bd8926a2ebfd5bddc1932ef6256585089c184ae7" Oct 10 18:14:09 crc kubenswrapper[4799]: I1010 18:14:09.684709 4799 scope.go:117] "RemoveContainer" containerID="dcd21a975134541c976755f676b7dfd64e2570805cf5722972d467e07a190b0c" Oct 10 18:14:09 crc kubenswrapper[4799]: I1010 18:14:09.732334 4799 scope.go:117] "RemoveContainer" containerID="680516a7ccd4b2d45746a58f833cb75b023766d7ade5e0e37ae374462bc329c7" Oct 10 18:14:09 crc kubenswrapper[4799]: I1010 18:14:09.846230 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7ccb584f4f-z4j4m" Oct 10 18:14:09 crc kubenswrapper[4799]: I1010 18:14:09.947804 4799 generic.go:334] "Generic (PLEG): container finished" podID="5eefa605-65ba-438e-9d40-58c6225ff2ff" containerID="f8ac406c405deb39da7cf2c72457034ef28402867d0205f2f9ccc0ab1dc32509" exitCode=137 Oct 10 18:14:09 crc kubenswrapper[4799]: I1010 18:14:09.947861 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7ccb584f4f-z4j4m" Oct 10 18:14:09 crc kubenswrapper[4799]: I1010 18:14:09.947873 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7ccb584f4f-z4j4m" event={"ID":"5eefa605-65ba-438e-9d40-58c6225ff2ff","Type":"ContainerDied","Data":"f8ac406c405deb39da7cf2c72457034ef28402867d0205f2f9ccc0ab1dc32509"} Oct 10 18:14:09 crc kubenswrapper[4799]: I1010 18:14:09.948286 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7ccb584f4f-z4j4m" event={"ID":"5eefa605-65ba-438e-9d40-58c6225ff2ff","Type":"ContainerDied","Data":"92d0b4b759b3b1dd90c574f38597afc90e388b4d31bb6e9846c0c8a48ade150a"} Oct 10 18:14:09 crc kubenswrapper[4799]: I1010 18:14:09.948319 4799 scope.go:117] "RemoveContainer" containerID="d80062c40fe28666e86970019eb6e0ea8482f96a9581f3c179dd9de92d6e9969" Oct 10 18:14:09 crc kubenswrapper[4799]: I1010 18:14:09.963477 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5eefa605-65ba-438e-9d40-58c6225ff2ff-logs\") pod \"5eefa605-65ba-438e-9d40-58c6225ff2ff\" (UID: \"5eefa605-65ba-438e-9d40-58c6225ff2ff\") " Oct 10 18:14:09 crc kubenswrapper[4799]: I1010 18:14:09.963523 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5eefa605-65ba-438e-9d40-58c6225ff2ff-config-data\") pod \"5eefa605-65ba-438e-9d40-58c6225ff2ff\" (UID: \"5eefa605-65ba-438e-9d40-58c6225ff2ff\") " Oct 10 18:14:09 crc kubenswrapper[4799]: I1010 18:14:09.963561 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5eefa605-65ba-438e-9d40-58c6225ff2ff-horizon-secret-key\") pod \"5eefa605-65ba-438e-9d40-58c6225ff2ff\" (UID: \"5eefa605-65ba-438e-9d40-58c6225ff2ff\") " Oct 10 18:14:09 crc kubenswrapper[4799]: I1010 18:14:09.963802 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5eefa605-65ba-438e-9d40-58c6225ff2ff-scripts\") pod \"5eefa605-65ba-438e-9d40-58c6225ff2ff\" (UID: \"5eefa605-65ba-438e-9d40-58c6225ff2ff\") " Oct 10 18:14:09 crc kubenswrapper[4799]: I1010 18:14:09.963862 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tjjn9\" (UniqueName: \"kubernetes.io/projected/5eefa605-65ba-438e-9d40-58c6225ff2ff-kube-api-access-tjjn9\") pod \"5eefa605-65ba-438e-9d40-58c6225ff2ff\" (UID: \"5eefa605-65ba-438e-9d40-58c6225ff2ff\") " Oct 10 18:14:09 crc kubenswrapper[4799]: I1010 18:14:09.963920 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5eefa605-65ba-438e-9d40-58c6225ff2ff-logs" (OuterVolumeSpecName: "logs") pod "5eefa605-65ba-438e-9d40-58c6225ff2ff" (UID: "5eefa605-65ba-438e-9d40-58c6225ff2ff"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:14:09 crc kubenswrapper[4799]: I1010 18:14:09.964333 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5eefa605-65ba-438e-9d40-58c6225ff2ff-logs\") on node \"crc\" DevicePath \"\"" Oct 10 18:14:09 crc kubenswrapper[4799]: I1010 18:14:09.974036 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5eefa605-65ba-438e-9d40-58c6225ff2ff-kube-api-access-tjjn9" (OuterVolumeSpecName: "kube-api-access-tjjn9") pod "5eefa605-65ba-438e-9d40-58c6225ff2ff" (UID: "5eefa605-65ba-438e-9d40-58c6225ff2ff"). InnerVolumeSpecName "kube-api-access-tjjn9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:14:09 crc kubenswrapper[4799]: I1010 18:14:09.979942 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5eefa605-65ba-438e-9d40-58c6225ff2ff-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "5eefa605-65ba-438e-9d40-58c6225ff2ff" (UID: "5eefa605-65ba-438e-9d40-58c6225ff2ff"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:14:10 crc kubenswrapper[4799]: I1010 18:14:10.005512 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5eefa605-65ba-438e-9d40-58c6225ff2ff-scripts" (OuterVolumeSpecName: "scripts") pod "5eefa605-65ba-438e-9d40-58c6225ff2ff" (UID: "5eefa605-65ba-438e-9d40-58c6225ff2ff"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:14:10 crc kubenswrapper[4799]: I1010 18:14:10.005940 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5eefa605-65ba-438e-9d40-58c6225ff2ff-config-data" (OuterVolumeSpecName: "config-data") pod "5eefa605-65ba-438e-9d40-58c6225ff2ff" (UID: "5eefa605-65ba-438e-9d40-58c6225ff2ff"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:14:10 crc kubenswrapper[4799]: I1010 18:14:10.065331 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5eefa605-65ba-438e-9d40-58c6225ff2ff-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 18:14:10 crc kubenswrapper[4799]: I1010 18:14:10.065521 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tjjn9\" (UniqueName: \"kubernetes.io/projected/5eefa605-65ba-438e-9d40-58c6225ff2ff-kube-api-access-tjjn9\") on node \"crc\" DevicePath \"\"" Oct 10 18:14:10 crc kubenswrapper[4799]: I1010 18:14:10.065580 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5eefa605-65ba-438e-9d40-58c6225ff2ff-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 18:14:10 crc kubenswrapper[4799]: I1010 18:14:10.065631 4799 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5eefa605-65ba-438e-9d40-58c6225ff2ff-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 10 18:14:10 crc kubenswrapper[4799]: I1010 18:14:10.113605 4799 scope.go:117] "RemoveContainer" containerID="f8ac406c405deb39da7cf2c72457034ef28402867d0205f2f9ccc0ab1dc32509" Oct 10 18:14:10 crc kubenswrapper[4799]: I1010 18:14:10.133571 4799 scope.go:117] "RemoveContainer" containerID="d80062c40fe28666e86970019eb6e0ea8482f96a9581f3c179dd9de92d6e9969" Oct 10 18:14:10 crc kubenswrapper[4799]: E1010 18:14:10.133951 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d80062c40fe28666e86970019eb6e0ea8482f96a9581f3c179dd9de92d6e9969\": container with ID starting with d80062c40fe28666e86970019eb6e0ea8482f96a9581f3c179dd9de92d6e9969 not found: ID does not exist" containerID="d80062c40fe28666e86970019eb6e0ea8482f96a9581f3c179dd9de92d6e9969" Oct 10 18:14:10 crc kubenswrapper[4799]: I1010 18:14:10.133976 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d80062c40fe28666e86970019eb6e0ea8482f96a9581f3c179dd9de92d6e9969"} err="failed to get container status \"d80062c40fe28666e86970019eb6e0ea8482f96a9581f3c179dd9de92d6e9969\": rpc error: code = NotFound desc = could not find container \"d80062c40fe28666e86970019eb6e0ea8482f96a9581f3c179dd9de92d6e9969\": container with ID starting with d80062c40fe28666e86970019eb6e0ea8482f96a9581f3c179dd9de92d6e9969 not found: ID does not exist" Oct 10 18:14:10 crc kubenswrapper[4799]: I1010 18:14:10.133999 4799 scope.go:117] "RemoveContainer" containerID="f8ac406c405deb39da7cf2c72457034ef28402867d0205f2f9ccc0ab1dc32509" Oct 10 18:14:10 crc kubenswrapper[4799]: E1010 18:14:10.134391 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8ac406c405deb39da7cf2c72457034ef28402867d0205f2f9ccc0ab1dc32509\": container with ID starting with f8ac406c405deb39da7cf2c72457034ef28402867d0205f2f9ccc0ab1dc32509 not found: ID does not exist" containerID="f8ac406c405deb39da7cf2c72457034ef28402867d0205f2f9ccc0ab1dc32509" Oct 10 18:14:10 crc kubenswrapper[4799]: I1010 18:14:10.134465 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8ac406c405deb39da7cf2c72457034ef28402867d0205f2f9ccc0ab1dc32509"} err="failed to get container status \"f8ac406c405deb39da7cf2c72457034ef28402867d0205f2f9ccc0ab1dc32509\": rpc error: code = NotFound 
desc = could not find container \"f8ac406c405deb39da7cf2c72457034ef28402867d0205f2f9ccc0ab1dc32509\": container with ID starting with f8ac406c405deb39da7cf2c72457034ef28402867d0205f2f9ccc0ab1dc32509 not found: ID does not exist" Oct 10 18:14:10 crc kubenswrapper[4799]: I1010 18:14:10.302030 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7ccb584f4f-z4j4m"] Oct 10 18:14:10 crc kubenswrapper[4799]: I1010 18:14:10.314248 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7ccb584f4f-z4j4m"] Oct 10 18:14:11 crc kubenswrapper[4799]: I1010 18:14:11.419696 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5eefa605-65ba-438e-9d40-58c6225ff2ff" path="/var/lib/kubelet/pods/5eefa605-65ba-438e-9d40-58c6225ff2ff/volumes" Oct 10 18:14:15 crc kubenswrapper[4799]: I1010 18:14:15.248869 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 18:14:15 crc kubenswrapper[4799]: I1010 18:14:15.249504 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 18:14:15 crc kubenswrapper[4799]: I1010 18:14:15.249565 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 18:14:15 crc kubenswrapper[4799]: I1010 18:14:15.250715 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"78eb2a5dbad4fabd2d68def3efca3798c9b19e24aad44cc0581450cbe14e2a76"} pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 10 18:14:15 crc kubenswrapper[4799]: I1010 18:14:15.250838 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" containerID="cri-o://78eb2a5dbad4fabd2d68def3efca3798c9b19e24aad44cc0581450cbe14e2a76" gracePeriod=600 Oct 10 18:14:16 crc kubenswrapper[4799]: I1010 18:14:16.061739 4799 generic.go:334] "Generic (PLEG): container finished" podID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerID="78eb2a5dbad4fabd2d68def3efca3798c9b19e24aad44cc0581450cbe14e2a76" exitCode=0 Oct 10 18:14:16 crc kubenswrapper[4799]: I1010 18:14:16.063905 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerDied","Data":"78eb2a5dbad4fabd2d68def3efca3798c9b19e24aad44cc0581450cbe14e2a76"} Oct 10 18:14:16 crc kubenswrapper[4799]: I1010 18:14:16.063981 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"1f0ceb697c256a28cbd9c8e1e3aa08e1dc732ac4382bc8944609e36db615c835"} Oct 10 18:14:16 crc kubenswrapper[4799]: 
I1010 18:14:16.064037 4799 scope.go:117] "RemoveContainer" containerID="6ae067b7971fd6480cb0c3ccf44d4e22f837ba4674373b4b5903247a9af39cf1" Oct 10 18:14:16 crc kubenswrapper[4799]: I1010 18:14:16.243034 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6"] Oct 10 18:14:16 crc kubenswrapper[4799]: E1010 18:14:16.243807 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5eefa605-65ba-438e-9d40-58c6225ff2ff" containerName="horizon" Oct 10 18:14:16 crc kubenswrapper[4799]: I1010 18:14:16.243825 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="5eefa605-65ba-438e-9d40-58c6225ff2ff" containerName="horizon" Oct 10 18:14:16 crc kubenswrapper[4799]: E1010 18:14:16.243863 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5eefa605-65ba-438e-9d40-58c6225ff2ff" containerName="horizon-log" Oct 10 18:14:16 crc kubenswrapper[4799]: I1010 18:14:16.243870 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="5eefa605-65ba-438e-9d40-58c6225ff2ff" containerName="horizon-log" Oct 10 18:14:16 crc kubenswrapper[4799]: I1010 18:14:16.244048 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="5eefa605-65ba-438e-9d40-58c6225ff2ff" containerName="horizon-log" Oct 10 18:14:16 crc kubenswrapper[4799]: I1010 18:14:16.244071 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="5eefa605-65ba-438e-9d40-58c6225ff2ff" containerName="horizon" Oct 10 18:14:16 crc kubenswrapper[4799]: I1010 18:14:16.245424 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6" Oct 10 18:14:16 crc kubenswrapper[4799]: I1010 18:14:16.252993 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 10 18:14:16 crc kubenswrapper[4799]: I1010 18:14:16.264213 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6"] Oct 10 18:14:16 crc kubenswrapper[4799]: I1010 18:14:16.405618 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0c273054-c72f-4e27-88e5-a7366ceb9dde-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6\" (UID: \"0c273054-c72f-4e27-88e5-a7366ceb9dde\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6" Oct 10 18:14:16 crc kubenswrapper[4799]: I1010 18:14:16.405744 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ddlf4\" (UniqueName: \"kubernetes.io/projected/0c273054-c72f-4e27-88e5-a7366ceb9dde-kube-api-access-ddlf4\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6\" (UID: \"0c273054-c72f-4e27-88e5-a7366ceb9dde\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6" Oct 10 18:14:16 crc kubenswrapper[4799]: I1010 18:14:16.406194 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0c273054-c72f-4e27-88e5-a7366ceb9dde-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6\" (UID: \"0c273054-c72f-4e27-88e5-a7366ceb9dde\") " 
pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6" Oct 10 18:14:16 crc kubenswrapper[4799]: I1010 18:14:16.508889 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0c273054-c72f-4e27-88e5-a7366ceb9dde-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6\" (UID: \"0c273054-c72f-4e27-88e5-a7366ceb9dde\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6" Oct 10 18:14:16 crc kubenswrapper[4799]: I1010 18:14:16.509016 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0c273054-c72f-4e27-88e5-a7366ceb9dde-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6\" (UID: \"0c273054-c72f-4e27-88e5-a7366ceb9dde\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6" Oct 10 18:14:16 crc kubenswrapper[4799]: I1010 18:14:16.509119 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ddlf4\" (UniqueName: \"kubernetes.io/projected/0c273054-c72f-4e27-88e5-a7366ceb9dde-kube-api-access-ddlf4\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6\" (UID: \"0c273054-c72f-4e27-88e5-a7366ceb9dde\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6" Oct 10 18:14:16 crc kubenswrapper[4799]: I1010 18:14:16.510007 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0c273054-c72f-4e27-88e5-a7366ceb9dde-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6\" (UID: \"0c273054-c72f-4e27-88e5-a7366ceb9dde\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6" Oct 10 18:14:16 crc kubenswrapper[4799]: I1010 18:14:16.510434 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0c273054-c72f-4e27-88e5-a7366ceb9dde-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6\" (UID: \"0c273054-c72f-4e27-88e5-a7366ceb9dde\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6" Oct 10 18:14:16 crc kubenswrapper[4799]: I1010 18:14:16.533371 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ddlf4\" (UniqueName: \"kubernetes.io/projected/0c273054-c72f-4e27-88e5-a7366ceb9dde-kube-api-access-ddlf4\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6\" (UID: \"0c273054-c72f-4e27-88e5-a7366ceb9dde\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6" Oct 10 18:14:16 crc kubenswrapper[4799]: I1010 18:14:16.608941 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6" Oct 10 18:14:17 crc kubenswrapper[4799]: I1010 18:14:17.113677 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6"] Oct 10 18:14:17 crc kubenswrapper[4799]: W1010 18:14:17.120707 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0c273054_c72f_4e27_88e5_a7366ceb9dde.slice/crio-9b10318018a22e1fd186814e181b6c86658317ed279361e04c28d8aca63603bf WatchSource:0}: Error finding container 9b10318018a22e1fd186814e181b6c86658317ed279361e04c28d8aca63603bf: Status 404 returned error can't find the container with id 9b10318018a22e1fd186814e181b6c86658317ed279361e04c28d8aca63603bf Oct 10 18:14:18 crc kubenswrapper[4799]: I1010 18:14:18.096802 4799 generic.go:334] "Generic (PLEG): container finished" podID="0c273054-c72f-4e27-88e5-a7366ceb9dde" containerID="84d7d475445ea73521f848325d6296b14efc3aa02f85b9530a19f3b78837abec" exitCode=0 Oct 10 18:14:18 crc kubenswrapper[4799]: I1010 18:14:18.096985 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6" event={"ID":"0c273054-c72f-4e27-88e5-a7366ceb9dde","Type":"ContainerDied","Data":"84d7d475445ea73521f848325d6296b14efc3aa02f85b9530a19f3b78837abec"} Oct 10 18:14:18 crc kubenswrapper[4799]: I1010 18:14:18.097625 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6" event={"ID":"0c273054-c72f-4e27-88e5-a7366ceb9dde","Type":"ContainerStarted","Data":"9b10318018a22e1fd186814e181b6c86658317ed279361e04c28d8aca63603bf"} Oct 10 18:14:20 crc kubenswrapper[4799]: I1010 18:14:20.139624 4799 generic.go:334] "Generic (PLEG): container finished" podID="0c273054-c72f-4e27-88e5-a7366ceb9dde" containerID="7ae02a4f7c3469f59185acd5f4f9bd4c68b2ea92cdca35b5b8b020b77fe64d1f" exitCode=0 Oct 10 18:14:20 crc kubenswrapper[4799]: I1010 18:14:20.140077 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6" event={"ID":"0c273054-c72f-4e27-88e5-a7366ceb9dde","Type":"ContainerDied","Data":"7ae02a4f7c3469f59185acd5f4f9bd4c68b2ea92cdca35b5b8b020b77fe64d1f"} Oct 10 18:14:20 crc kubenswrapper[4799]: E1010 18:14:20.954530 4799 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0c273054_c72f_4e27_88e5_a7366ceb9dde.slice/crio-878ac91748739f6f9aacf4c1a4e7413679288bff21f9bc9e5e45e466575dc32b.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0c273054_c72f_4e27_88e5_a7366ceb9dde.slice/crio-conmon-878ac91748739f6f9aacf4c1a4e7413679288bff21f9bc9e5e45e466575dc32b.scope\": RecentStats: unable to find data in memory cache]" Oct 10 18:14:21 crc kubenswrapper[4799]: I1010 18:14:21.154363 4799 generic.go:334] "Generic (PLEG): container finished" podID="0c273054-c72f-4e27-88e5-a7366ceb9dde" containerID="878ac91748739f6f9aacf4c1a4e7413679288bff21f9bc9e5e45e466575dc32b" exitCode=0 Oct 10 18:14:21 crc kubenswrapper[4799]: I1010 18:14:21.154429 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6" event={"ID":"0c273054-c72f-4e27-88e5-a7366ceb9dde","Type":"ContainerDied","Data":"878ac91748739f6f9aacf4c1a4e7413679288bff21f9bc9e5e45e466575dc32b"} Oct 10 18:14:22 crc kubenswrapper[4799]: I1010 18:14:22.672184 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6" Oct 10 18:14:22 crc kubenswrapper[4799]: I1010 18:14:22.793987 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0c273054-c72f-4e27-88e5-a7366ceb9dde-util\") pod \"0c273054-c72f-4e27-88e5-a7366ceb9dde\" (UID: \"0c273054-c72f-4e27-88e5-a7366ceb9dde\") " Oct 10 18:14:22 crc kubenswrapper[4799]: I1010 18:14:22.794309 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0c273054-c72f-4e27-88e5-a7366ceb9dde-bundle\") pod \"0c273054-c72f-4e27-88e5-a7366ceb9dde\" (UID: \"0c273054-c72f-4e27-88e5-a7366ceb9dde\") " Oct 10 18:14:22 crc kubenswrapper[4799]: I1010 18:14:22.794359 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ddlf4\" (UniqueName: \"kubernetes.io/projected/0c273054-c72f-4e27-88e5-a7366ceb9dde-kube-api-access-ddlf4\") pod \"0c273054-c72f-4e27-88e5-a7366ceb9dde\" (UID: \"0c273054-c72f-4e27-88e5-a7366ceb9dde\") " Oct 10 18:14:22 crc kubenswrapper[4799]: I1010 18:14:22.795850 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0c273054-c72f-4e27-88e5-a7366ceb9dde-bundle" (OuterVolumeSpecName: "bundle") pod "0c273054-c72f-4e27-88e5-a7366ceb9dde" (UID: "0c273054-c72f-4e27-88e5-a7366ceb9dde"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:14:22 crc kubenswrapper[4799]: I1010 18:14:22.807836 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c273054-c72f-4e27-88e5-a7366ceb9dde-kube-api-access-ddlf4" (OuterVolumeSpecName: "kube-api-access-ddlf4") pod "0c273054-c72f-4e27-88e5-a7366ceb9dde" (UID: "0c273054-c72f-4e27-88e5-a7366ceb9dde"). InnerVolumeSpecName "kube-api-access-ddlf4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:14:22 crc kubenswrapper[4799]: I1010 18:14:22.810907 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0c273054-c72f-4e27-88e5-a7366ceb9dde-util" (OuterVolumeSpecName: "util") pod "0c273054-c72f-4e27-88e5-a7366ceb9dde" (UID: "0c273054-c72f-4e27-88e5-a7366ceb9dde"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:14:22 crc kubenswrapper[4799]: I1010 18:14:22.896987 4799 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0c273054-c72f-4e27-88e5-a7366ceb9dde-util\") on node \"crc\" DevicePath \"\"" Oct 10 18:14:22 crc kubenswrapper[4799]: I1010 18:14:22.897031 4799 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0c273054-c72f-4e27-88e5-a7366ceb9dde-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:14:22 crc kubenswrapper[4799]: I1010 18:14:22.897051 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ddlf4\" (UniqueName: \"kubernetes.io/projected/0c273054-c72f-4e27-88e5-a7366ceb9dde-kube-api-access-ddlf4\") on node \"crc\" DevicePath \"\"" Oct 10 18:14:23 crc kubenswrapper[4799]: I1010 18:14:23.186221 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6" event={"ID":"0c273054-c72f-4e27-88e5-a7366ceb9dde","Type":"ContainerDied","Data":"9b10318018a22e1fd186814e181b6c86658317ed279361e04c28d8aca63603bf"} Oct 10 18:14:23 crc kubenswrapper[4799]: I1010 18:14:23.186271 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9b10318018a22e1fd186814e181b6c86658317ed279361e04c28d8aca63603bf" Oct 10 18:14:23 crc kubenswrapper[4799]: I1010 18:14:23.186317 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6" Oct 10 18:14:30 crc kubenswrapper[4799]: I1010 18:14:30.084321 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-n52tc"] Oct 10 18:14:30 crc kubenswrapper[4799]: I1010 18:14:30.091220 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-xfnk5"] Oct 10 18:14:30 crc kubenswrapper[4799]: I1010 18:14:30.105363 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-z2qgp"] Oct 10 18:14:30 crc kubenswrapper[4799]: I1010 18:14:30.111807 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-xfnk5"] Oct 10 18:14:30 crc kubenswrapper[4799]: I1010 18:14:30.127820 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-z2qgp"] Oct 10 18:14:30 crc kubenswrapper[4799]: I1010 18:14:30.131402 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-n52tc"] Oct 10 18:14:31 crc kubenswrapper[4799]: I1010 18:14:31.413327 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55013235-ff21-4a02-bb37-8eef30a18d79" path="/var/lib/kubelet/pods/55013235-ff21-4a02-bb37-8eef30a18d79/volumes" Oct 10 18:14:31 crc kubenswrapper[4799]: I1010 18:14:31.414137 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd05328d-e9e7-4afb-ae9e-2729453b99db" path="/var/lib/kubelet/pods/dd05328d-e9e7-4afb-ae9e-2729453b99db/volumes" Oct 10 18:14:31 crc kubenswrapper[4799]: I1010 18:14:31.414574 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e57db8dc-b992-4e8f-9d44-e23579585d4c" path="/var/lib/kubelet/pods/e57db8dc-b992-4e8f-9d44-e23579585d4c/volumes" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.340781 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-bk4jp"] 
Oct 10 18:14:33 crc kubenswrapper[4799]: E1010 18:14:33.345985 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c273054-c72f-4e27-88e5-a7366ceb9dde" containerName="extract" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.346020 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c273054-c72f-4e27-88e5-a7366ceb9dde" containerName="extract" Oct 10 18:14:33 crc kubenswrapper[4799]: E1010 18:14:33.346063 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c273054-c72f-4e27-88e5-a7366ceb9dde" containerName="pull" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.346069 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c273054-c72f-4e27-88e5-a7366ceb9dde" containerName="pull" Oct 10 18:14:33 crc kubenswrapper[4799]: E1010 18:14:33.346085 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c273054-c72f-4e27-88e5-a7366ceb9dde" containerName="util" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.346090 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c273054-c72f-4e27-88e5-a7366ceb9dde" containerName="util" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.346420 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c273054-c72f-4e27-88e5-a7366ceb9dde" containerName="extract" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.347198 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-bk4jp" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.350717 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.350963 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-cb2xk" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.351979 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.412821 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2x22\" (UniqueName: \"kubernetes.io/projected/f4d71c7a-ca15-4516-804a-a318b170705a-kube-api-access-f2x22\") pod \"obo-prometheus-operator-7c8cf85677-bk4jp\" (UID: \"f4d71c7a-ca15-4516-804a-a318b170705a\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-bk4jp" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.428348 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-bk4jp"] Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.484148 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7549bdddd6-wfjc9"] Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.485903 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7549bdddd6-wfjc9" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.487275 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.487317 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-m7h4b" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.504711 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7549bdddd6-wfjc9"] Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.514642 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2x22\" (UniqueName: \"kubernetes.io/projected/f4d71c7a-ca15-4516-804a-a318b170705a-kube-api-access-f2x22\") pod \"obo-prometheus-operator-7c8cf85677-bk4jp\" (UID: \"f4d71c7a-ca15-4516-804a-a318b170705a\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-bk4jp" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.529984 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7549bdddd6-d69ks"] Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.531423 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7549bdddd6-d69ks" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.544360 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2x22\" (UniqueName: \"kubernetes.io/projected/f4d71c7a-ca15-4516-804a-a318b170705a-kube-api-access-f2x22\") pod \"obo-prometheus-operator-7c8cf85677-bk4jp\" (UID: \"f4d71c7a-ca15-4516-804a-a318b170705a\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-bk4jp" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.563120 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7549bdddd6-d69ks"] Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.624922 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b4149afd-c602-43fa-b00c-f0b0738b2193-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7549bdddd6-wfjc9\" (UID: \"b4149afd-c602-43fa-b00c-f0b0738b2193\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7549bdddd6-wfjc9" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.625360 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b4149afd-c602-43fa-b00c-f0b0738b2193-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7549bdddd6-wfjc9\" (UID: \"b4149afd-c602-43fa-b00c-f0b0738b2193\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7549bdddd6-wfjc9" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.625572 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a0f9c29a-475d-41c0-8519-8cb77db2ae52-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7549bdddd6-d69ks\" (UID: 
\"a0f9c29a-475d-41c0-8519-8cb77db2ae52\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7549bdddd6-d69ks" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.625593 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a0f9c29a-475d-41c0-8519-8cb77db2ae52-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7549bdddd6-d69ks\" (UID: \"a0f9c29a-475d-41c0-8519-8cb77db2ae52\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7549bdddd6-d69ks" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.668183 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-bk4jp" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.672116 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-jn5lf"] Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.673489 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-cc5f78dfc-jn5lf" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.684935 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.684975 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-jn5lf"] Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.685324 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-hnsqg" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.727894 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b4149afd-c602-43fa-b00c-f0b0738b2193-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7549bdddd6-wfjc9\" (UID: \"b4149afd-c602-43fa-b00c-f0b0738b2193\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7549bdddd6-wfjc9" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.727943 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/f5d3a877-feb5-48ab-b4a4-becae9e904e8-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-jn5lf\" (UID: \"f5d3a877-feb5-48ab-b4a4-becae9e904e8\") " pod="openshift-operators/observability-operator-cc5f78dfc-jn5lf" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.727992 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b4149afd-c602-43fa-b00c-f0b0738b2193-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7549bdddd6-wfjc9\" (UID: \"b4149afd-c602-43fa-b00c-f0b0738b2193\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7549bdddd6-wfjc9" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.728017 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7d4v7\" (UniqueName: \"kubernetes.io/projected/f5d3a877-feb5-48ab-b4a4-becae9e904e8-kube-api-access-7d4v7\") pod \"observability-operator-cc5f78dfc-jn5lf\" (UID: \"f5d3a877-feb5-48ab-b4a4-becae9e904e8\") " 
pod="openshift-operators/observability-operator-cc5f78dfc-jn5lf" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.728107 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a0f9c29a-475d-41c0-8519-8cb77db2ae52-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7549bdddd6-d69ks\" (UID: \"a0f9c29a-475d-41c0-8519-8cb77db2ae52\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7549bdddd6-d69ks" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.728124 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a0f9c29a-475d-41c0-8519-8cb77db2ae52-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7549bdddd6-d69ks\" (UID: \"a0f9c29a-475d-41c0-8519-8cb77db2ae52\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7549bdddd6-d69ks" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.733253 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b4149afd-c602-43fa-b00c-f0b0738b2193-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7549bdddd6-wfjc9\" (UID: \"b4149afd-c602-43fa-b00c-f0b0738b2193\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7549bdddd6-wfjc9" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.735922 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b4149afd-c602-43fa-b00c-f0b0738b2193-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7549bdddd6-wfjc9\" (UID: \"b4149afd-c602-43fa-b00c-f0b0738b2193\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7549bdddd6-wfjc9" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.748282 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a0f9c29a-475d-41c0-8519-8cb77db2ae52-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7549bdddd6-d69ks\" (UID: \"a0f9c29a-475d-41c0-8519-8cb77db2ae52\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7549bdddd6-d69ks" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.748326 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a0f9c29a-475d-41c0-8519-8cb77db2ae52-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7549bdddd6-d69ks\" (UID: \"a0f9c29a-475d-41c0-8519-8cb77db2ae52\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7549bdddd6-d69ks" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.807179 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7549bdddd6-wfjc9" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.829418 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/f5d3a877-feb5-48ab-b4a4-becae9e904e8-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-jn5lf\" (UID: \"f5d3a877-feb5-48ab-b4a4-becae9e904e8\") " pod="openshift-operators/observability-operator-cc5f78dfc-jn5lf" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.829491 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7d4v7\" (UniqueName: \"kubernetes.io/projected/f5d3a877-feb5-48ab-b4a4-becae9e904e8-kube-api-access-7d4v7\") pod \"observability-operator-cc5f78dfc-jn5lf\" (UID: \"f5d3a877-feb5-48ab-b4a4-becae9e904e8\") " pod="openshift-operators/observability-operator-cc5f78dfc-jn5lf" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.835475 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/f5d3a877-feb5-48ab-b4a4-becae9e904e8-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-jn5lf\" (UID: \"f5d3a877-feb5-48ab-b4a4-becae9e904e8\") " pod="openshift-operators/observability-operator-cc5f78dfc-jn5lf" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.870435 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-ddg9b"] Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.871634 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7d4v7\" (UniqueName: \"kubernetes.io/projected/f5d3a877-feb5-48ab-b4a4-becae9e904e8-kube-api-access-7d4v7\") pod \"observability-operator-cc5f78dfc-jn5lf\" (UID: \"f5d3a877-feb5-48ab-b4a4-becae9e904e8\") " pod="openshift-operators/observability-operator-cc5f78dfc-jn5lf" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.871655 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-54bc95c9fb-ddg9b" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.876515 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-c7fxz" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.886729 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-ddg9b"] Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.903496 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7549bdddd6-d69ks" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.943800 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qvqf\" (UniqueName: \"kubernetes.io/projected/e2ad530e-a05b-4fef-85e7-85a827687bb6-kube-api-access-4qvqf\") pod \"perses-operator-54bc95c9fb-ddg9b\" (UID: \"e2ad530e-a05b-4fef-85e7-85a827687bb6\") " pod="openshift-operators/perses-operator-54bc95c9fb-ddg9b" Oct 10 18:14:33 crc kubenswrapper[4799]: I1010 18:14:33.943858 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/e2ad530e-a05b-4fef-85e7-85a827687bb6-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-ddg9b\" (UID: \"e2ad530e-a05b-4fef-85e7-85a827687bb6\") " pod="openshift-operators/perses-operator-54bc95c9fb-ddg9b" Oct 10 18:14:34 crc kubenswrapper[4799]: I1010 18:14:33.997027 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-cc5f78dfc-jn5lf" Oct 10 18:14:34 crc kubenswrapper[4799]: I1010 18:14:34.045136 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/e2ad530e-a05b-4fef-85e7-85a827687bb6-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-ddg9b\" (UID: \"e2ad530e-a05b-4fef-85e7-85a827687bb6\") " pod="openshift-operators/perses-operator-54bc95c9fb-ddg9b" Oct 10 18:14:34 crc kubenswrapper[4799]: I1010 18:14:34.045568 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qvqf\" (UniqueName: \"kubernetes.io/projected/e2ad530e-a05b-4fef-85e7-85a827687bb6-kube-api-access-4qvqf\") pod \"perses-operator-54bc95c9fb-ddg9b\" (UID: \"e2ad530e-a05b-4fef-85e7-85a827687bb6\") " pod="openshift-operators/perses-operator-54bc95c9fb-ddg9b" Oct 10 18:14:34 crc kubenswrapper[4799]: I1010 18:14:34.046727 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/e2ad530e-a05b-4fef-85e7-85a827687bb6-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-ddg9b\" (UID: \"e2ad530e-a05b-4fef-85e7-85a827687bb6\") " pod="openshift-operators/perses-operator-54bc95c9fb-ddg9b" Oct 10 18:14:34 crc kubenswrapper[4799]: I1010 18:14:34.078224 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qvqf\" (UniqueName: \"kubernetes.io/projected/e2ad530e-a05b-4fef-85e7-85a827687bb6-kube-api-access-4qvqf\") pod \"perses-operator-54bc95c9fb-ddg9b\" (UID: \"e2ad530e-a05b-4fef-85e7-85a827687bb6\") " pod="openshift-operators/perses-operator-54bc95c9fb-ddg9b" Oct 10 18:14:34 crc kubenswrapper[4799]: I1010 18:14:34.324400 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-54bc95c9fb-ddg9b" Oct 10 18:14:34 crc kubenswrapper[4799]: I1010 18:14:34.370592 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-bk4jp"] Oct 10 18:14:34 crc kubenswrapper[4799]: W1010 18:14:34.393230 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4d71c7a_ca15_4516_804a_a318b170705a.slice/crio-d8faef572e9fdeb3bf7191886016e57fcf501052044363e0e3a5b84d125c8502 WatchSource:0}: Error finding container d8faef572e9fdeb3bf7191886016e57fcf501052044363e0e3a5b84d125c8502: Status 404 returned error can't find the container with id d8faef572e9fdeb3bf7191886016e57fcf501052044363e0e3a5b84d125c8502 Oct 10 18:14:34 crc kubenswrapper[4799]: I1010 18:14:34.510645 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7549bdddd6-wfjc9"] Oct 10 18:14:34 crc kubenswrapper[4799]: I1010 18:14:34.668797 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7549bdddd6-d69ks"] Oct 10 18:14:34 crc kubenswrapper[4799]: I1010 18:14:34.785370 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-jn5lf"] Oct 10 18:14:34 crc kubenswrapper[4799]: W1010 18:14:34.810889 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf5d3a877_feb5_48ab_b4a4_becae9e904e8.slice/crio-6c85ded1af0d5e28d0498d3a7ca1640baa38c550daa5defebf68a277e994e0fd WatchSource:0}: Error finding container 6c85ded1af0d5e28d0498d3a7ca1640baa38c550daa5defebf68a277e994e0fd: Status 404 returned error can't find the container with id 6c85ded1af0d5e28d0498d3a7ca1640baa38c550daa5defebf68a277e994e0fd Oct 10 18:14:34 crc kubenswrapper[4799]: I1010 18:14:34.979148 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-ddg9b"] Oct 10 18:14:34 crc kubenswrapper[4799]: W1010 18:14:34.980249 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode2ad530e_a05b_4fef_85e7_85a827687bb6.slice/crio-29cf8b68d0c1512a96a71cc1bfa5029e5090a51947d3305ec5857da4d61c6310 WatchSource:0}: Error finding container 29cf8b68d0c1512a96a71cc1bfa5029e5090a51947d3305ec5857da4d61c6310: Status 404 returned error can't find the container with id 29cf8b68d0c1512a96a71cc1bfa5029e5090a51947d3305ec5857da4d61c6310 Oct 10 18:14:35 crc kubenswrapper[4799]: I1010 18:14:35.337251 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7549bdddd6-d69ks" event={"ID":"a0f9c29a-475d-41c0-8519-8cb77db2ae52","Type":"ContainerStarted","Data":"c548d1311cada13a11e6f250732c180f6c3d8a24b08515f0cfa912d2e4b84dce"} Oct 10 18:14:35 crc kubenswrapper[4799]: I1010 18:14:35.338981 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-54bc95c9fb-ddg9b" event={"ID":"e2ad530e-a05b-4fef-85e7-85a827687bb6","Type":"ContainerStarted","Data":"29cf8b68d0c1512a96a71cc1bfa5029e5090a51947d3305ec5857da4d61c6310"} Oct 10 18:14:35 crc kubenswrapper[4799]: I1010 18:14:35.340866 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-bk4jp" 
event={"ID":"f4d71c7a-ca15-4516-804a-a318b170705a","Type":"ContainerStarted","Data":"d8faef572e9fdeb3bf7191886016e57fcf501052044363e0e3a5b84d125c8502"} Oct 10 18:14:35 crc kubenswrapper[4799]: I1010 18:14:35.345067 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-cc5f78dfc-jn5lf" event={"ID":"f5d3a877-feb5-48ab-b4a4-becae9e904e8","Type":"ContainerStarted","Data":"6c85ded1af0d5e28d0498d3a7ca1640baa38c550daa5defebf68a277e994e0fd"} Oct 10 18:14:35 crc kubenswrapper[4799]: I1010 18:14:35.347622 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7549bdddd6-wfjc9" event={"ID":"b4149afd-c602-43fa-b00c-f0b0738b2193","Type":"ContainerStarted","Data":"2471a06240c042aa76e9b53d330c49535c680993b42f0c2d196f28f723adba27"} Oct 10 18:14:40 crc kubenswrapper[4799]: I1010 18:14:40.044297 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-600c-account-create-jl2pr"] Oct 10 18:14:40 crc kubenswrapper[4799]: I1010 18:14:40.052220 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-600c-account-create-jl2pr"] Oct 10 18:14:41 crc kubenswrapper[4799]: I1010 18:14:41.046715 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-6e42-account-create-2nqmr"] Oct 10 18:14:41 crc kubenswrapper[4799]: I1010 18:14:41.067910 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-6a8f-account-create-2q2ts"] Oct 10 18:14:41 crc kubenswrapper[4799]: I1010 18:14:41.079914 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-6e42-account-create-2nqmr"] Oct 10 18:14:41 crc kubenswrapper[4799]: I1010 18:14:41.089769 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-6a8f-account-create-2q2ts"] Oct 10 18:14:41 crc kubenswrapper[4799]: I1010 18:14:41.420657 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39ac58cf-e2da-416f-9456-2a9eaa8060a9" path="/var/lib/kubelet/pods/39ac58cf-e2da-416f-9456-2a9eaa8060a9/volumes" Oct 10 18:14:41 crc kubenswrapper[4799]: I1010 18:14:41.422420 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ff134f4-2451-4260-bd13-814729886f38" path="/var/lib/kubelet/pods/7ff134f4-2451-4260-bd13-814729886f38/volumes" Oct 10 18:14:41 crc kubenswrapper[4799]: I1010 18:14:41.422964 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2435e3e-477f-40b8-820f-1d9e2c80db7e" path="/var/lib/kubelet/pods/a2435e3e-477f-40b8-820f-1d9e2c80db7e/volumes" Oct 10 18:14:44 crc kubenswrapper[4799]: I1010 18:14:44.435451 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7549bdddd6-d69ks" event={"ID":"a0f9c29a-475d-41c0-8519-8cb77db2ae52","Type":"ContainerStarted","Data":"6e644a8549e4fea2f23737f2f3db69131e56396f4a7e31725947183890924438"} Oct 10 18:14:44 crc kubenswrapper[4799]: I1010 18:14:44.437135 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-54bc95c9fb-ddg9b" event={"ID":"e2ad530e-a05b-4fef-85e7-85a827687bb6","Type":"ContainerStarted","Data":"26ec1d3bda5ea459eb0410f0a3cca323ae00a282ec2fce21153da0863ec7b115"} Oct 10 18:14:44 crc kubenswrapper[4799]: I1010 18:14:44.437265 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-54bc95c9fb-ddg9b" Oct 10 18:14:44 crc kubenswrapper[4799]: I1010 
18:14:44.438673 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-bk4jp" event={"ID":"f4d71c7a-ca15-4516-804a-a318b170705a","Type":"ContainerStarted","Data":"d056c8a37618eda6563e59fee7940e0b441f1943b90ae6662b3c99a2550574c8"}
Oct 10 18:14:44 crc kubenswrapper[4799]: I1010 18:14:44.440131 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7549bdddd6-wfjc9" event={"ID":"b4149afd-c602-43fa-b00c-f0b0738b2193","Type":"ContainerStarted","Data":"85c4a70c827af73802650dfaad2b90ce29416424dbf9f1c9d805ed8c21c1a4bc"}
Oct 10 18:14:44 crc kubenswrapper[4799]: I1010 18:14:44.441555 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-cc5f78dfc-jn5lf" event={"ID":"f5d3a877-feb5-48ab-b4a4-becae9e904e8","Type":"ContainerStarted","Data":"b992070f22c2c40cf146aa9631297edc04e86c1f2acf0f3d29ca062253f10a5f"}
Oct 10 18:14:44 crc kubenswrapper[4799]: I1010 18:14:44.441806 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-cc5f78dfc-jn5lf"
Oct 10 18:14:44 crc kubenswrapper[4799]: I1010 18:14:44.447574 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-cc5f78dfc-jn5lf"
Oct 10 18:14:44 crc kubenswrapper[4799]: I1010 18:14:44.463792 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7549bdddd6-d69ks" podStartSLOduration=3.125959642 podStartE2EDuration="11.463733259s" podCreationTimestamp="2025-10-10 18:14:33 +0000 UTC" firstStartedPulling="2025-10-10 18:14:34.692634385 +0000 UTC m=+6168.200958500" lastFinishedPulling="2025-10-10 18:14:43.030408002 +0000 UTC m=+6176.538732117" observedRunningTime="2025-10-10 18:14:44.460326466 +0000 UTC m=+6177.968650581" watchObservedRunningTime="2025-10-10 18:14:44.463733259 +0000 UTC m=+6177.972057374"
Oct 10 18:14:44 crc kubenswrapper[4799]: I1010 18:14:44.519884 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-54bc95c9fb-ddg9b" podStartSLOduration=3.323334241 podStartE2EDuration="11.51986694s" podCreationTimestamp="2025-10-10 18:14:33 +0000 UTC" firstStartedPulling="2025-10-10 18:14:34.983033105 +0000 UTC m=+6168.491357220" lastFinishedPulling="2025-10-10 18:14:43.179565804 +0000 UTC m=+6176.687889919" observedRunningTime="2025-10-10 18:14:44.516962739 +0000 UTC m=+6178.025286864" watchObservedRunningTime="2025-10-10 18:14:44.51986694 +0000 UTC m=+6178.028191055"
Oct 10 18:14:44 crc kubenswrapper[4799]: I1010 18:14:44.526803 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-bk4jp" podStartSLOduration=2.888936114 podStartE2EDuration="11.526777448s" podCreationTimestamp="2025-10-10 18:14:33 +0000 UTC" firstStartedPulling="2025-10-10 18:14:34.396565446 +0000 UTC m=+6167.904889561" lastFinishedPulling="2025-10-10 18:14:43.03440678 +0000 UTC m=+6176.542730895" observedRunningTime="2025-10-10 18:14:44.499963174 +0000 UTC m=+6178.008287289" watchObservedRunningTime="2025-10-10 18:14:44.526777448 +0000 UTC m=+6178.035101563"
Oct 10 18:14:44 crc kubenswrapper[4799]: I1010 18:14:44.541456 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7549bdddd6-wfjc9" podStartSLOduration=3.0706538820000002 podStartE2EDuration="11.541438466s" podCreationTimestamp="2025-10-10 18:14:33 +0000 UTC" firstStartedPulling="2025-10-10 18:14:34.558277165 +0000 UTC m=+6168.066601280" lastFinishedPulling="2025-10-10 18:14:43.029061749 +0000 UTC m=+6176.537385864" observedRunningTime="2025-10-10 18:14:44.540611996 +0000 UTC m=+6178.048936121" watchObservedRunningTime="2025-10-10 18:14:44.541438466 +0000 UTC m=+6178.049762571"
Oct 10 18:14:44 crc kubenswrapper[4799]: I1010 18:14:44.564674 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-cc5f78dfc-jn5lf" podStartSLOduration=3.185135177 podStartE2EDuration="11.564657293s" podCreationTimestamp="2025-10-10 18:14:33 +0000 UTC" firstStartedPulling="2025-10-10 18:14:34.814423129 +0000 UTC m=+6168.322747244" lastFinishedPulling="2025-10-10 18:14:43.193945245 +0000 UTC m=+6176.702269360" observedRunningTime="2025-10-10 18:14:44.560143703 +0000 UTC m=+6178.068467818" watchObservedRunningTime="2025-10-10 18:14:44.564657293 +0000 UTC m=+6178.072981398"
Oct 10 18:14:50 crc kubenswrapper[4799]: I1010 18:14:50.030273 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-dcsnk"]
Oct 10 18:14:50 crc kubenswrapper[4799]: I1010 18:14:50.040943 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-dcsnk"]
Oct 10 18:14:51 crc kubenswrapper[4799]: I1010 18:14:51.424396 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43af56e2-6fae-4aec-bf32-201d0d17faa7" path="/var/lib/kubelet/pods/43af56e2-6fae-4aec-bf32-201d0d17faa7/volumes"
Oct 10 18:14:54 crc kubenswrapper[4799]: I1010 18:14:54.327923 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-54bc95c9fb-ddg9b"
Oct 10 18:14:57 crc kubenswrapper[4799]: I1010 18:14:57.416780 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-9nwgm"]
Oct 10 18:14:57 crc kubenswrapper[4799]: I1010 18:14:57.420120 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9nwgm"
Oct 10 18:14:57 crc kubenswrapper[4799]: I1010 18:14:57.434442 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9nwgm"]
Oct 10 18:14:57 crc kubenswrapper[4799]: I1010 18:14:57.459664 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8414f950-bcd6-4ed2-8f66-c2830d2acfaf-catalog-content\") pod \"redhat-marketplace-9nwgm\" (UID: \"8414f950-bcd6-4ed2-8f66-c2830d2acfaf\") " pod="openshift-marketplace/redhat-marketplace-9nwgm"
Oct 10 18:14:57 crc kubenswrapper[4799]: I1010 18:14:57.460447 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrfk6\" (UniqueName: \"kubernetes.io/projected/8414f950-bcd6-4ed2-8f66-c2830d2acfaf-kube-api-access-lrfk6\") pod \"redhat-marketplace-9nwgm\" (UID: \"8414f950-bcd6-4ed2-8f66-c2830d2acfaf\") " pod="openshift-marketplace/redhat-marketplace-9nwgm"
Oct 10 18:14:57 crc kubenswrapper[4799]: I1010 18:14:57.460539 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8414f950-bcd6-4ed2-8f66-c2830d2acfaf-utilities\") pod \"redhat-marketplace-9nwgm\" (UID: \"8414f950-bcd6-4ed2-8f66-c2830d2acfaf\") " pod="openshift-marketplace/redhat-marketplace-9nwgm"
Oct 10 18:14:57 crc kubenswrapper[4799]: I1010 18:14:57.562704 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrfk6\" (UniqueName: \"kubernetes.io/projected/8414f950-bcd6-4ed2-8f66-c2830d2acfaf-kube-api-access-lrfk6\") pod \"redhat-marketplace-9nwgm\" (UID: \"8414f950-bcd6-4ed2-8f66-c2830d2acfaf\") " pod="openshift-marketplace/redhat-marketplace-9nwgm"
Oct 10 18:14:57 crc kubenswrapper[4799]: I1010 18:14:57.562772 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8414f950-bcd6-4ed2-8f66-c2830d2acfaf-utilities\") pod \"redhat-marketplace-9nwgm\" (UID: \"8414f950-bcd6-4ed2-8f66-c2830d2acfaf\") " pod="openshift-marketplace/redhat-marketplace-9nwgm"
Oct 10 18:14:57 crc kubenswrapper[4799]: I1010 18:14:57.562805 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8414f950-bcd6-4ed2-8f66-c2830d2acfaf-catalog-content\") pod \"redhat-marketplace-9nwgm\" (UID: \"8414f950-bcd6-4ed2-8f66-c2830d2acfaf\") " pod="openshift-marketplace/redhat-marketplace-9nwgm"
Oct 10 18:14:57 crc kubenswrapper[4799]: I1010 18:14:57.563262 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8414f950-bcd6-4ed2-8f66-c2830d2acfaf-utilities\") pod \"redhat-marketplace-9nwgm\" (UID: \"8414f950-bcd6-4ed2-8f66-c2830d2acfaf\") " pod="openshift-marketplace/redhat-marketplace-9nwgm"
Oct 10 18:14:57 crc kubenswrapper[4799]: I1010 18:14:57.563284 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8414f950-bcd6-4ed2-8f66-c2830d2acfaf-catalog-content\") pod \"redhat-marketplace-9nwgm\" (UID: \"8414f950-bcd6-4ed2-8f66-c2830d2acfaf\") " pod="openshift-marketplace/redhat-marketplace-9nwgm"
Oct 10 18:14:57 crc kubenswrapper[4799]: I1010 18:14:57.586747 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrfk6\" (UniqueName: \"kubernetes.io/projected/8414f950-bcd6-4ed2-8f66-c2830d2acfaf-kube-api-access-lrfk6\") pod \"redhat-marketplace-9nwgm\" (UID: \"8414f950-bcd6-4ed2-8f66-c2830d2acfaf\") " pod="openshift-marketplace/redhat-marketplace-9nwgm"
Oct 10 18:14:57 crc kubenswrapper[4799]: I1010 18:14:57.762318 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9nwgm"
Oct 10 18:14:58 crc kubenswrapper[4799]: I1010 18:14:58.130904 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"]
Oct 10 18:14:58 crc kubenswrapper[4799]: I1010 18:14:58.131387 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="00daf52d-7faf-4cc7-80e2-7b9db14d2196" containerName="openstackclient" containerID="cri-o://1799817d4e72e1ae52fb951ccb4b28f64b395902e023fab586dee0d6ebbe54ce" gracePeriod=2
Oct 10 18:14:58 crc kubenswrapper[4799]: I1010 18:14:58.140304 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"]
Oct 10 18:14:58 crc kubenswrapper[4799]: I1010 18:14:58.198820 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"]
Oct 10 18:14:58 crc kubenswrapper[4799]: E1010 18:14:58.199317 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00daf52d-7faf-4cc7-80e2-7b9db14d2196" containerName="openstackclient"
Oct 10 18:14:58 crc kubenswrapper[4799]: I1010 18:14:58.199330 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="00daf52d-7faf-4cc7-80e2-7b9db14d2196" containerName="openstackclient"
Oct 10 18:14:58 crc kubenswrapper[4799]: I1010 18:14:58.199539 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="00daf52d-7faf-4cc7-80e2-7b9db14d2196" containerName="openstackclient"
Oct 10 18:14:58 crc kubenswrapper[4799]: I1010 18:14:58.200282 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Oct 10 18:14:58 crc kubenswrapper[4799]: I1010 18:14:58.226127 4799 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="00daf52d-7faf-4cc7-80e2-7b9db14d2196" podUID="b0e07cac-b1ab-436c-95ac-8c150d84e709"
Oct 10 18:14:58 crc kubenswrapper[4799]: I1010 18:14:58.226175 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Oct 10 18:14:58 crc kubenswrapper[4799]: I1010 18:14:58.276143 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b0e07cac-b1ab-436c-95ac-8c150d84e709-openstack-config-secret\") pod \"openstackclient\" (UID: \"b0e07cac-b1ab-436c-95ac-8c150d84e709\") " pod="openstack/openstackclient"
Oct 10 18:14:58 crc kubenswrapper[4799]: I1010 18:14:58.276219 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l45ht\" (UniqueName: \"kubernetes.io/projected/b0e07cac-b1ab-436c-95ac-8c150d84e709-kube-api-access-l45ht\") pod \"openstackclient\" (UID: \"b0e07cac-b1ab-436c-95ac-8c150d84e709\") " pod="openstack/openstackclient"
Oct 10 18:14:58 crc kubenswrapper[4799]: I1010 18:14:58.276276 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b0e07cac-b1ab-436c-95ac-8c150d84e709-openstack-config\") pod \"openstackclient\" (UID: \"b0e07cac-b1ab-436c-95ac-8c150d84e709\") " pod="openstack/openstackclient"
Oct 10 18:14:58 crc kubenswrapper[4799]: W1010 18:14:58.338467 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8414f950_bcd6_4ed2_8f66_c2830d2acfaf.slice/crio-d187d3c7801557593e592e7305b639a5b4753ba933c7513bd70e9ff43d92142e WatchSource:0}: Error finding container d187d3c7801557593e592e7305b639a5b4753ba933c7513bd70e9ff43d92142e: Status 404 returned error can't find the container with id d187d3c7801557593e592e7305b639a5b4753ba933c7513bd70e9ff43d92142e
Oct 10 18:14:58 crc kubenswrapper[4799]: I1010 18:14:58.364451 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9nwgm"]
Oct 10 18:14:58 crc kubenswrapper[4799]: I1010 18:14:58.381904 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b0e07cac-b1ab-436c-95ac-8c150d84e709-openstack-config-secret\") pod \"openstackclient\" (UID: \"b0e07cac-b1ab-436c-95ac-8c150d84e709\") " pod="openstack/openstackclient"
Oct 10 18:14:58 crc kubenswrapper[4799]: I1010 18:14:58.381964 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l45ht\" (UniqueName: \"kubernetes.io/projected/b0e07cac-b1ab-436c-95ac-8c150d84e709-kube-api-access-l45ht\") pod \"openstackclient\" (UID: \"b0e07cac-b1ab-436c-95ac-8c150d84e709\") " pod="openstack/openstackclient"
Oct 10 18:14:58 crc kubenswrapper[4799]: I1010 18:14:58.382001 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b0e07cac-b1ab-436c-95ac-8c150d84e709-openstack-config\") pod \"openstackclient\" (UID: \"b0e07cac-b1ab-436c-95ac-8c150d84e709\") " pod="openstack/openstackclient"
Oct 10 18:14:58 crc kubenswrapper[4799]: I1010 18:14:58.383685 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b0e07cac-b1ab-436c-95ac-8c150d84e709-openstack-config\") pod \"openstackclient\" (UID: \"b0e07cac-b1ab-436c-95ac-8c150d84e709\") " pod="openstack/openstackclient"
Oct 10 18:14:58 crc kubenswrapper[4799]: I1010 18:14:58.418492 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"]
Oct 10 18:14:58 crc kubenswrapper[4799]: I1010 18:14:58.425409 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l45ht\" (UniqueName: \"kubernetes.io/projected/b0e07cac-b1ab-436c-95ac-8c150d84e709-kube-api-access-l45ht\") pod \"openstackclient\" (UID: \"b0e07cac-b1ab-436c-95ac-8c150d84e709\") " pod="openstack/openstackclient"
Oct 10 18:14:58 crc kubenswrapper[4799]: I1010 18:14:58.427230 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b0e07cac-b1ab-436c-95ac-8c150d84e709-openstack-config-secret\") pod \"openstackclient\" (UID: \"b0e07cac-b1ab-436c-95ac-8c150d84e709\") " pod="openstack/openstackclient"
Oct 10 18:14:58 crc kubenswrapper[4799]: I1010 18:14:58.427682 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Oct 10 18:14:58 crc kubenswrapper[4799]: I1010 18:14:58.432711 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-6l5xt"
Oct 10 18:14:58 crc kubenswrapper[4799]: I1010 18:14:58.460740 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Oct 10 18:14:58 crc kubenswrapper[4799]: I1010 18:14:58.483926 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7trx\" (UniqueName: \"kubernetes.io/projected/171aaf88-50cd-47d9-a43f-1df1d7ed96f8-kube-api-access-l7trx\") pod \"kube-state-metrics-0\" (UID: \"171aaf88-50cd-47d9-a43f-1df1d7ed96f8\") " pod="openstack/kube-state-metrics-0"
Oct 10 18:14:58 crc kubenswrapper[4799]: I1010 18:14:58.548481 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Oct 10 18:14:58 crc kubenswrapper[4799]: I1010 18:14:58.586346 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7trx\" (UniqueName: \"kubernetes.io/projected/171aaf88-50cd-47d9-a43f-1df1d7ed96f8-kube-api-access-l7trx\") pod \"kube-state-metrics-0\" (UID: \"171aaf88-50cd-47d9-a43f-1df1d7ed96f8\") " pod="openstack/kube-state-metrics-0"
Oct 10 18:14:58 crc kubenswrapper[4799]: I1010 18:14:58.598999 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9nwgm" event={"ID":"8414f950-bcd6-4ed2-8f66-c2830d2acfaf","Type":"ContainerStarted","Data":"d187d3c7801557593e592e7305b639a5b4753ba933c7513bd70e9ff43d92142e"}
Oct 10 18:14:58 crc kubenswrapper[4799]: I1010 18:14:58.620485 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7trx\" (UniqueName: \"kubernetes.io/projected/171aaf88-50cd-47d9-a43f-1df1d7ed96f8-kube-api-access-l7trx\") pod \"kube-state-metrics-0\" (UID: \"171aaf88-50cd-47d9-a43f-1df1d7ed96f8\") " pod="openstack/kube-state-metrics-0"
Oct 10 18:14:58 crc kubenswrapper[4799]: I1010 18:14:58.817975 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.058265 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/alertmanager-metric-storage-0"]
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.060916 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0"
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.066261 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-tls-assets-0"
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.066477 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-web-config"
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.066586 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-generated"
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.066689 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-alertmanager-dockercfg-wxhws"
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.071822 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"]
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.101268 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/3885dc5d-b551-4371-b731-0ffda7c18126-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"3885dc5d-b551-4371-b731-0ffda7c18126\") " pod="openstack/alertmanager-metric-storage-0"
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.101383 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lj29x\" (UniqueName: \"kubernetes.io/projected/3885dc5d-b551-4371-b731-0ffda7c18126-kube-api-access-lj29x\") pod \"alertmanager-metric-storage-0\" (UID: \"3885dc5d-b551-4371-b731-0ffda7c18126\") " pod="openstack/alertmanager-metric-storage-0"
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.101410 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/3885dc5d-b551-4371-b731-0ffda7c18126-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"3885dc5d-b551-4371-b731-0ffda7c18126\") " pod="openstack/alertmanager-metric-storage-0"
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.101602 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/3885dc5d-b551-4371-b731-0ffda7c18126-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"3885dc5d-b551-4371-b731-0ffda7c18126\") " pod="openstack/alertmanager-metric-storage-0"
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.101703 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/3885dc5d-b551-4371-b731-0ffda7c18126-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"3885dc5d-b551-4371-b731-0ffda7c18126\") " pod="openstack/alertmanager-metric-storage-0"
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.101780 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/3885dc5d-b551-4371-b731-0ffda7c18126-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"3885dc5d-b551-4371-b731-0ffda7c18126\") " pod="openstack/alertmanager-metric-storage-0"
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.203574 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/3885dc5d-b551-4371-b731-0ffda7c18126-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"3885dc5d-b551-4371-b731-0ffda7c18126\") " pod="openstack/alertmanager-metric-storage-0"
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.203622 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/3885dc5d-b551-4371-b731-0ffda7c18126-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"3885dc5d-b551-4371-b731-0ffda7c18126\") " pod="openstack/alertmanager-metric-storage-0"
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.203684 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/3885dc5d-b551-4371-b731-0ffda7c18126-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"3885dc5d-b551-4371-b731-0ffda7c18126\") " pod="openstack/alertmanager-metric-storage-0"
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.203783 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lj29x\" (UniqueName: \"kubernetes.io/projected/3885dc5d-b551-4371-b731-0ffda7c18126-kube-api-access-lj29x\") pod \"alertmanager-metric-storage-0\" (UID: \"3885dc5d-b551-4371-b731-0ffda7c18126\") " pod="openstack/alertmanager-metric-storage-0"
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.203802 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/3885dc5d-b551-4371-b731-0ffda7c18126-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"3885dc5d-b551-4371-b731-0ffda7c18126\") " pod="openstack/alertmanager-metric-storage-0"
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.203836 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/3885dc5d-b551-4371-b731-0ffda7c18126-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"3885dc5d-b551-4371-b731-0ffda7c18126\") " pod="openstack/alertmanager-metric-storage-0"
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.204550 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/3885dc5d-b551-4371-b731-0ffda7c18126-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"3885dc5d-b551-4371-b731-0ffda7c18126\") " pod="openstack/alertmanager-metric-storage-0"
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.216212 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/3885dc5d-b551-4371-b731-0ffda7c18126-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"3885dc5d-b551-4371-b731-0ffda7c18126\") " pod="openstack/alertmanager-metric-storage-0"
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.220775 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/3885dc5d-b551-4371-b731-0ffda7c18126-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"3885dc5d-b551-4371-b731-0ffda7c18126\") " pod="openstack/alertmanager-metric-storage-0"
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.221130 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/3885dc5d-b551-4371-b731-0ffda7c18126-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"3885dc5d-b551-4371-b731-0ffda7c18126\") " pod="openstack/alertmanager-metric-storage-0"
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.237659 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/3885dc5d-b551-4371-b731-0ffda7c18126-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"3885dc5d-b551-4371-b731-0ffda7c18126\") " pod="openstack/alertmanager-metric-storage-0"
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.241891 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lj29x\" (UniqueName: \"kubernetes.io/projected/3885dc5d-b551-4371-b731-0ffda7c18126-kube-api-access-lj29x\") pod \"alertmanager-metric-storage-0\" (UID: \"3885dc5d-b551-4371-b731-0ffda7c18126\") " pod="openstack/alertmanager-metric-storage-0"
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.337490 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.476239 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0"
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.624316 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"b0e07cac-b1ab-436c-95ac-8c150d84e709","Type":"ContainerStarted","Data":"79a2d893cea487dd22fa66f0fd2e58c8914fd9709cdcd6a4714498214879b80e"}
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.625946 4799 generic.go:334] "Generic (PLEG): container finished" podID="8414f950-bcd6-4ed2-8f66-c2830d2acfaf" containerID="5002cc8f91fdf81f75131cb53984a7679c1458bb43cd6e97e5ea4ac6728eb6cf" exitCode=0
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.625970 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9nwgm" event={"ID":"8414f950-bcd6-4ed2-8f66-c2830d2acfaf","Type":"ContainerDied","Data":"5002cc8f91fdf81f75131cb53984a7679c1458bb43cd6e97e5ea4ac6728eb6cf"}
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.631459 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.741377 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.866911 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"]
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.869510 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.875318 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0"
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.875633 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config"
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.875780 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-m9p6m"
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.875891 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage"
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.882331 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file"
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.888464 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0"
Oct 10 18:14:59 crc kubenswrapper[4799]: I1010 18:14:59.911728 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.026859 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/98cdb179-8d6d-47f5-8bfa-c0d77def55df-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"98cdb179-8d6d-47f5-8bfa-c0d77def55df\") " pod="openstack/prometheus-metric-storage-0"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.026928 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-37f9a422-5435-4ba4-ac81-2b5eae2123a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-37f9a422-5435-4ba4-ac81-2b5eae2123a8\") pod \"prometheus-metric-storage-0\" (UID: \"98cdb179-8d6d-47f5-8bfa-c0d77def55df\") " pod="openstack/prometheus-metric-storage-0"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.026979 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/98cdb179-8d6d-47f5-8bfa-c0d77def55df-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"98cdb179-8d6d-47f5-8bfa-c0d77def55df\") " pod="openstack/prometheus-metric-storage-0"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.026997 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/98cdb179-8d6d-47f5-8bfa-c0d77def55df-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"98cdb179-8d6d-47f5-8bfa-c0d77def55df\") " pod="openstack/prometheus-metric-storage-0"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.027085 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/98cdb179-8d6d-47f5-8bfa-c0d77def55df-config\") pod \"prometheus-metric-storage-0\" (UID: \"98cdb179-8d6d-47f5-8bfa-c0d77def55df\") " pod="openstack/prometheus-metric-storage-0"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.027106 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-snk8p\" (UniqueName: \"kubernetes.io/projected/98cdb179-8d6d-47f5-8bfa-c0d77def55df-kube-api-access-snk8p\") pod \"prometheus-metric-storage-0\" (UID: \"98cdb179-8d6d-47f5-8bfa-c0d77def55df\") " pod="openstack/prometheus-metric-storage-0"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.027133 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/98cdb179-8d6d-47f5-8bfa-c0d77def55df-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"98cdb179-8d6d-47f5-8bfa-c0d77def55df\") " pod="openstack/prometheus-metric-storage-0"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.027157 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/98cdb179-8d6d-47f5-8bfa-c0d77def55df-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"98cdb179-8d6d-47f5-8bfa-c0d77def55df\") " pod="openstack/prometheus-metric-storage-0"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.136534 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/98cdb179-8d6d-47f5-8bfa-c0d77def55df-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"98cdb179-8d6d-47f5-8bfa-c0d77def55df\") " pod="openstack/prometheus-metric-storage-0"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.136607 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/98cdb179-8d6d-47f5-8bfa-c0d77def55df-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"98cdb179-8d6d-47f5-8bfa-c0d77def55df\") " pod="openstack/prometheus-metric-storage-0"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.136644 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-37f9a422-5435-4ba4-ac81-2b5eae2123a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-37f9a422-5435-4ba4-ac81-2b5eae2123a8\") pod \"prometheus-metric-storage-0\" (UID: \"98cdb179-8d6d-47f5-8bfa-c0d77def55df\") " pod="openstack/prometheus-metric-storage-0"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.136692 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/98cdb179-8d6d-47f5-8bfa-c0d77def55df-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"98cdb179-8d6d-47f5-8bfa-c0d77def55df\") " pod="openstack/prometheus-metric-storage-0"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.136708 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/98cdb179-8d6d-47f5-8bfa-c0d77def55df-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"98cdb179-8d6d-47f5-8bfa-c0d77def55df\") " pod="openstack/prometheus-metric-storage-0"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.136820 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/98cdb179-8d6d-47f5-8bfa-c0d77def55df-config\") pod \"prometheus-metric-storage-0\" (UID: \"98cdb179-8d6d-47f5-8bfa-c0d77def55df\") " pod="openstack/prometheus-metric-storage-0"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.136842 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-snk8p\" (UniqueName: \"kubernetes.io/projected/98cdb179-8d6d-47f5-8bfa-c0d77def55df-kube-api-access-snk8p\") pod \"prometheus-metric-storage-0\" (UID: \"98cdb179-8d6d-47f5-8bfa-c0d77def55df\") " pod="openstack/prometheus-metric-storage-0"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.136868 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/98cdb179-8d6d-47f5-8bfa-c0d77def55df-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"98cdb179-8d6d-47f5-8bfa-c0d77def55df\") " pod="openstack/prometheus-metric-storage-0"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.146722 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/98cdb179-8d6d-47f5-8bfa-c0d77def55df-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"98cdb179-8d6d-47f5-8bfa-c0d77def55df\") " pod="openstack/prometheus-metric-storage-0"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.149626 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/98cdb179-8d6d-47f5-8bfa-c0d77def55df-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"98cdb179-8d6d-47f5-8bfa-c0d77def55df\") " pod="openstack/prometheus-metric-storage-0"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.170423 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/98cdb179-8d6d-47f5-8bfa-c0d77def55df-config\") pod \"prometheus-metric-storage-0\" (UID: \"98cdb179-8d6d-47f5-8bfa-c0d77def55df\") " pod="openstack/prometheus-metric-storage-0"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.171300 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/98cdb179-8d6d-47f5-8bfa-c0d77def55df-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"98cdb179-8d6d-47f5-8bfa-c0d77def55df\") " pod="openstack/prometheus-metric-storage-0"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.173256 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/98cdb179-8d6d-47f5-8bfa-c0d77def55df-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"98cdb179-8d6d-47f5-8bfa-c0d77def55df\") " pod="openstack/prometheus-metric-storage-0"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.173853 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/98cdb179-8d6d-47f5-8bfa-c0d77def55df-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"98cdb179-8d6d-47f5-8bfa-c0d77def55df\") " pod="openstack/prometheus-metric-storage-0"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.176389 4799 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.176489 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-37f9a422-5435-4ba4-ac81-2b5eae2123a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-37f9a422-5435-4ba4-ac81-2b5eae2123a8\") pod \"prometheus-metric-storage-0\" (UID: \"98cdb179-8d6d-47f5-8bfa-c0d77def55df\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/39d51ebd196c1febf3f5caa935f7308d76a2a49c6408070306bd2410732db00c/globalmount\"" pod="openstack/prometheus-metric-storage-0"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.206338 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335335-9xp6n"]
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.208550 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335335-9xp6n"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.210215 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-snk8p\" (UniqueName: \"kubernetes.io/projected/98cdb179-8d6d-47f5-8bfa-c0d77def55df-kube-api-access-snk8p\") pod \"prometheus-metric-storage-0\" (UID: \"98cdb179-8d6d-47f5-8bfa-c0d77def55df\") " pod="openstack/prometheus-metric-storage-0"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.213580 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.216581 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.266212 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335335-9xp6n"]
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.342648 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a0d00f09-0bc5-40df-bdc8-3b973e57c60b-secret-volume\") pod \"collect-profiles-29335335-9xp6n\" (UID: \"a0d00f09-0bc5-40df-bdc8-3b973e57c60b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335335-9xp6n"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.343183 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvvxj\" (UniqueName: \"kubernetes.io/projected/a0d00f09-0bc5-40df-bdc8-3b973e57c60b-kube-api-access-cvvxj\") pod \"collect-profiles-29335335-9xp6n\" (UID: \"a0d00f09-0bc5-40df-bdc8-3b973e57c60b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335335-9xp6n"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.343241 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a0d00f09-0bc5-40df-bdc8-3b973e57c60b-config-volume\") pod \"collect-profiles-29335335-9xp6n\" (UID: \"a0d00f09-0bc5-40df-bdc8-3b973e57c60b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335335-9xp6n"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.352446 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-37f9a422-5435-4ba4-ac81-2b5eae2123a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-37f9a422-5435-4ba4-ac81-2b5eae2123a8\") pod \"prometheus-metric-storage-0\" (UID: \"98cdb179-8d6d-47f5-8bfa-c0d77def55df\") " pod="openstack/prometheus-metric-storage-0"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.356457 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"]
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.451196 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a0d00f09-0bc5-40df-bdc8-3b973e57c60b-config-volume\") pod \"collect-profiles-29335335-9xp6n\" (UID: \"a0d00f09-0bc5-40df-bdc8-3b973e57c60b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335335-9xp6n"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.451260 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a0d00f09-0bc5-40df-bdc8-3b973e57c60b-secret-volume\") pod \"collect-profiles-29335335-9xp6n\" (UID: \"a0d00f09-0bc5-40df-bdc8-3b973e57c60b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335335-9xp6n"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.451380 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvvxj\" (UniqueName: \"kubernetes.io/projected/a0d00f09-0bc5-40df-bdc8-3b973e57c60b-kube-api-access-cvvxj\") pod \"collect-profiles-29335335-9xp6n\" (UID: \"a0d00f09-0bc5-40df-bdc8-3b973e57c60b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335335-9xp6n"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.471307 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a0d00f09-0bc5-40df-bdc8-3b973e57c60b-secret-volume\") pod \"collect-profiles-29335335-9xp6n\" (UID: \"a0d00f09-0bc5-40df-bdc8-3b973e57c60b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335335-9xp6n"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.472094 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvvxj\" (UniqueName: \"kubernetes.io/projected/a0d00f09-0bc5-40df-bdc8-3b973e57c60b-kube-api-access-cvvxj\") pod \"collect-profiles-29335335-9xp6n\" (UID: \"a0d00f09-0bc5-40df-bdc8-3b973e57c60b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335335-9xp6n"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.476240 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a0d00f09-0bc5-40df-bdc8-3b973e57c60b-config-volume\") pod \"collect-profiles-29335335-9xp6n\" (UID: \"a0d00f09-0bc5-40df-bdc8-3b973e57c60b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335335-9xp6n"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.521226 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.650911 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"171aaf88-50cd-47d9-a43f-1df1d7ed96f8","Type":"ContainerStarted","Data":"89b6c5c9812ea8b63daf9b68de391bc05a83d4c4583ce761f1c8cd304407f6f5"}
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.653300 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"b0e07cac-b1ab-436c-95ac-8c150d84e709","Type":"ContainerStarted","Data":"0d485df20ca7c4cbbcf74d422b59a256ef4ce6d778642ec7e662a9329bb6185f"}
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.657225 4799 generic.go:334] "Generic (PLEG): container finished" podID="00daf52d-7faf-4cc7-80e2-7b9db14d2196" containerID="1799817d4e72e1ae52fb951ccb4b28f64b395902e023fab586dee0d6ebbe54ce" exitCode=137
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.662098 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"3885dc5d-b551-4371-b731-0ffda7c18126","Type":"ContainerStarted","Data":"6266b95c9d20dec36bb60bfed504a56c10f40eb0454d2290bf438e5bf5dcdae3"}
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.690013 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.689989855 podStartE2EDuration="2.689989855s" podCreationTimestamp="2025-10-10 18:14:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:15:00.67547023 +0000 UTC m=+6194.183794345" watchObservedRunningTime="2025-10-10 18:15:00.689989855 +0000 UTC m=+6194.198313970"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.705732 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335335-9xp6n"
Oct 10 18:15:00 crc kubenswrapper[4799]: I1010 18:15:00.941830 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Oct 10 18:15:01 crc kubenswrapper[4799]: I1010 18:15:01.071915 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/00daf52d-7faf-4cc7-80e2-7b9db14d2196-openstack-config\") pod \"00daf52d-7faf-4cc7-80e2-7b9db14d2196\" (UID: \"00daf52d-7faf-4cc7-80e2-7b9db14d2196\") "
Oct 10 18:15:01 crc kubenswrapper[4799]: I1010 18:15:01.072063 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/00daf52d-7faf-4cc7-80e2-7b9db14d2196-openstack-config-secret\") pod \"00daf52d-7faf-4cc7-80e2-7b9db14d2196\" (UID: \"00daf52d-7faf-4cc7-80e2-7b9db14d2196\") "
Oct 10 18:15:01 crc kubenswrapper[4799]: I1010 18:15:01.072111 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7sgb9\" (UniqueName: \"kubernetes.io/projected/00daf52d-7faf-4cc7-80e2-7b9db14d2196-kube-api-access-7sgb9\") pod \"00daf52d-7faf-4cc7-80e2-7b9db14d2196\" (UID: \"00daf52d-7faf-4cc7-80e2-7b9db14d2196\") "
Oct 10 18:15:01 crc kubenswrapper[4799]: I1010 18:15:01.097864 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00daf52d-7faf-4cc7-80e2-7b9db14d2196-kube-api-access-7sgb9" (OuterVolumeSpecName: "kube-api-access-7sgb9") pod "00daf52d-7faf-4cc7-80e2-7b9db14d2196" (UID: "00daf52d-7faf-4cc7-80e2-7b9db14d2196"). InnerVolumeSpecName "kube-api-access-7sgb9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 18:15:01 crc kubenswrapper[4799]: I1010 18:15:01.164572 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00daf52d-7faf-4cc7-80e2-7b9db14d2196-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "00daf52d-7faf-4cc7-80e2-7b9db14d2196" (UID: "00daf52d-7faf-4cc7-80e2-7b9db14d2196"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 10 18:15:01 crc kubenswrapper[4799]: I1010 18:15:01.175846 4799 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/00daf52d-7faf-4cc7-80e2-7b9db14d2196-openstack-config\") on node \"crc\" DevicePath \"\""
Oct 10 18:15:01 crc kubenswrapper[4799]: I1010 18:15:01.175875 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7sgb9\" (UniqueName: \"kubernetes.io/projected/00daf52d-7faf-4cc7-80e2-7b9db14d2196-kube-api-access-7sgb9\") on node \"crc\" DevicePath \"\""
Oct 10 18:15:01 crc kubenswrapper[4799]: I1010 18:15:01.183692 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00daf52d-7faf-4cc7-80e2-7b9db14d2196-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "00daf52d-7faf-4cc7-80e2-7b9db14d2196" (UID: "00daf52d-7faf-4cc7-80e2-7b9db14d2196"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:15:01 crc kubenswrapper[4799]: I1010 18:15:01.278086 4799 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/00daf52d-7faf-4cc7-80e2-7b9db14d2196-openstack-config-secret\") on node \"crc\" DevicePath \"\""
Oct 10 18:15:01 crc kubenswrapper[4799]: I1010 18:15:01.414612 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00daf52d-7faf-4cc7-80e2-7b9db14d2196" path="/var/lib/kubelet/pods/00daf52d-7faf-4cc7-80e2-7b9db14d2196/volumes"
Oct 10 18:15:01 crc kubenswrapper[4799]: I1010 18:15:01.418861 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Oct 10 18:15:01 crc kubenswrapper[4799]: I1010 18:15:01.585735 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335335-9xp6n"]
Oct 10 18:15:01 crc kubenswrapper[4799]: W1010 18:15:01.635274 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda0d00f09_0bc5_40df_bdc8_3b973e57c60b.slice/crio-bf5a0f9cfd5fa0a644d6fc70fc2fefb9604bc8c804928117f62c5ebb777ea455 WatchSource:0}: Error finding container bf5a0f9cfd5fa0a644d6fc70fc2fefb9604bc8c804928117f62c5ebb777ea455: Status 404 returned error can't find the container with id bf5a0f9cfd5fa0a644d6fc70fc2fefb9604bc8c804928117f62c5ebb777ea455
Oct 10 18:15:01 crc kubenswrapper[4799]: I1010 18:15:01.674278 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"171aaf88-50cd-47d9-a43f-1df1d7ed96f8","Type":"ContainerStarted","Data":"748d183913693ad8daec42f6a83595f90bdbfbc32ab642e87bbc382ee03b7f20"}
Oct 10 18:15:01 crc kubenswrapper[4799]: I1010 18:15:01.675100 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0"
Oct 10 18:15:01 crc kubenswrapper[4799]: I1010 18:15:01.676379 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Oct 10 18:15:01 crc kubenswrapper[4799]: I1010 18:15:01.676389 4799 scope.go:117] "RemoveContainer" containerID="1799817d4e72e1ae52fb951ccb4b28f64b395902e023fab586dee0d6ebbe54ce"
Oct 10 18:15:01 crc kubenswrapper[4799]: I1010 18:15:01.685004 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335335-9xp6n" event={"ID":"a0d00f09-0bc5-40df-bdc8-3b973e57c60b","Type":"ContainerStarted","Data":"bf5a0f9cfd5fa0a644d6fc70fc2fefb9604bc8c804928117f62c5ebb777ea455"}
Oct 10 18:15:01 crc kubenswrapper[4799]: I1010 18:15:01.691799 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=3.069407979 podStartE2EDuration="3.691780825s" podCreationTimestamp="2025-10-10 18:14:58 +0000 UTC" firstStartedPulling="2025-10-10 18:14:59.753026638 +0000 UTC m=+6193.261350753" lastFinishedPulling="2025-10-10 18:15:00.375399484 +0000 UTC m=+6193.883723599" observedRunningTime="2025-10-10 18:15:01.686947577 +0000 UTC m=+6195.195271692" watchObservedRunningTime="2025-10-10 18:15:01.691780825 +0000 UTC m=+6195.200104930"
Oct 10 18:15:01 crc kubenswrapper[4799]: I1010 18:15:01.699046 4799 generic.go:334] "Generic (PLEG): container finished" podID="8414f950-bcd6-4ed2-8f66-c2830d2acfaf" containerID="97e16790b1657ac6cf023c171d63d3bd485610c6edc1c37d6b27fb4101dd4c6c" exitCode=0
Oct 10 18:15:01 crc kubenswrapper[4799]: I1010 18:15:01.699131 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9nwgm" event={"ID":"8414f950-bcd6-4ed2-8f66-c2830d2acfaf","Type":"ContainerDied","Data":"97e16790b1657ac6cf023c171d63d3bd485610c6edc1c37d6b27fb4101dd4c6c"}
Oct 10 18:15:01 crc kubenswrapper[4799]: I1010 18:15:01.703327 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"98cdb179-8d6d-47f5-8bfa-c0d77def55df","Type":"ContainerStarted","Data":"7a687c1d4cafd687ab0ce7f7d23761c95d6ae8eb5d46b16fd220194a547f40cc"}
Oct 10 18:15:02 crc kubenswrapper[4799]: I1010 18:15:02.721520 4799 generic.go:334] "Generic (PLEG): container finished" podID="a0d00f09-0bc5-40df-bdc8-3b973e57c60b" containerID="a750c77142ebc474f2a876dd5a9c832bc8db4cdf377e1de81edde855501204c5" exitCode=0
Oct 10 18:15:02 crc kubenswrapper[4799]: I1010 18:15:02.721592 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335335-9xp6n" event={"ID":"a0d00f09-0bc5-40df-bdc8-3b973e57c60b","Type":"ContainerDied","Data":"a750c77142ebc474f2a876dd5a9c832bc8db4cdf377e1de81edde855501204c5"}
Oct 10 18:15:03 crc kubenswrapper[4799]: I1010 18:15:03.732542 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9nwgm" event={"ID":"8414f950-bcd6-4ed2-8f66-c2830d2acfaf","Type":"ContainerStarted","Data":"2530dc23d6e5e4a58b9330579cc678d37e835b8c76969a8d48af402c6828ccbf"}
Oct 10 18:15:03 crc kubenswrapper[4799]: I1010 18:15:03.764747 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-9nwgm" podStartSLOduration=3.300975297 podStartE2EDuration="6.764715749s" podCreationTimestamp="2025-10-10 18:14:57 +0000 UTC" firstStartedPulling="2025-10-10 18:14:59.631236075 +0000 UTC m=+6193.139560190" lastFinishedPulling="2025-10-10 18:15:03.094976517 +0000 UTC m=+6196.603300642" observedRunningTime="2025-10-10 18:15:03.751183268 +0000 UTC m=+6197.259507403" watchObservedRunningTime="2025-10-10 18:15:03.764715749 +0000 UTC m=+6197.273039914"
Oct 10 18:15:04 crc kubenswrapper[4799]: I1010 18:15:04.210740 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335335-9xp6n"
Oct 10 18:15:04 crc kubenswrapper[4799]: I1010 18:15:04.360562 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a0d00f09-0bc5-40df-bdc8-3b973e57c60b-secret-volume\") pod \"a0d00f09-0bc5-40df-bdc8-3b973e57c60b\" (UID: \"a0d00f09-0bc5-40df-bdc8-3b973e57c60b\") "
Oct 10 18:15:04 crc kubenswrapper[4799]: I1010 18:15:04.360837 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a0d00f09-0bc5-40df-bdc8-3b973e57c60b-config-volume\") pod \"a0d00f09-0bc5-40df-bdc8-3b973e57c60b\" (UID: \"a0d00f09-0bc5-40df-bdc8-3b973e57c60b\") "
Oct 10 18:15:04 crc kubenswrapper[4799]: I1010 18:15:04.360996 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cvvxj\" (UniqueName: \"kubernetes.io/projected/a0d00f09-0bc5-40df-bdc8-3b973e57c60b-kube-api-access-cvvxj\") pod \"a0d00f09-0bc5-40df-bdc8-3b973e57c60b\" (UID: \"a0d00f09-0bc5-40df-bdc8-3b973e57c60b\") "
Oct 10 18:15:04 crc kubenswrapper[4799]: I1010 18:15:04.361507 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0d00f09-0bc5-40df-bdc8-3b973e57c60b-config-volume" (OuterVolumeSpecName: "config-volume") pod "a0d00f09-0bc5-40df-bdc8-3b973e57c60b" (UID: "a0d00f09-0bc5-40df-bdc8-3b973e57c60b"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 10 18:15:04 crc kubenswrapper[4799]: I1010 18:15:04.362164 4799 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a0d00f09-0bc5-40df-bdc8-3b973e57c60b-config-volume\") on node \"crc\" DevicePath \"\""
Oct 10 18:15:04 crc kubenswrapper[4799]: I1010 18:15:04.365956 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0d00f09-0bc5-40df-bdc8-3b973e57c60b-kube-api-access-cvvxj" (OuterVolumeSpecName: "kube-api-access-cvvxj") pod "a0d00f09-0bc5-40df-bdc8-3b973e57c60b" (UID: "a0d00f09-0bc5-40df-bdc8-3b973e57c60b"). InnerVolumeSpecName "kube-api-access-cvvxj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 18:15:04 crc kubenswrapper[4799]: I1010 18:15:04.366596 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0d00f09-0bc5-40df-bdc8-3b973e57c60b-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "a0d00f09-0bc5-40df-bdc8-3b973e57c60b" (UID: "a0d00f09-0bc5-40df-bdc8-3b973e57c60b"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:15:04 crc kubenswrapper[4799]: I1010 18:15:04.465122 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cvvxj\" (UniqueName: \"kubernetes.io/projected/a0d00f09-0bc5-40df-bdc8-3b973e57c60b-kube-api-access-cvvxj\") on node \"crc\" DevicePath \"\""
Oct 10 18:15:04 crc kubenswrapper[4799]: I1010 18:15:04.465181 4799 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a0d00f09-0bc5-40df-bdc8-3b973e57c60b-secret-volume\") on node \"crc\" DevicePath \"\""
Oct 10 18:15:04 crc kubenswrapper[4799]: I1010 18:15:04.745884 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335335-9xp6n"
Oct 10 18:15:04 crc kubenswrapper[4799]: I1010 18:15:04.745884 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335335-9xp6n" event={"ID":"a0d00f09-0bc5-40df-bdc8-3b973e57c60b","Type":"ContainerDied","Data":"bf5a0f9cfd5fa0a644d6fc70fc2fefb9604bc8c804928117f62c5ebb777ea455"}
Oct 10 18:15:04 crc kubenswrapper[4799]: I1010 18:15:04.745969 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bf5a0f9cfd5fa0a644d6fc70fc2fefb9604bc8c804928117f62c5ebb777ea455"
Oct 10 18:15:05 crc kubenswrapper[4799]: I1010 18:15:05.283021 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335290-gd5c6"]
Oct 10 18:15:05 crc kubenswrapper[4799]: I1010 18:15:05.294443 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335290-gd5c6"]
Oct 10 18:15:05 crc kubenswrapper[4799]: I1010 18:15:05.418852 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad8b20fe-7b38-4f83-ba94-986013d30ea5" path="/var/lib/kubelet/pods/ad8b20fe-7b38-4f83-ba94-986013d30ea5/volumes"
Oct 10 18:15:07 crc kubenswrapper[4799]: I1010 18:15:07.763463 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-9nwgm"
Oct 10 18:15:07 crc kubenswrapper[4799]: I1010 18:15:07.764128 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-9nwgm"
Oct 10 18:15:07 crc kubenswrapper[4799]: I1010 18:15:07.776091 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"98cdb179-8d6d-47f5-8bfa-c0d77def55df","Type":"ContainerStarted","Data":"af577833d97f0f181c1fabe23463eba3917f1b4c3701e1f1e8572c8331b0dcb9"}
Oct 10 18:15:07 crc kubenswrapper[4799]: I1010 18:15:07.778367 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"3885dc5d-b551-4371-b731-0ffda7c18126","Type":"ContainerStarted","Data":"c94a092b934958c8c4a926bdd5595e48233c337f1a84893132c5eacd06b8d070"}
Oct 10 18:15:07 crc kubenswrapper[4799]: I1010 18:15:07.835632 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-9nwgm"
Oct 10 18:15:08 crc kubenswrapper[4799]: I1010 18:15:08.826478 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0"
Oct 10 18:15:08 crc kubenswrapper[4799]: I1010 18:15:08.892259 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-9nwgm"
Oct 10 18:15:09 crc kubenswrapper[4799]: I1010 18:15:09.051423 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-rfnmm"]
Oct 10 18:15:09 crc kubenswrapper[4799]: I1010 18:15:09.056361 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-rfnmm"]
Oct 10 18:15:09 crc kubenswrapper[4799]: I1010 18:15:09.419185 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8a8adcb-59fe-4f76-a7dd-490195701120" path="/var/lib/kubelet/pods/a8a8adcb-59fe-4f76-a7dd-490195701120/volumes"
Oct 10 18:15:09 crc kubenswrapper[4799]: I1010 18:15:09.593947 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9nwgm"]
Oct 10 18:15:09 crc kubenswrapper[4799]: I1010 18:15:09.947909 4799 scope.go:117] "RemoveContainer" containerID="9624dc8ea5d0347c35f7e7ab7c7f9a4e58284e339c93af675f78bf25d899ee36"
Oct 10 18:15:10 crc kubenswrapper[4799]: I1010 18:15:10.007480 4799 scope.go:117] "RemoveContainer" containerID="afd029883384655ab6781db94c6a93fbe761608618564bb089d9cf86a9fc29d6"
Oct 10 18:15:10 crc kubenswrapper[4799]: I1010 18:15:10.064052 4799 scope.go:117] "RemoveContainer" containerID="41bb182737dc3bfee9c2f3d6425c1446b6ea9bc2bbbf9c6972496f7df633f981"
Oct 10 18:15:10 crc kubenswrapper[4799]: I1010 18:15:10.142677 4799 scope.go:117] "RemoveContainer" containerID="b8a9d483839a0f7141408ac98e54d3805bb710ab4ece61f1a7c874fc0cdbca2d"
Oct 10 18:15:10 crc kubenswrapper[4799]: I1010 18:15:10.177822 4799 scope.go:117] "RemoveContainer" containerID="f64befd8f0cf9c5f121099deea53861218bc7a1b177edc4df84e51dc3f4c537e"
Oct 10 18:15:10 crc kubenswrapper[4799]: I1010 18:15:10.236136 4799 scope.go:117] "RemoveContainer" containerID="27083c69e73d75ec1d54f851c219aa39b224642c1cf83a4961cfa4fad3007bc7"
Oct 10 18:15:10 crc kubenswrapper[4799]: I1010 18:15:10.281538 4799 scope.go:117] "RemoveContainer" containerID="64ebecf8d5b5ba4ffdd96eaf54fda535d9fa414f4f239dd9bbfde375c51c0dc5"
Oct 10 18:15:10 crc kubenswrapper[4799]: I1010 18:15:10.311587 4799 scope.go:117] "RemoveContainer" containerID="9f5e33abf26de7058cb51879143496bd6fdee40346a1e25c8f031d6ec4f33ea6"
Oct 10 18:15:10 crc kubenswrapper[4799]: I1010 18:15:10.338766 4799 scope.go:117] "RemoveContainer" containerID="628a3fb55758d762250afd6974f8b3b5570c41eedb259c89f7f3f7ddc394d78b"
Oct 10 18:15:10 crc kubenswrapper[4799]: I1010 18:15:10.813182 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-9nwgm" podUID="8414f950-bcd6-4ed2-8f66-c2830d2acfaf" containerName="registry-server" containerID="cri-o://2530dc23d6e5e4a58b9330579cc678d37e835b8c76969a8d48af402c6828ccbf" gracePeriod=2
Oct 10 18:15:11 crc kubenswrapper[4799]: I1010 18:15:11.029736 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-qbxgb"]
Oct 10 18:15:11 crc kubenswrapper[4799]: I1010 18:15:11.039251 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-qbxgb"]
Oct 10 18:15:11 crc kubenswrapper[4799]: I1010 18:15:11.420572 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d1fbe12-3328-4be8-a6bf-8a89c61bea63" path="/var/lib/kubelet/pods/8d1fbe12-3328-4be8-a6bf-8a89c61bea63/volumes"
Oct 10 18:15:11 crc kubenswrapper[4799]: I1010 18:15:11.449397 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9nwgm"
Oct 10 18:15:11 crc kubenswrapper[4799]: I1010 18:15:11.547432 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8414f950-bcd6-4ed2-8f66-c2830d2acfaf-utilities\") pod \"8414f950-bcd6-4ed2-8f66-c2830d2acfaf\" (UID: \"8414f950-bcd6-4ed2-8f66-c2830d2acfaf\") "
Oct 10 18:15:11 crc kubenswrapper[4799]: I1010 18:15:11.547715 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lrfk6\" (UniqueName: \"kubernetes.io/projected/8414f950-bcd6-4ed2-8f66-c2830d2acfaf-kube-api-access-lrfk6\") pod \"8414f950-bcd6-4ed2-8f66-c2830d2acfaf\" (UID: \"8414f950-bcd6-4ed2-8f66-c2830d2acfaf\") "
Oct 10 18:15:11 crc kubenswrapper[4799]: I1010 18:15:11.547777 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8414f950-bcd6-4ed2-8f66-c2830d2acfaf-catalog-content\") pod \"8414f950-bcd6-4ed2-8f66-c2830d2acfaf\" (UID: \"8414f950-bcd6-4ed2-8f66-c2830d2acfaf\") "
Oct 10 18:15:11 crc kubenswrapper[4799]: I1010 18:15:11.548209 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8414f950-bcd6-4ed2-8f66-c2830d2acfaf-utilities" (OuterVolumeSpecName: "utilities") pod "8414f950-bcd6-4ed2-8f66-c2830d2acfaf" (UID: "8414f950-bcd6-4ed2-8f66-c2830d2acfaf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 18:15:11 crc kubenswrapper[4799]: I1010 18:15:11.548401 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8414f950-bcd6-4ed2-8f66-c2830d2acfaf-utilities\") on node \"crc\" DevicePath \"\""
Oct 10 18:15:11 crc kubenswrapper[4799]: I1010 18:15:11.566857 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8414f950-bcd6-4ed2-8f66-c2830d2acfaf-kube-api-access-lrfk6" (OuterVolumeSpecName: "kube-api-access-lrfk6") pod "8414f950-bcd6-4ed2-8f66-c2830d2acfaf" (UID: "8414f950-bcd6-4ed2-8f66-c2830d2acfaf"). InnerVolumeSpecName "kube-api-access-lrfk6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 18:15:11 crc kubenswrapper[4799]: I1010 18:15:11.570584 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8414f950-bcd6-4ed2-8f66-c2830d2acfaf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8414f950-bcd6-4ed2-8f66-c2830d2acfaf" (UID: "8414f950-bcd6-4ed2-8f66-c2830d2acfaf"). InnerVolumeSpecName "catalog-content".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:15:11 crc kubenswrapper[4799]: I1010 18:15:11.650221 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lrfk6\" (UniqueName: \"kubernetes.io/projected/8414f950-bcd6-4ed2-8f66-c2830d2acfaf-kube-api-access-lrfk6\") on node \"crc\" DevicePath \"\"" Oct 10 18:15:11 crc kubenswrapper[4799]: I1010 18:15:11.650251 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8414f950-bcd6-4ed2-8f66-c2830d2acfaf-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 18:15:11 crc kubenswrapper[4799]: I1010 18:15:11.827884 4799 generic.go:334] "Generic (PLEG): container finished" podID="8414f950-bcd6-4ed2-8f66-c2830d2acfaf" containerID="2530dc23d6e5e4a58b9330579cc678d37e835b8c76969a8d48af402c6828ccbf" exitCode=0 Oct 10 18:15:11 crc kubenswrapper[4799]: I1010 18:15:11.827932 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9nwgm" event={"ID":"8414f950-bcd6-4ed2-8f66-c2830d2acfaf","Type":"ContainerDied","Data":"2530dc23d6e5e4a58b9330579cc678d37e835b8c76969a8d48af402c6828ccbf"} Oct 10 18:15:11 crc kubenswrapper[4799]: I1010 18:15:11.827961 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9nwgm" event={"ID":"8414f950-bcd6-4ed2-8f66-c2830d2acfaf","Type":"ContainerDied","Data":"d187d3c7801557593e592e7305b639a5b4753ba933c7513bd70e9ff43d92142e"} Oct 10 18:15:11 crc kubenswrapper[4799]: I1010 18:15:11.827978 4799 scope.go:117] "RemoveContainer" containerID="2530dc23d6e5e4a58b9330579cc678d37e835b8c76969a8d48af402c6828ccbf" Oct 10 18:15:11 crc kubenswrapper[4799]: I1010 18:15:11.828114 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9nwgm" Oct 10 18:15:11 crc kubenswrapper[4799]: I1010 18:15:11.870265 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9nwgm"] Oct 10 18:15:11 crc kubenswrapper[4799]: I1010 18:15:11.874149 4799 scope.go:117] "RemoveContainer" containerID="97e16790b1657ac6cf023c171d63d3bd485610c6edc1c37d6b27fb4101dd4c6c" Oct 10 18:15:11 crc kubenswrapper[4799]: I1010 18:15:11.897784 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-9nwgm"] Oct 10 18:15:11 crc kubenswrapper[4799]: I1010 18:15:11.923624 4799 scope.go:117] "RemoveContainer" containerID="5002cc8f91fdf81f75131cb53984a7679c1458bb43cd6e97e5ea4ac6728eb6cf" Oct 10 18:15:11 crc kubenswrapper[4799]: I1010 18:15:11.971090 4799 scope.go:117] "RemoveContainer" containerID="2530dc23d6e5e4a58b9330579cc678d37e835b8c76969a8d48af402c6828ccbf" Oct 10 18:15:11 crc kubenswrapper[4799]: E1010 18:15:11.971524 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2530dc23d6e5e4a58b9330579cc678d37e835b8c76969a8d48af402c6828ccbf\": container with ID starting with 2530dc23d6e5e4a58b9330579cc678d37e835b8c76969a8d48af402c6828ccbf not found: ID does not exist" containerID="2530dc23d6e5e4a58b9330579cc678d37e835b8c76969a8d48af402c6828ccbf" Oct 10 18:15:11 crc kubenswrapper[4799]: I1010 18:15:11.971577 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2530dc23d6e5e4a58b9330579cc678d37e835b8c76969a8d48af402c6828ccbf"} err="failed to get container status \"2530dc23d6e5e4a58b9330579cc678d37e835b8c76969a8d48af402c6828ccbf\": rpc error: code = NotFound desc = could not find container \"2530dc23d6e5e4a58b9330579cc678d37e835b8c76969a8d48af402c6828ccbf\": container with ID starting with 2530dc23d6e5e4a58b9330579cc678d37e835b8c76969a8d48af402c6828ccbf not found: ID does not exist" Oct 10 18:15:11 crc kubenswrapper[4799]: I1010 18:15:11.971612 4799 scope.go:117] "RemoveContainer" containerID="97e16790b1657ac6cf023c171d63d3bd485610c6edc1c37d6b27fb4101dd4c6c" Oct 10 18:15:11 crc kubenswrapper[4799]: E1010 18:15:11.972164 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97e16790b1657ac6cf023c171d63d3bd485610c6edc1c37d6b27fb4101dd4c6c\": container with ID starting with 97e16790b1657ac6cf023c171d63d3bd485610c6edc1c37d6b27fb4101dd4c6c not found: ID does not exist" containerID="97e16790b1657ac6cf023c171d63d3bd485610c6edc1c37d6b27fb4101dd4c6c" Oct 10 18:15:11 crc kubenswrapper[4799]: I1010 18:15:11.972194 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97e16790b1657ac6cf023c171d63d3bd485610c6edc1c37d6b27fb4101dd4c6c"} err="failed to get container status \"97e16790b1657ac6cf023c171d63d3bd485610c6edc1c37d6b27fb4101dd4c6c\": rpc error: code = NotFound desc = could not find container \"97e16790b1657ac6cf023c171d63d3bd485610c6edc1c37d6b27fb4101dd4c6c\": container with ID starting with 97e16790b1657ac6cf023c171d63d3bd485610c6edc1c37d6b27fb4101dd4c6c not found: ID does not exist" Oct 10 18:15:11 crc kubenswrapper[4799]: I1010 18:15:11.972217 4799 scope.go:117] "RemoveContainer" containerID="5002cc8f91fdf81f75131cb53984a7679c1458bb43cd6e97e5ea4ac6728eb6cf" Oct 10 18:15:11 crc kubenswrapper[4799]: E1010 18:15:11.972521 4799 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"5002cc8f91fdf81f75131cb53984a7679c1458bb43cd6e97e5ea4ac6728eb6cf\": container with ID starting with 5002cc8f91fdf81f75131cb53984a7679c1458bb43cd6e97e5ea4ac6728eb6cf not found: ID does not exist" containerID="5002cc8f91fdf81f75131cb53984a7679c1458bb43cd6e97e5ea4ac6728eb6cf" Oct 10 18:15:11 crc kubenswrapper[4799]: I1010 18:15:11.972548 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5002cc8f91fdf81f75131cb53984a7679c1458bb43cd6e97e5ea4ac6728eb6cf"} err="failed to get container status \"5002cc8f91fdf81f75131cb53984a7679c1458bb43cd6e97e5ea4ac6728eb6cf\": rpc error: code = NotFound desc = could not find container \"5002cc8f91fdf81f75131cb53984a7679c1458bb43cd6e97e5ea4ac6728eb6cf\": container with ID starting with 5002cc8f91fdf81f75131cb53984a7679c1458bb43cd6e97e5ea4ac6728eb6cf not found: ID does not exist" Oct 10 18:15:13 crc kubenswrapper[4799]: I1010 18:15:13.424953 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8414f950-bcd6-4ed2-8f66-c2830d2acfaf" path="/var/lib/kubelet/pods/8414f950-bcd6-4ed2-8f66-c2830d2acfaf/volumes" Oct 10 18:15:14 crc kubenswrapper[4799]: I1010 18:15:14.870157 4799 generic.go:334] "Generic (PLEG): container finished" podID="3885dc5d-b551-4371-b731-0ffda7c18126" containerID="c94a092b934958c8c4a926bdd5595e48233c337f1a84893132c5eacd06b8d070" exitCode=0 Oct 10 18:15:14 crc kubenswrapper[4799]: I1010 18:15:14.870281 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"3885dc5d-b551-4371-b731-0ffda7c18126","Type":"ContainerDied","Data":"c94a092b934958c8c4a926bdd5595e48233c337f1a84893132c5eacd06b8d070"} Oct 10 18:15:17 crc kubenswrapper[4799]: I1010 18:15:17.912325 4799 generic.go:334] "Generic (PLEG): container finished" podID="98cdb179-8d6d-47f5-8bfa-c0d77def55df" containerID="af577833d97f0f181c1fabe23463eba3917f1b4c3701e1f1e8572c8331b0dcb9" exitCode=0 Oct 10 18:15:17 crc kubenswrapper[4799]: I1010 18:15:17.912447 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"98cdb179-8d6d-47f5-8bfa-c0d77def55df","Type":"ContainerDied","Data":"af577833d97f0f181c1fabe23463eba3917f1b4c3701e1f1e8572c8331b0dcb9"} Oct 10 18:15:17 crc kubenswrapper[4799]: I1010 18:15:17.916903 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"3885dc5d-b551-4371-b731-0ffda7c18126","Type":"ContainerStarted","Data":"e89e74dfb1b18a646792d9ad238767d8c683fadbb07a98b95d80d19d45c8bc1a"} Oct 10 18:15:22 crc kubenswrapper[4799]: I1010 18:15:22.988004 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"3885dc5d-b551-4371-b731-0ffda7c18126","Type":"ContainerStarted","Data":"1ce8a55450b1e0acd907b5eeda978fc27e5ae27d8dc8c0063846016d0ef05d13"} Oct 10 18:15:22 crc kubenswrapper[4799]: I1010 18:15:22.988667 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/alertmanager-metric-storage-0" Oct 10 18:15:22 crc kubenswrapper[4799]: I1010 18:15:22.992820 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/alertmanager-metric-storage-0" Oct 10 18:15:23 crc kubenswrapper[4799]: I1010 18:15:23.018976 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/alertmanager-metric-storage-0" podStartSLOduration=7.035029839 
podStartE2EDuration="24.018946046s" podCreationTimestamp="2025-10-10 18:14:59 +0000 UTC" firstStartedPulling="2025-10-10 18:15:00.491932559 +0000 UTC m=+6194.000256674" lastFinishedPulling="2025-10-10 18:15:17.475848766 +0000 UTC m=+6210.984172881" observedRunningTime="2025-10-10 18:15:23.007941088 +0000 UTC m=+6216.516265273" watchObservedRunningTime="2025-10-10 18:15:23.018946046 +0000 UTC m=+6216.527270191" Oct 10 18:15:25 crc kubenswrapper[4799]: I1010 18:15:25.020700 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"98cdb179-8d6d-47f5-8bfa-c0d77def55df","Type":"ContainerStarted","Data":"d2c910b13f7e9fe6c90d74e10ea206f1055886998584b2471793d65a51a38b5d"} Oct 10 18:15:28 crc kubenswrapper[4799]: I1010 18:15:28.053244 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-hphzd"] Oct 10 18:15:28 crc kubenswrapper[4799]: I1010 18:15:28.071928 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-hphzd"] Oct 10 18:15:29 crc kubenswrapper[4799]: I1010 18:15:29.428261 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af7d9738-f698-4bfa-9294-daf47a91c9ab" path="/var/lib/kubelet/pods/af7d9738-f698-4bfa-9294-daf47a91c9ab/volumes" Oct 10 18:15:31 crc kubenswrapper[4799]: I1010 18:15:31.104277 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"98cdb179-8d6d-47f5-8bfa-c0d77def55df","Type":"ContainerStarted","Data":"2bdb29967c085771cc850664bf7c2325ae2af79dd0f33d04be4c15fea268432f"} Oct 10 18:15:34 crc kubenswrapper[4799]: I1010 18:15:34.139368 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"98cdb179-8d6d-47f5-8bfa-c0d77def55df","Type":"ContainerStarted","Data":"d73b6c9b4dc593af766660b04f6d6d4dc0467c5885bd3d77af517e5356827482"} Oct 10 18:15:34 crc kubenswrapper[4799]: I1010 18:15:34.185366 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=4.554699634 podStartE2EDuration="36.185336369s" podCreationTimestamp="2025-10-10 18:14:58 +0000 UTC" firstStartedPulling="2025-10-10 18:15:01.414266409 +0000 UTC m=+6194.922590524" lastFinishedPulling="2025-10-10 18:15:33.044903144 +0000 UTC m=+6226.553227259" observedRunningTime="2025-10-10 18:15:34.175989761 +0000 UTC m=+6227.684313946" watchObservedRunningTime="2025-10-10 18:15:34.185336369 +0000 UTC m=+6227.693660524" Oct 10 18:15:35 crc kubenswrapper[4799]: I1010 18:15:35.522979 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Oct 10 18:15:40 crc kubenswrapper[4799]: I1010 18:15:40.152633 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 10 18:15:40 crc kubenswrapper[4799]: E1010 18:15:40.153893 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0d00f09-0bc5-40df-bdc8-3b973e57c60b" containerName="collect-profiles" Oct 10 18:15:40 crc kubenswrapper[4799]: I1010 18:15:40.153909 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0d00f09-0bc5-40df-bdc8-3b973e57c60b" containerName="collect-profiles" Oct 10 18:15:40 crc kubenswrapper[4799]: E1010 18:15:40.153945 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8414f950-bcd6-4ed2-8f66-c2830d2acfaf" containerName="extract-utilities" Oct 10 18:15:40 crc kubenswrapper[4799]: I1010 18:15:40.153953 
4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="8414f950-bcd6-4ed2-8f66-c2830d2acfaf" containerName="extract-utilities" Oct 10 18:15:40 crc kubenswrapper[4799]: E1010 18:15:40.153968 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8414f950-bcd6-4ed2-8f66-c2830d2acfaf" containerName="registry-server" Oct 10 18:15:40 crc kubenswrapper[4799]: I1010 18:15:40.153976 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="8414f950-bcd6-4ed2-8f66-c2830d2acfaf" containerName="registry-server" Oct 10 18:15:40 crc kubenswrapper[4799]: E1010 18:15:40.154003 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8414f950-bcd6-4ed2-8f66-c2830d2acfaf" containerName="extract-content" Oct 10 18:15:40 crc kubenswrapper[4799]: I1010 18:15:40.154013 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="8414f950-bcd6-4ed2-8f66-c2830d2acfaf" containerName="extract-content" Oct 10 18:15:40 crc kubenswrapper[4799]: I1010 18:15:40.158380 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="8414f950-bcd6-4ed2-8f66-c2830d2acfaf" containerName="registry-server" Oct 10 18:15:40 crc kubenswrapper[4799]: I1010 18:15:40.158446 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0d00f09-0bc5-40df-bdc8-3b973e57c60b" containerName="collect-profiles" Oct 10 18:15:40 crc kubenswrapper[4799]: I1010 18:15:40.171780 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 10 18:15:40 crc kubenswrapper[4799]: I1010 18:15:40.176039 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 10 18:15:40 crc kubenswrapper[4799]: I1010 18:15:40.176656 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 10 18:15:40 crc kubenswrapper[4799]: I1010 18:15:40.204282 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/84184e7b-2222-4f92-9cdd-894da6f28546-run-httpd\") pod \"ceilometer-0\" (UID: \"84184e7b-2222-4f92-9cdd-894da6f28546\") " pod="openstack/ceilometer-0" Oct 10 18:15:40 crc kubenswrapper[4799]: I1010 18:15:40.204338 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m28dz\" (UniqueName: \"kubernetes.io/projected/84184e7b-2222-4f92-9cdd-894da6f28546-kube-api-access-m28dz\") pod \"ceilometer-0\" (UID: \"84184e7b-2222-4f92-9cdd-894da6f28546\") " pod="openstack/ceilometer-0" Oct 10 18:15:40 crc kubenswrapper[4799]: I1010 18:15:40.204413 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/84184e7b-2222-4f92-9cdd-894da6f28546-scripts\") pod \"ceilometer-0\" (UID: \"84184e7b-2222-4f92-9cdd-894da6f28546\") " pod="openstack/ceilometer-0" Oct 10 18:15:40 crc kubenswrapper[4799]: I1010 18:15:40.204466 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84184e7b-2222-4f92-9cdd-894da6f28546-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"84184e7b-2222-4f92-9cdd-894da6f28546\") " pod="openstack/ceilometer-0" Oct 10 18:15:40 crc kubenswrapper[4799]: I1010 18:15:40.204500 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/84184e7b-2222-4f92-9cdd-894da6f28546-log-httpd\") pod \"ceilometer-0\" (UID: \"84184e7b-2222-4f92-9cdd-894da6f28546\") " pod="openstack/ceilometer-0" Oct 10 18:15:40 crc kubenswrapper[4799]: I1010 18:15:40.204552 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84184e7b-2222-4f92-9cdd-894da6f28546-config-data\") pod \"ceilometer-0\" (UID: \"84184e7b-2222-4f92-9cdd-894da6f28546\") " pod="openstack/ceilometer-0" Oct 10 18:15:40 crc kubenswrapper[4799]: I1010 18:15:40.204635 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/84184e7b-2222-4f92-9cdd-894da6f28546-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"84184e7b-2222-4f92-9cdd-894da6f28546\") " pod="openstack/ceilometer-0" Oct 10 18:15:40 crc kubenswrapper[4799]: I1010 18:15:40.207820 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 10 18:15:40 crc kubenswrapper[4799]: I1010 18:15:40.307281 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/84184e7b-2222-4f92-9cdd-894da6f28546-log-httpd\") pod \"ceilometer-0\" (UID: \"84184e7b-2222-4f92-9cdd-894da6f28546\") " pod="openstack/ceilometer-0" Oct 10 18:15:40 crc kubenswrapper[4799]: I1010 18:15:40.307424 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84184e7b-2222-4f92-9cdd-894da6f28546-config-data\") pod \"ceilometer-0\" (UID: \"84184e7b-2222-4f92-9cdd-894da6f28546\") " pod="openstack/ceilometer-0" Oct 10 18:15:40 crc kubenswrapper[4799]: I1010 18:15:40.307536 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/84184e7b-2222-4f92-9cdd-894da6f28546-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"84184e7b-2222-4f92-9cdd-894da6f28546\") " pod="openstack/ceilometer-0" Oct 10 18:15:40 crc kubenswrapper[4799]: I1010 18:15:40.307584 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/84184e7b-2222-4f92-9cdd-894da6f28546-run-httpd\") pod \"ceilometer-0\" (UID: \"84184e7b-2222-4f92-9cdd-894da6f28546\") " pod="openstack/ceilometer-0" Oct 10 18:15:40 crc kubenswrapper[4799]: I1010 18:15:40.307628 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m28dz\" (UniqueName: \"kubernetes.io/projected/84184e7b-2222-4f92-9cdd-894da6f28546-kube-api-access-m28dz\") pod \"ceilometer-0\" (UID: \"84184e7b-2222-4f92-9cdd-894da6f28546\") " pod="openstack/ceilometer-0" Oct 10 18:15:40 crc kubenswrapper[4799]: I1010 18:15:40.307715 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/84184e7b-2222-4f92-9cdd-894da6f28546-scripts\") pod \"ceilometer-0\" (UID: \"84184e7b-2222-4f92-9cdd-894da6f28546\") " pod="openstack/ceilometer-0" Oct 10 18:15:40 crc kubenswrapper[4799]: I1010 18:15:40.307770 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84184e7b-2222-4f92-9cdd-894da6f28546-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"84184e7b-2222-4f92-9cdd-894da6f28546\") " pod="openstack/ceilometer-0" Oct 10 
18:15:40 crc kubenswrapper[4799]: I1010 18:15:40.308129 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/84184e7b-2222-4f92-9cdd-894da6f28546-run-httpd\") pod \"ceilometer-0\" (UID: \"84184e7b-2222-4f92-9cdd-894da6f28546\") " pod="openstack/ceilometer-0" Oct 10 18:15:40 crc kubenswrapper[4799]: I1010 18:15:40.308787 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/84184e7b-2222-4f92-9cdd-894da6f28546-log-httpd\") pod \"ceilometer-0\" (UID: \"84184e7b-2222-4f92-9cdd-894da6f28546\") " pod="openstack/ceilometer-0" Oct 10 18:15:40 crc kubenswrapper[4799]: I1010 18:15:40.313281 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84184e7b-2222-4f92-9cdd-894da6f28546-config-data\") pod \"ceilometer-0\" (UID: \"84184e7b-2222-4f92-9cdd-894da6f28546\") " pod="openstack/ceilometer-0" Oct 10 18:15:40 crc kubenswrapper[4799]: I1010 18:15:40.314185 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84184e7b-2222-4f92-9cdd-894da6f28546-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"84184e7b-2222-4f92-9cdd-894da6f28546\") " pod="openstack/ceilometer-0" Oct 10 18:15:40 crc kubenswrapper[4799]: I1010 18:15:40.314475 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/84184e7b-2222-4f92-9cdd-894da6f28546-scripts\") pod \"ceilometer-0\" (UID: \"84184e7b-2222-4f92-9cdd-894da6f28546\") " pod="openstack/ceilometer-0" Oct 10 18:15:40 crc kubenswrapper[4799]: I1010 18:15:40.318220 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/84184e7b-2222-4f92-9cdd-894da6f28546-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"84184e7b-2222-4f92-9cdd-894da6f28546\") " pod="openstack/ceilometer-0" Oct 10 18:15:40 crc kubenswrapper[4799]: I1010 18:15:40.328902 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m28dz\" (UniqueName: \"kubernetes.io/projected/84184e7b-2222-4f92-9cdd-894da6f28546-kube-api-access-m28dz\") pod \"ceilometer-0\" (UID: \"84184e7b-2222-4f92-9cdd-894da6f28546\") " pod="openstack/ceilometer-0" Oct 10 18:15:40 crc kubenswrapper[4799]: I1010 18:15:40.501251 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 10 18:15:40 crc kubenswrapper[4799]: I1010 18:15:40.942238 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 10 18:15:41 crc kubenswrapper[4799]: I1010 18:15:41.241552 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"84184e7b-2222-4f92-9cdd-894da6f28546","Type":"ContainerStarted","Data":"3715331f46f994c050eaeb4836f389d5bcfb91945cfbc7838363d0a3c7d3de96"} Oct 10 18:15:42 crc kubenswrapper[4799]: I1010 18:15:42.251336 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"84184e7b-2222-4f92-9cdd-894da6f28546","Type":"ContainerStarted","Data":"20c94c17225e5f2b5cbd4f8270aea0afadc3357ded681feaac42cd4c21c5e99e"} Oct 10 18:15:43 crc kubenswrapper[4799]: I1010 18:15:43.264598 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"84184e7b-2222-4f92-9cdd-894da6f28546","Type":"ContainerStarted","Data":"adcd0d89227e727b66ba9c1292acdab7839cbf41eae08e79830eb25383c75771"} Oct 10 18:15:44 crc kubenswrapper[4799]: I1010 18:15:44.273559 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"84184e7b-2222-4f92-9cdd-894da6f28546","Type":"ContainerStarted","Data":"8da5dff04d2334e0bf51cc6d287f828b1a5c42d57ce989e3aabfa854b09ddea2"} Oct 10 18:15:45 crc kubenswrapper[4799]: I1010 18:15:45.286520 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"84184e7b-2222-4f92-9cdd-894da6f28546","Type":"ContainerStarted","Data":"6590690d51c4cee10c08e904dded7720fe192a9713241212d736e56870650444"} Oct 10 18:15:45 crc kubenswrapper[4799]: I1010 18:15:45.287484 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 10 18:15:45 crc kubenswrapper[4799]: I1010 18:15:45.318398 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.307976668 podStartE2EDuration="5.318370537s" podCreationTimestamp="2025-10-10 18:15:40 +0000 UTC" firstStartedPulling="2025-10-10 18:15:40.950670564 +0000 UTC m=+6234.458994679" lastFinishedPulling="2025-10-10 18:15:44.961064423 +0000 UTC m=+6238.469388548" observedRunningTime="2025-10-10 18:15:45.30865996 +0000 UTC m=+6238.816984075" watchObservedRunningTime="2025-10-10 18:15:45.318370537 +0000 UTC m=+6238.826694672" Oct 10 18:15:45 crc kubenswrapper[4799]: I1010 18:15:45.522326 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Oct 10 18:15:45 crc kubenswrapper[4799]: I1010 18:15:45.525278 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Oct 10 18:15:46 crc kubenswrapper[4799]: I1010 18:15:46.298535 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Oct 10 18:15:51 crc kubenswrapper[4799]: I1010 18:15:51.349137 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-create-87qtg"] Oct 10 18:15:51 crc kubenswrapper[4799]: I1010 18:15:51.350997 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-create-87qtg" Oct 10 18:15:51 crc kubenswrapper[4799]: I1010 18:15:51.368020 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-87qtg"] Oct 10 18:15:51 crc kubenswrapper[4799]: I1010 18:15:51.485311 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jbbr\" (UniqueName: \"kubernetes.io/projected/7fb0e2f5-1600-4bed-a4b4-8c9d6b1b4910-kube-api-access-8jbbr\") pod \"aodh-db-create-87qtg\" (UID: \"7fb0e2f5-1600-4bed-a4b4-8c9d6b1b4910\") " pod="openstack/aodh-db-create-87qtg" Oct 10 18:15:51 crc kubenswrapper[4799]: I1010 18:15:51.589322 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jbbr\" (UniqueName: \"kubernetes.io/projected/7fb0e2f5-1600-4bed-a4b4-8c9d6b1b4910-kube-api-access-8jbbr\") pod \"aodh-db-create-87qtg\" (UID: \"7fb0e2f5-1600-4bed-a4b4-8c9d6b1b4910\") " pod="openstack/aodh-db-create-87qtg" Oct 10 18:15:51 crc kubenswrapper[4799]: I1010 18:15:51.615834 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jbbr\" (UniqueName: \"kubernetes.io/projected/7fb0e2f5-1600-4bed-a4b4-8c9d6b1b4910-kube-api-access-8jbbr\") pod \"aodh-db-create-87qtg\" (UID: \"7fb0e2f5-1600-4bed-a4b4-8c9d6b1b4910\") " pod="openstack/aodh-db-create-87qtg" Oct 10 18:15:51 crc kubenswrapper[4799]: I1010 18:15:51.670576 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-87qtg" Oct 10 18:15:52 crc kubenswrapper[4799]: I1010 18:15:52.203467 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-87qtg"] Oct 10 18:15:52 crc kubenswrapper[4799]: I1010 18:15:52.370948 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-87qtg" event={"ID":"7fb0e2f5-1600-4bed-a4b4-8c9d6b1b4910","Type":"ContainerStarted","Data":"c4873da564d64fc886e0581004d90334504066465f2a7ff2f3d6ea2d5e378688"} Oct 10 18:15:53 crc kubenswrapper[4799]: I1010 18:15:53.381099 4799 generic.go:334] "Generic (PLEG): container finished" podID="7fb0e2f5-1600-4bed-a4b4-8c9d6b1b4910" containerID="43a67b59bdbbbe1b81a179f4bc34c4a74dd5f27f8dc7ec84f142f567fa4c23cc" exitCode=0 Oct 10 18:15:53 crc kubenswrapper[4799]: I1010 18:15:53.381178 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-87qtg" event={"ID":"7fb0e2f5-1600-4bed-a4b4-8c9d6b1b4910","Type":"ContainerDied","Data":"43a67b59bdbbbe1b81a179f4bc34c4a74dd5f27f8dc7ec84f142f567fa4c23cc"} Oct 10 18:15:54 crc kubenswrapper[4799]: I1010 18:15:54.814910 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-87qtg" Oct 10 18:15:54 crc kubenswrapper[4799]: I1010 18:15:54.871290 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8jbbr\" (UniqueName: \"kubernetes.io/projected/7fb0e2f5-1600-4bed-a4b4-8c9d6b1b4910-kube-api-access-8jbbr\") pod \"7fb0e2f5-1600-4bed-a4b4-8c9d6b1b4910\" (UID: \"7fb0e2f5-1600-4bed-a4b4-8c9d6b1b4910\") " Oct 10 18:15:54 crc kubenswrapper[4799]: I1010 18:15:54.879484 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7fb0e2f5-1600-4bed-a4b4-8c9d6b1b4910-kube-api-access-8jbbr" (OuterVolumeSpecName: "kube-api-access-8jbbr") pod "7fb0e2f5-1600-4bed-a4b4-8c9d6b1b4910" (UID: "7fb0e2f5-1600-4bed-a4b4-8c9d6b1b4910"). InnerVolumeSpecName "kube-api-access-8jbbr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:15:54 crc kubenswrapper[4799]: I1010 18:15:54.974572 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8jbbr\" (UniqueName: \"kubernetes.io/projected/7fb0e2f5-1600-4bed-a4b4-8c9d6b1b4910-kube-api-access-8jbbr\") on node \"crc\" DevicePath \"\"" Oct 10 18:15:55 crc kubenswrapper[4799]: I1010 18:15:55.410369 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-87qtg" Oct 10 18:15:55 crc kubenswrapper[4799]: I1010 18:15:55.420705 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-87qtg" event={"ID":"7fb0e2f5-1600-4bed-a4b4-8c9d6b1b4910","Type":"ContainerDied","Data":"c4873da564d64fc886e0581004d90334504066465f2a7ff2f3d6ea2d5e378688"} Oct 10 18:15:55 crc kubenswrapper[4799]: I1010 18:15:55.420781 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c4873da564d64fc886e0581004d90334504066465f2a7ff2f3d6ea2d5e378688" Oct 10 18:16:01 crc kubenswrapper[4799]: I1010 18:16:01.392901 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-038d-account-create-nz4st"] Oct 10 18:16:01 crc kubenswrapper[4799]: E1010 18:16:01.394402 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fb0e2f5-1600-4bed-a4b4-8c9d6b1b4910" containerName="mariadb-database-create" Oct 10 18:16:01 crc kubenswrapper[4799]: I1010 18:16:01.394432 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fb0e2f5-1600-4bed-a4b4-8c9d6b1b4910" containerName="mariadb-database-create" Oct 10 18:16:01 crc kubenswrapper[4799]: I1010 18:16:01.394887 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="7fb0e2f5-1600-4bed-a4b4-8c9d6b1b4910" containerName="mariadb-database-create" Oct 10 18:16:01 crc kubenswrapper[4799]: I1010 18:16:01.396546 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-038d-account-create-nz4st" Oct 10 18:16:01 crc kubenswrapper[4799]: I1010 18:16:01.399240 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-db-secret" Oct 10 18:16:01 crc kubenswrapper[4799]: I1010 18:16:01.419998 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-038d-account-create-nz4st"] Oct 10 18:16:01 crc kubenswrapper[4799]: I1010 18:16:01.420418 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8jvf\" (UniqueName: \"kubernetes.io/projected/87713929-7b88-4f21-ae38-68b78557c50b-kube-api-access-m8jvf\") pod \"aodh-038d-account-create-nz4st\" (UID: \"87713929-7b88-4f21-ae38-68b78557c50b\") " pod="openstack/aodh-038d-account-create-nz4st" Oct 10 18:16:01 crc kubenswrapper[4799]: I1010 18:16:01.523054 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8jvf\" (UniqueName: \"kubernetes.io/projected/87713929-7b88-4f21-ae38-68b78557c50b-kube-api-access-m8jvf\") pod \"aodh-038d-account-create-nz4st\" (UID: \"87713929-7b88-4f21-ae38-68b78557c50b\") " pod="openstack/aodh-038d-account-create-nz4st" Oct 10 18:16:01 crc kubenswrapper[4799]: I1010 18:16:01.552627 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8jvf\" (UniqueName: \"kubernetes.io/projected/87713929-7b88-4f21-ae38-68b78557c50b-kube-api-access-m8jvf\") pod \"aodh-038d-account-create-nz4st\" (UID: \"87713929-7b88-4f21-ae38-68b78557c50b\") " pod="openstack/aodh-038d-account-create-nz4st" Oct 10 18:16:01 crc kubenswrapper[4799]: I1010 18:16:01.741197 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-038d-account-create-nz4st" Oct 10 18:16:02 crc kubenswrapper[4799]: I1010 18:16:02.308817 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-038d-account-create-nz4st"] Oct 10 18:16:02 crc kubenswrapper[4799]: I1010 18:16:02.498187 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-038d-account-create-nz4st" event={"ID":"87713929-7b88-4f21-ae38-68b78557c50b","Type":"ContainerStarted","Data":"9e0a6bea68f798a5421a2218c58872a50c307da483aed25f81cf37077ec74c25"} Oct 10 18:16:03 crc kubenswrapper[4799]: I1010 18:16:03.519855 4799 generic.go:334] "Generic (PLEG): container finished" podID="87713929-7b88-4f21-ae38-68b78557c50b" containerID="6155b3f4a3400207569b724c93b6baece6e4c6973d23496d8d42284f6467ee84" exitCode=0 Oct 10 18:16:03 crc kubenswrapper[4799]: I1010 18:16:03.519986 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-038d-account-create-nz4st" event={"ID":"87713929-7b88-4f21-ae38-68b78557c50b","Type":"ContainerDied","Data":"6155b3f4a3400207569b724c93b6baece6e4c6973d23496d8d42284f6467ee84"} Oct 10 18:16:05 crc kubenswrapper[4799]: I1010 18:16:05.031590 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-038d-account-create-nz4st" Oct 10 18:16:05 crc kubenswrapper[4799]: I1010 18:16:05.202866 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m8jvf\" (UniqueName: \"kubernetes.io/projected/87713929-7b88-4f21-ae38-68b78557c50b-kube-api-access-m8jvf\") pod \"87713929-7b88-4f21-ae38-68b78557c50b\" (UID: \"87713929-7b88-4f21-ae38-68b78557c50b\") " Oct 10 18:16:05 crc kubenswrapper[4799]: I1010 18:16:05.211977 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87713929-7b88-4f21-ae38-68b78557c50b-kube-api-access-m8jvf" (OuterVolumeSpecName: "kube-api-access-m8jvf") pod "87713929-7b88-4f21-ae38-68b78557c50b" (UID: "87713929-7b88-4f21-ae38-68b78557c50b"). InnerVolumeSpecName "kube-api-access-m8jvf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:16:05 crc kubenswrapper[4799]: I1010 18:16:05.306483 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m8jvf\" (UniqueName: \"kubernetes.io/projected/87713929-7b88-4f21-ae38-68b78557c50b-kube-api-access-m8jvf\") on node \"crc\" DevicePath \"\"" Oct 10 18:16:05 crc kubenswrapper[4799]: I1010 18:16:05.540121 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-038d-account-create-nz4st" event={"ID":"87713929-7b88-4f21-ae38-68b78557c50b","Type":"ContainerDied","Data":"9e0a6bea68f798a5421a2218c58872a50c307da483aed25f81cf37077ec74c25"} Oct 10 18:16:05 crc kubenswrapper[4799]: I1010 18:16:05.540167 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9e0a6bea68f798a5421a2218c58872a50c307da483aed25f81cf37077ec74c25" Oct 10 18:16:05 crc kubenswrapper[4799]: I1010 18:16:05.540231 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-038d-account-create-nz4st" Oct 10 18:16:06 crc kubenswrapper[4799]: I1010 18:16:06.835408 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-sync-rtv46"] Oct 10 18:16:06 crc kubenswrapper[4799]: E1010 18:16:06.836470 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87713929-7b88-4f21-ae38-68b78557c50b" containerName="mariadb-account-create" Oct 10 18:16:06 crc kubenswrapper[4799]: I1010 18:16:06.836491 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="87713929-7b88-4f21-ae38-68b78557c50b" containerName="mariadb-account-create" Oct 10 18:16:06 crc kubenswrapper[4799]: I1010 18:16:06.836834 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="87713929-7b88-4f21-ae38-68b78557c50b" containerName="mariadb-account-create" Oct 10 18:16:06 crc kubenswrapper[4799]: I1010 18:16:06.837784 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-rtv46" Oct 10 18:16:06 crc kubenswrapper[4799]: I1010 18:16:06.841352 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb-combined-ca-bundle\") pod \"aodh-db-sync-rtv46\" (UID: \"eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb\") " pod="openstack/aodh-db-sync-rtv46" Oct 10 18:16:06 crc kubenswrapper[4799]: I1010 18:16:06.841503 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbjtf\" (UniqueName: \"kubernetes.io/projected/eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb-kube-api-access-lbjtf\") pod \"aodh-db-sync-rtv46\" (UID: \"eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb\") " pod="openstack/aodh-db-sync-rtv46" Oct 10 18:16:06 crc kubenswrapper[4799]: I1010 18:16:06.841570 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb-config-data\") pod \"aodh-db-sync-rtv46\" (UID: \"eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb\") " pod="openstack/aodh-db-sync-rtv46" Oct 10 18:16:06 crc kubenswrapper[4799]: I1010 18:16:06.841712 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb-scripts\") pod \"aodh-db-sync-rtv46\" (UID: \"eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb\") " pod="openstack/aodh-db-sync-rtv46" Oct 10 18:16:06 crc kubenswrapper[4799]: I1010 18:16:06.843041 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Oct 10 18:16:06 crc kubenswrapper[4799]: I1010 18:16:06.843045 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-2k5m5" Oct 10 18:16:06 crc kubenswrapper[4799]: I1010 18:16:06.843343 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Oct 10 18:16:06 crc kubenswrapper[4799]: I1010 18:16:06.864339 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-rtv46"] Oct 10 18:16:06 crc kubenswrapper[4799]: I1010 18:16:06.944394 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb-config-data\") pod \"aodh-db-sync-rtv46\" (UID: \"eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb\") " pod="openstack/aodh-db-sync-rtv46" Oct 10 18:16:06 crc kubenswrapper[4799]: I1010 18:16:06.944549 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb-scripts\") pod \"aodh-db-sync-rtv46\" (UID: \"eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb\") " pod="openstack/aodh-db-sync-rtv46" Oct 10 18:16:06 crc kubenswrapper[4799]: I1010 18:16:06.944687 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb-combined-ca-bundle\") pod \"aodh-db-sync-rtv46\" (UID: \"eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb\") " pod="openstack/aodh-db-sync-rtv46" Oct 10 18:16:06 crc kubenswrapper[4799]: I1010 18:16:06.944769 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbjtf\" (UniqueName: 
\"kubernetes.io/projected/eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb-kube-api-access-lbjtf\") pod \"aodh-db-sync-rtv46\" (UID: \"eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb\") " pod="openstack/aodh-db-sync-rtv46" Oct 10 18:16:06 crc kubenswrapper[4799]: I1010 18:16:06.950645 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb-scripts\") pod \"aodh-db-sync-rtv46\" (UID: \"eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb\") " pod="openstack/aodh-db-sync-rtv46" Oct 10 18:16:06 crc kubenswrapper[4799]: I1010 18:16:06.951113 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb-combined-ca-bundle\") pod \"aodh-db-sync-rtv46\" (UID: \"eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb\") " pod="openstack/aodh-db-sync-rtv46" Oct 10 18:16:06 crc kubenswrapper[4799]: I1010 18:16:06.963865 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb-config-data\") pod \"aodh-db-sync-rtv46\" (UID: \"eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb\") " pod="openstack/aodh-db-sync-rtv46" Oct 10 18:16:06 crc kubenswrapper[4799]: I1010 18:16:06.968745 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbjtf\" (UniqueName: \"kubernetes.io/projected/eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb-kube-api-access-lbjtf\") pod \"aodh-db-sync-rtv46\" (UID: \"eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb\") " pod="openstack/aodh-db-sync-rtv46" Oct 10 18:16:07 crc kubenswrapper[4799]: I1010 18:16:07.166145 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-rtv46" Oct 10 18:16:07 crc kubenswrapper[4799]: I1010 18:16:07.688246 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-rtv46"] Oct 10 18:16:08 crc kubenswrapper[4799]: I1010 18:16:08.577280 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-rtv46" event={"ID":"eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb","Type":"ContainerStarted","Data":"3ac2e5365459ce9cdef613e5380f3793c8bb58107f3bc695602dcc568f768f3f"} Oct 10 18:16:10 crc kubenswrapper[4799]: I1010 18:16:10.586209 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 10 18:16:10 crc kubenswrapper[4799]: I1010 18:16:10.663414 4799 scope.go:117] "RemoveContainer" containerID="5d5087310f4cf1ba0ad902d87ab9cb2a26f05f9187f4bc3a3a67d019205c7b3d" Oct 10 18:16:13 crc kubenswrapper[4799]: I1010 18:16:13.304570 4799 scope.go:117] "RemoveContainer" containerID="bfd496f44ac8bb7a6b44b9240f3b4a21fdda6e8b9ff22c4519d59683a6630cc7" Oct 10 18:16:13 crc kubenswrapper[4799]: I1010 18:16:13.364647 4799 scope.go:117] "RemoveContainer" containerID="5673edc64fbbc65660e1306ab5b2c3bfbcf9730efd6d9ce4894975150994e908" Oct 10 18:16:13 crc kubenswrapper[4799]: I1010 18:16:13.486035 4799 scope.go:117] "RemoveContainer" containerID="fc16a722f190cc8007d428edd16bbb2a95388dcf8c5e37789f36b923514d7e78" Oct 10 18:16:13 crc kubenswrapper[4799]: I1010 18:16:13.642614 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-rtv46" event={"ID":"eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb","Type":"ContainerStarted","Data":"49cb8a7391883186389d41e61c313ad64ac184ce2a3b845fc4ab437ef9f38f7e"} Oct 10 18:16:13 crc kubenswrapper[4799]: I1010 18:16:13.667411 4799 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-db-sync-rtv46" podStartSLOduration=1.9970641869999999 podStartE2EDuration="7.667384245s" podCreationTimestamp="2025-10-10 18:16:06 +0000 UTC" firstStartedPulling="2025-10-10 18:16:07.694308525 +0000 UTC m=+6261.202632640" lastFinishedPulling="2025-10-10 18:16:13.364628543 +0000 UTC m=+6266.872952698" observedRunningTime="2025-10-10 18:16:13.661956043 +0000 UTC m=+6267.170280178" watchObservedRunningTime="2025-10-10 18:16:13.667384245 +0000 UTC m=+6267.175708380" Oct 10 18:16:14 crc kubenswrapper[4799]: I1010 18:16:14.048693 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-nh82m"] Oct 10 18:16:14 crc kubenswrapper[4799]: I1010 18:16:14.060242 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-nh82m"] Oct 10 18:16:15 crc kubenswrapper[4799]: I1010 18:16:15.248492 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 18:16:15 crc kubenswrapper[4799]: I1010 18:16:15.248931 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 18:16:15 crc kubenswrapper[4799]: I1010 18:16:15.427024 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89463cf0-a6a9-4a2a-834c-cdf8914a4410" path="/var/lib/kubelet/pods/89463cf0-a6a9-4a2a-834c-cdf8914a4410/volumes" Oct 10 18:16:15 crc kubenswrapper[4799]: I1010 18:16:15.669207 4799 generic.go:334] "Generic (PLEG): container finished" podID="eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb" containerID="49cb8a7391883186389d41e61c313ad64ac184ce2a3b845fc4ab437ef9f38f7e" exitCode=0 Oct 10 18:16:15 crc kubenswrapper[4799]: I1010 18:16:15.669251 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-rtv46" event={"ID":"eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb","Type":"ContainerDied","Data":"49cb8a7391883186389d41e61c313ad64ac184ce2a3b845fc4ab437ef9f38f7e"} Oct 10 18:16:17 crc kubenswrapper[4799]: I1010 18:16:17.197326 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-rtv46"
Oct 10 18:16:17 crc kubenswrapper[4799]: I1010 18:16:17.287930 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb-combined-ca-bundle\") pod \"eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb\" (UID: \"eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb\") "
Oct 10 18:16:17 crc kubenswrapper[4799]: I1010 18:16:17.288125 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb-config-data\") pod \"eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb\" (UID: \"eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb\") "
Oct 10 18:16:17 crc kubenswrapper[4799]: I1010 18:16:17.288182 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lbjtf\" (UniqueName: \"kubernetes.io/projected/eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb-kube-api-access-lbjtf\") pod \"eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb\" (UID: \"eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb\") "
Oct 10 18:16:17 crc kubenswrapper[4799]: I1010 18:16:17.288216 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb-scripts\") pod \"eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb\" (UID: \"eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb\") "
Oct 10 18:16:17 crc kubenswrapper[4799]: I1010 18:16:17.295470 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb-kube-api-access-lbjtf" (OuterVolumeSpecName: "kube-api-access-lbjtf") pod "eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb" (UID: "eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb"). InnerVolumeSpecName "kube-api-access-lbjtf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 18:16:17 crc kubenswrapper[4799]: I1010 18:16:17.297313 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb-scripts" (OuterVolumeSpecName: "scripts") pod "eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb" (UID: "eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:16:17 crc kubenswrapper[4799]: I1010 18:16:17.331790 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb" (UID: "eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:16:17 crc kubenswrapper[4799]: I1010 18:16:17.343114 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb-config-data" (OuterVolumeSpecName: "config-data") pod "eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb" (UID: "eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:16:17 crc kubenswrapper[4799]: I1010 18:16:17.390527 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb-config-data\") on node \"crc\" DevicePath \"\""
Oct 10 18:16:17 crc kubenswrapper[4799]: I1010 18:16:17.390590 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lbjtf\" (UniqueName: \"kubernetes.io/projected/eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb-kube-api-access-lbjtf\") on node \"crc\" DevicePath \"\""
Oct 10 18:16:17 crc kubenswrapper[4799]: I1010 18:16:17.390613 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb-scripts\") on node \"crc\" DevicePath \"\""
Oct 10 18:16:17 crc kubenswrapper[4799]: I1010 18:16:17.390634 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 10 18:16:17 crc kubenswrapper[4799]: I1010 18:16:17.700389 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-rtv46" event={"ID":"eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb","Type":"ContainerDied","Data":"3ac2e5365459ce9cdef613e5380f3793c8bb58107f3bc695602dcc568f768f3f"}
Oct 10 18:16:17 crc kubenswrapper[4799]: I1010 18:16:17.700455 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3ac2e5365459ce9cdef613e5380f3793c8bb58107f3bc695602dcc568f768f3f"
Oct 10 18:16:17 crc kubenswrapper[4799]: I1010 18:16:17.700498 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-rtv46"
Oct 10 18:16:21 crc kubenswrapper[4799]: I1010 18:16:21.972819 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-0"]
Oct 10 18:16:21 crc kubenswrapper[4799]: E1010 18:16:21.977638 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb" containerName="aodh-db-sync"
Oct 10 18:16:21 crc kubenswrapper[4799]: I1010 18:16:21.978362 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb" containerName="aodh-db-sync"
Oct 10 18:16:21 crc kubenswrapper[4799]: I1010 18:16:21.978662 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb" containerName="aodh-db-sync"
Oct 10 18:16:21 crc kubenswrapper[4799]: I1010 18:16:21.987016 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0"
Oct 10 18:16:21 crc kubenswrapper[4799]: I1010 18:16:21.990145 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data"
Oct 10 18:16:21 crc kubenswrapper[4799]: I1010 18:16:21.990421 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts"
Oct 10 18:16:21 crc kubenswrapper[4799]: I1010 18:16:21.990570 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-2k5m5"
Oct 10 18:16:22 crc kubenswrapper[4799]: I1010 18:16:22.005867 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"]
Oct 10 18:16:22 crc kubenswrapper[4799]: I1010 18:16:22.147155 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91a68421-466f-4eaa-a89b-59fa972a0726-scripts\") pod \"aodh-0\" (UID: \"91a68421-466f-4eaa-a89b-59fa972a0726\") " pod="openstack/aodh-0"
Oct 10 18:16:22 crc kubenswrapper[4799]: I1010 18:16:22.147217 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-knfmq\" (UniqueName: \"kubernetes.io/projected/91a68421-466f-4eaa-a89b-59fa972a0726-kube-api-access-knfmq\") pod \"aodh-0\" (UID: \"91a68421-466f-4eaa-a89b-59fa972a0726\") " pod="openstack/aodh-0"
Oct 10 18:16:22 crc kubenswrapper[4799]: I1010 18:16:22.147718 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91a68421-466f-4eaa-a89b-59fa972a0726-config-data\") pod \"aodh-0\" (UID: \"91a68421-466f-4eaa-a89b-59fa972a0726\") " pod="openstack/aodh-0"
Oct 10 18:16:22 crc kubenswrapper[4799]: I1010 18:16:22.147783 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91a68421-466f-4eaa-a89b-59fa972a0726-combined-ca-bundle\") pod \"aodh-0\" (UID: \"91a68421-466f-4eaa-a89b-59fa972a0726\") " pod="openstack/aodh-0"
Oct 10 18:16:22 crc kubenswrapper[4799]: I1010 18:16:22.249216 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91a68421-466f-4eaa-a89b-59fa972a0726-config-data\") pod \"aodh-0\" (UID: \"91a68421-466f-4eaa-a89b-59fa972a0726\") " pod="openstack/aodh-0"
Oct 10 18:16:22 crc kubenswrapper[4799]: I1010 18:16:22.249262 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91a68421-466f-4eaa-a89b-59fa972a0726-combined-ca-bundle\") pod \"aodh-0\" (UID: \"91a68421-466f-4eaa-a89b-59fa972a0726\") " pod="openstack/aodh-0"
Oct 10 18:16:22 crc kubenswrapper[4799]: I1010 18:16:22.249354 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91a68421-466f-4eaa-a89b-59fa972a0726-scripts\") pod \"aodh-0\" (UID: \"91a68421-466f-4eaa-a89b-59fa972a0726\") " pod="openstack/aodh-0"
Oct 10 18:16:22 crc kubenswrapper[4799]: I1010 18:16:22.249390 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-knfmq\" (UniqueName: \"kubernetes.io/projected/91a68421-466f-4eaa-a89b-59fa972a0726-kube-api-access-knfmq\") pod \"aodh-0\" (UID: \"91a68421-466f-4eaa-a89b-59fa972a0726\") " pod="openstack/aodh-0"
Oct 10 18:16:22 crc kubenswrapper[4799]: I1010 18:16:22.257668 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91a68421-466f-4eaa-a89b-59fa972a0726-combined-ca-bundle\") pod \"aodh-0\" (UID: \"91a68421-466f-4eaa-a89b-59fa972a0726\") " pod="openstack/aodh-0"
Oct 10 18:16:22 crc kubenswrapper[4799]: I1010 18:16:22.259207 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91a68421-466f-4eaa-a89b-59fa972a0726-config-data\") pod \"aodh-0\" (UID: \"91a68421-466f-4eaa-a89b-59fa972a0726\") " pod="openstack/aodh-0"
Oct 10 18:16:22 crc kubenswrapper[4799]: I1010 18:16:22.267356 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91a68421-466f-4eaa-a89b-59fa972a0726-scripts\") pod \"aodh-0\" (UID: \"91a68421-466f-4eaa-a89b-59fa972a0726\") " pod="openstack/aodh-0"
Oct 10 18:16:22 crc kubenswrapper[4799]: I1010 18:16:22.270494 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-knfmq\" (UniqueName: \"kubernetes.io/projected/91a68421-466f-4eaa-a89b-59fa972a0726-kube-api-access-knfmq\") pod \"aodh-0\" (UID: \"91a68421-466f-4eaa-a89b-59fa972a0726\") " pod="openstack/aodh-0"
Oct 10 18:16:22 crc kubenswrapper[4799]: I1010 18:16:22.311905 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0"
Oct 10 18:16:22 crc kubenswrapper[4799]: I1010 18:16:22.897507 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"]
Oct 10 18:16:23 crc kubenswrapper[4799]: I1010 18:16:23.775427 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"91a68421-466f-4eaa-a89b-59fa972a0726","Type":"ContainerStarted","Data":"a1e163161a71e8827745f52e7ae60e7bffa96d6504249221ca63a67a6ad6fc4e"}
Oct 10 18:16:23 crc kubenswrapper[4799]: I1010 18:16:23.776010 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"91a68421-466f-4eaa-a89b-59fa972a0726","Type":"ContainerStarted","Data":"b5fd4b0868e62f5bdaccf7b307592e750d278b4c486dc26e6df65f6c1d6b1612"}
Oct 10 18:16:24 crc kubenswrapper[4799]: I1010 18:16:24.035797 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-d79c-account-create-pktxw"]
Oct 10 18:16:24 crc kubenswrapper[4799]: I1010 18:16:24.058051 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-d79c-account-create-pktxw"]
Oct 10 18:16:24 crc kubenswrapper[4799]: I1010 18:16:24.310093 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 10 18:16:24 crc kubenswrapper[4799]: I1010 18:16:24.310449 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="84184e7b-2222-4f92-9cdd-894da6f28546" containerName="ceilometer-central-agent" containerID="cri-o://20c94c17225e5f2b5cbd4f8270aea0afadc3357ded681feaac42cd4c21c5e99e" gracePeriod=30
Oct 10 18:16:24 crc kubenswrapper[4799]: I1010 18:16:24.310697 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="84184e7b-2222-4f92-9cdd-894da6f28546" containerName="proxy-httpd" containerID="cri-o://6590690d51c4cee10c08e904dded7720fe192a9713241212d736e56870650444" gracePeriod=30
Oct 10 18:16:24 crc kubenswrapper[4799]: I1010 18:16:24.311018 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="84184e7b-2222-4f92-9cdd-894da6f28546" containerName="ceilometer-notification-agent" containerID="cri-o://adcd0d89227e727b66ba9c1292acdab7839cbf41eae08e79830eb25383c75771" gracePeriod=30
Oct 10 18:16:24 crc kubenswrapper[4799]: I1010 18:16:24.311176 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="84184e7b-2222-4f92-9cdd-894da6f28546" containerName="sg-core" containerID="cri-o://8da5dff04d2334e0bf51cc6d287f828b1a5c42d57ce989e3aabfa854b09ddea2" gracePeriod=30
Oct 10 18:16:24 crc kubenswrapper[4799]: E1010 18:16:24.472711 4799 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod84184e7b_2222_4f92_9cdd_894da6f28546.slice/crio-conmon-6590690d51c4cee10c08e904dded7720fe192a9713241212d736e56870650444.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod84184e7b_2222_4f92_9cdd_894da6f28546.slice/crio-conmon-8da5dff04d2334e0bf51cc6d287f828b1a5c42d57ce989e3aabfa854b09ddea2.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod84184e7b_2222_4f92_9cdd_894da6f28546.slice/crio-6590690d51c4cee10c08e904dded7720fe192a9713241212d736e56870650444.scope\": RecentStats: unable to find data in memory cache]"
Oct 10 18:16:24 crc kubenswrapper[4799]: I1010 18:16:24.788437 4799 generic.go:334] "Generic (PLEG): container finished" podID="84184e7b-2222-4f92-9cdd-894da6f28546" containerID="6590690d51c4cee10c08e904dded7720fe192a9713241212d736e56870650444" exitCode=0
Oct 10 18:16:24 crc kubenswrapper[4799]: I1010 18:16:24.788477 4799 generic.go:334] "Generic (PLEG): container finished" podID="84184e7b-2222-4f92-9cdd-894da6f28546" containerID="8da5dff04d2334e0bf51cc6d287f828b1a5c42d57ce989e3aabfa854b09ddea2" exitCode=2
Oct 10 18:16:24 crc kubenswrapper[4799]: I1010 18:16:24.788487 4799 generic.go:334] "Generic (PLEG): container finished" podID="84184e7b-2222-4f92-9cdd-894da6f28546" containerID="20c94c17225e5f2b5cbd4f8270aea0afadc3357ded681feaac42cd4c21c5e99e" exitCode=0
Oct 10 18:16:24 crc kubenswrapper[4799]: I1010 18:16:24.788514 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"84184e7b-2222-4f92-9cdd-894da6f28546","Type":"ContainerDied","Data":"6590690d51c4cee10c08e904dded7720fe192a9713241212d736e56870650444"}
Oct 10 18:16:24 crc kubenswrapper[4799]: I1010 18:16:24.788548 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"84184e7b-2222-4f92-9cdd-894da6f28546","Type":"ContainerDied","Data":"8da5dff04d2334e0bf51cc6d287f828b1a5c42d57ce989e3aabfa854b09ddea2"}
Oct 10 18:16:24 crc kubenswrapper[4799]: I1010 18:16:24.788564 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"84184e7b-2222-4f92-9cdd-894da6f28546","Type":"ContainerDied","Data":"20c94c17225e5f2b5cbd4f8270aea0afadc3357ded681feaac42cd4c21c5e99e"}
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.306718 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.413429 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6f6bcef-c635-424f-a956-eaf79c6df15f" path="/var/lib/kubelet/pods/d6f6bcef-c635-424f-a956-eaf79c6df15f/volumes"
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.432312 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84184e7b-2222-4f92-9cdd-894da6f28546-combined-ca-bundle\") pod \"84184e7b-2222-4f92-9cdd-894da6f28546\" (UID: \"84184e7b-2222-4f92-9cdd-894da6f28546\") "
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.432457 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/84184e7b-2222-4f92-9cdd-894da6f28546-run-httpd\") pod \"84184e7b-2222-4f92-9cdd-894da6f28546\" (UID: \"84184e7b-2222-4f92-9cdd-894da6f28546\") "
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.432534 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84184e7b-2222-4f92-9cdd-894da6f28546-config-data\") pod \"84184e7b-2222-4f92-9cdd-894da6f28546\" (UID: \"84184e7b-2222-4f92-9cdd-894da6f28546\") "
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.432623 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/84184e7b-2222-4f92-9cdd-894da6f28546-log-httpd\") pod \"84184e7b-2222-4f92-9cdd-894da6f28546\" (UID: \"84184e7b-2222-4f92-9cdd-894da6f28546\") "
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.432683 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/84184e7b-2222-4f92-9cdd-894da6f28546-scripts\") pod \"84184e7b-2222-4f92-9cdd-894da6f28546\" (UID: \"84184e7b-2222-4f92-9cdd-894da6f28546\") "
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.432711 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m28dz\" (UniqueName: \"kubernetes.io/projected/84184e7b-2222-4f92-9cdd-894da6f28546-kube-api-access-m28dz\") pod \"84184e7b-2222-4f92-9cdd-894da6f28546\" (UID: \"84184e7b-2222-4f92-9cdd-894da6f28546\") "
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.432733 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/84184e7b-2222-4f92-9cdd-894da6f28546-sg-core-conf-yaml\") pod \"84184e7b-2222-4f92-9cdd-894da6f28546\" (UID: \"84184e7b-2222-4f92-9cdd-894da6f28546\") "
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.432963 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84184e7b-2222-4f92-9cdd-894da6f28546-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "84184e7b-2222-4f92-9cdd-894da6f28546" (UID: "84184e7b-2222-4f92-9cdd-894da6f28546"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.433189 4799 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/84184e7b-2222-4f92-9cdd-894da6f28546-run-httpd\") on node \"crc\" DevicePath \"\""
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.433239 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84184e7b-2222-4f92-9cdd-894da6f28546-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "84184e7b-2222-4f92-9cdd-894da6f28546" (UID: "84184e7b-2222-4f92-9cdd-894da6f28546"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.441895 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84184e7b-2222-4f92-9cdd-894da6f28546-scripts" (OuterVolumeSpecName: "scripts") pod "84184e7b-2222-4f92-9cdd-894da6f28546" (UID: "84184e7b-2222-4f92-9cdd-894da6f28546"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.441925 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84184e7b-2222-4f92-9cdd-894da6f28546-kube-api-access-m28dz" (OuterVolumeSpecName: "kube-api-access-m28dz") pod "84184e7b-2222-4f92-9cdd-894da6f28546" (UID: "84184e7b-2222-4f92-9cdd-894da6f28546"). InnerVolumeSpecName "kube-api-access-m28dz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.468685 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84184e7b-2222-4f92-9cdd-894da6f28546-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "84184e7b-2222-4f92-9cdd-894da6f28546" (UID: "84184e7b-2222-4f92-9cdd-894da6f28546"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.524663 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84184e7b-2222-4f92-9cdd-894da6f28546-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "84184e7b-2222-4f92-9cdd-894da6f28546" (UID: "84184e7b-2222-4f92-9cdd-894da6f28546"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.536263 4799 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/84184e7b-2222-4f92-9cdd-894da6f28546-log-httpd\") on node \"crc\" DevicePath \"\""
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.536326 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/84184e7b-2222-4f92-9cdd-894da6f28546-scripts\") on node \"crc\" DevicePath \"\""
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.536337 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m28dz\" (UniqueName: \"kubernetes.io/projected/84184e7b-2222-4f92-9cdd-894da6f28546-kube-api-access-m28dz\") on node \"crc\" DevicePath \"\""
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.536348 4799 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/84184e7b-2222-4f92-9cdd-894da6f28546-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.536357 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84184e7b-2222-4f92-9cdd-894da6f28546-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.561048 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84184e7b-2222-4f92-9cdd-894da6f28546-config-data" (OuterVolumeSpecName: "config-data") pod "84184e7b-2222-4f92-9cdd-894da6f28546" (UID: "84184e7b-2222-4f92-9cdd-894da6f28546"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.642538 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84184e7b-2222-4f92-9cdd-894da6f28546-config-data\") on node \"crc\" DevicePath \"\""
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.800899 4799 generic.go:334] "Generic (PLEG): container finished" podID="84184e7b-2222-4f92-9cdd-894da6f28546" containerID="adcd0d89227e727b66ba9c1292acdab7839cbf41eae08e79830eb25383c75771" exitCode=0
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.801048 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"84184e7b-2222-4f92-9cdd-894da6f28546","Type":"ContainerDied","Data":"adcd0d89227e727b66ba9c1292acdab7839cbf41eae08e79830eb25383c75771"}
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.801149 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.802943 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"84184e7b-2222-4f92-9cdd-894da6f28546","Type":"ContainerDied","Data":"3715331f46f994c050eaeb4836f389d5bcfb91945cfbc7838363d0a3c7d3de96"}
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.803079 4799 scope.go:117] "RemoveContainer" containerID="6590690d51c4cee10c08e904dded7720fe192a9713241212d736e56870650444"
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.804600 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"91a68421-466f-4eaa-a89b-59fa972a0726","Type":"ContainerStarted","Data":"10c7165e77e4c4b6212163c1e595e09f0330b4d41dc60b686ff65ac1df3315d9"}
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.848518 4799 scope.go:117] "RemoveContainer" containerID="8da5dff04d2334e0bf51cc6d287f828b1a5c42d57ce989e3aabfa854b09ddea2"
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.883348 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.902997 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.928818 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Oct 10 18:16:25 crc kubenswrapper[4799]: E1010 18:16:25.929352 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84184e7b-2222-4f92-9cdd-894da6f28546" containerName="ceilometer-notification-agent"
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.929372 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="84184e7b-2222-4f92-9cdd-894da6f28546" containerName="ceilometer-notification-agent"
Oct 10 18:16:25 crc kubenswrapper[4799]: E1010 18:16:25.929390 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84184e7b-2222-4f92-9cdd-894da6f28546" containerName="ceilometer-central-agent"
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.929398 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="84184e7b-2222-4f92-9cdd-894da6f28546" containerName="ceilometer-central-agent"
Oct 10 18:16:25 crc kubenswrapper[4799]: E1010 18:16:25.929414 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84184e7b-2222-4f92-9cdd-894da6f28546" containerName="sg-core"
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.929419 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="84184e7b-2222-4f92-9cdd-894da6f28546" containerName="sg-core"
Oct 10 18:16:25 crc kubenswrapper[4799]: E1010 18:16:25.929431 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84184e7b-2222-4f92-9cdd-894da6f28546" containerName="proxy-httpd"
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.929438 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="84184e7b-2222-4f92-9cdd-894da6f28546" containerName="proxy-httpd"
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.929613 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="84184e7b-2222-4f92-9cdd-894da6f28546" containerName="ceilometer-central-agent"
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.929628 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="84184e7b-2222-4f92-9cdd-894da6f28546" containerName="ceilometer-notification-agent"
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.929643 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="84184e7b-2222-4f92-9cdd-894da6f28546" containerName="proxy-httpd"
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.929658 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="84184e7b-2222-4f92-9cdd-894da6f28546" containerName="sg-core"
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.931599 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.943431 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.944440 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.959912 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 10 18:16:25 crc kubenswrapper[4799]: I1010 18:16:25.968012 4799 scope.go:117] "RemoveContainer" containerID="adcd0d89227e727b66ba9c1292acdab7839cbf41eae08e79830eb25383c75771"
Oct 10 18:16:26 crc kubenswrapper[4799]: I1010 18:16:26.039917 4799 scope.go:117] "RemoveContainer" containerID="20c94c17225e5f2b5cbd4f8270aea0afadc3357ded681feaac42cd4c21c5e99e"
Oct 10 18:16:26 crc kubenswrapper[4799]: I1010 18:16:26.055027 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa7adfb3-8f69-4867-a36d-daac33771cb0-log-httpd\") pod \"ceilometer-0\" (UID: \"aa7adfb3-8f69-4867-a36d-daac33771cb0\") " pod="openstack/ceilometer-0"
Oct 10 18:16:26 crc kubenswrapper[4799]: I1010 18:16:26.055101 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa7adfb3-8f69-4867-a36d-daac33771cb0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"aa7adfb3-8f69-4867-a36d-daac33771cb0\") " pod="openstack/ceilometer-0"
Oct 10 18:16:26 crc kubenswrapper[4799]: I1010 18:16:26.055123 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/aa7adfb3-8f69-4867-a36d-daac33771cb0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"aa7adfb3-8f69-4867-a36d-daac33771cb0\") " pod="openstack/ceilometer-0"
Oct 10 18:16:26 crc kubenswrapper[4799]: I1010 18:16:26.055167 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vg49h\" (UniqueName: \"kubernetes.io/projected/aa7adfb3-8f69-4867-a36d-daac33771cb0-kube-api-access-vg49h\") pod \"ceilometer-0\" (UID: \"aa7adfb3-8f69-4867-a36d-daac33771cb0\") " pod="openstack/ceilometer-0"
Oct 10 18:16:26 crc kubenswrapper[4799]: I1010 18:16:26.055221 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa7adfb3-8f69-4867-a36d-daac33771cb0-scripts\") pod \"ceilometer-0\" (UID: \"aa7adfb3-8f69-4867-a36d-daac33771cb0\") " pod="openstack/ceilometer-0"
Oct 10 18:16:26 crc kubenswrapper[4799]: I1010 18:16:26.055259 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa7adfb3-8f69-4867-a36d-daac33771cb0-run-httpd\") pod \"ceilometer-0\" (UID: \"aa7adfb3-8f69-4867-a36d-daac33771cb0\") " pod="openstack/ceilometer-0"
Oct 10 18:16:26 crc kubenswrapper[4799]: I1010 18:16:26.055279 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa7adfb3-8f69-4867-a36d-daac33771cb0-config-data\") pod \"ceilometer-0\" (UID: \"aa7adfb3-8f69-4867-a36d-daac33771cb0\") " pod="openstack/ceilometer-0"
Oct 10 18:16:26 crc kubenswrapper[4799]: I1010 18:16:26.127286 4799 scope.go:117] "RemoveContainer" containerID="6590690d51c4cee10c08e904dded7720fe192a9713241212d736e56870650444"
Oct 10 18:16:26 crc kubenswrapper[4799]: E1010 18:16:26.127640 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6590690d51c4cee10c08e904dded7720fe192a9713241212d736e56870650444\": container with ID starting with 6590690d51c4cee10c08e904dded7720fe192a9713241212d736e56870650444 not found: ID does not exist" containerID="6590690d51c4cee10c08e904dded7720fe192a9713241212d736e56870650444"
Oct 10 18:16:26 crc kubenswrapper[4799]: I1010 18:16:26.127668 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6590690d51c4cee10c08e904dded7720fe192a9713241212d736e56870650444"} err="failed to get container status \"6590690d51c4cee10c08e904dded7720fe192a9713241212d736e56870650444\": rpc error: code = NotFound desc = could not find container \"6590690d51c4cee10c08e904dded7720fe192a9713241212d736e56870650444\": container with ID starting with 6590690d51c4cee10c08e904dded7720fe192a9713241212d736e56870650444 not found: ID does not exist"
Oct 10 18:16:26 crc kubenswrapper[4799]: I1010 18:16:26.127687 4799 scope.go:117] "RemoveContainer" containerID="8da5dff04d2334e0bf51cc6d287f828b1a5c42d57ce989e3aabfa854b09ddea2"
Oct 10 18:16:26 crc kubenswrapper[4799]: E1010 18:16:26.128246 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8da5dff04d2334e0bf51cc6d287f828b1a5c42d57ce989e3aabfa854b09ddea2\": container with ID starting with 8da5dff04d2334e0bf51cc6d287f828b1a5c42d57ce989e3aabfa854b09ddea2 not found: ID does not exist" containerID="8da5dff04d2334e0bf51cc6d287f828b1a5c42d57ce989e3aabfa854b09ddea2"
Oct 10 18:16:26 crc kubenswrapper[4799]: I1010 18:16:26.128267 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8da5dff04d2334e0bf51cc6d287f828b1a5c42d57ce989e3aabfa854b09ddea2"} err="failed to get container status \"8da5dff04d2334e0bf51cc6d287f828b1a5c42d57ce989e3aabfa854b09ddea2\": rpc error: code = NotFound desc = could not find container \"8da5dff04d2334e0bf51cc6d287f828b1a5c42d57ce989e3aabfa854b09ddea2\": container with ID starting with 8da5dff04d2334e0bf51cc6d287f828b1a5c42d57ce989e3aabfa854b09ddea2 not found: ID does not exist"
Oct 10 18:16:26 crc kubenswrapper[4799]: I1010 18:16:26.128280 4799 scope.go:117] "RemoveContainer" containerID="adcd0d89227e727b66ba9c1292acdab7839cbf41eae08e79830eb25383c75771"
Oct 10 18:16:26 crc kubenswrapper[4799]: E1010 18:16:26.128591 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"adcd0d89227e727b66ba9c1292acdab7839cbf41eae08e79830eb25383c75771\": container with ID starting with adcd0d89227e727b66ba9c1292acdab7839cbf41eae08e79830eb25383c75771 not found: ID does not exist" containerID="adcd0d89227e727b66ba9c1292acdab7839cbf41eae08e79830eb25383c75771"
Oct 10 18:16:26 crc kubenswrapper[4799]: I1010 18:16:26.128634 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"adcd0d89227e727b66ba9c1292acdab7839cbf41eae08e79830eb25383c75771"} err="failed to get container status \"adcd0d89227e727b66ba9c1292acdab7839cbf41eae08e79830eb25383c75771\": rpc error: code = NotFound desc = could not find container \"adcd0d89227e727b66ba9c1292acdab7839cbf41eae08e79830eb25383c75771\": container with ID starting with adcd0d89227e727b66ba9c1292acdab7839cbf41eae08e79830eb25383c75771 not found: ID does not exist"
Oct 10 18:16:26 crc kubenswrapper[4799]: I1010 18:16:26.128665 4799 scope.go:117] "RemoveContainer" containerID="20c94c17225e5f2b5cbd4f8270aea0afadc3357ded681feaac42cd4c21c5e99e"
Oct 10 18:16:26 crc kubenswrapper[4799]: E1010 18:16:26.129160 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"20c94c17225e5f2b5cbd4f8270aea0afadc3357ded681feaac42cd4c21c5e99e\": container with ID starting with 20c94c17225e5f2b5cbd4f8270aea0afadc3357ded681feaac42cd4c21c5e99e not found: ID does not exist" containerID="20c94c17225e5f2b5cbd4f8270aea0afadc3357ded681feaac42cd4c21c5e99e"
Oct 10 18:16:26 crc kubenswrapper[4799]: I1010 18:16:26.129183 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"20c94c17225e5f2b5cbd4f8270aea0afadc3357ded681feaac42cd4c21c5e99e"} err="failed to get container status \"20c94c17225e5f2b5cbd4f8270aea0afadc3357ded681feaac42cd4c21c5e99e\": rpc error: code = NotFound desc = could not find container \"20c94c17225e5f2b5cbd4f8270aea0afadc3357ded681feaac42cd4c21c5e99e\": container with ID starting with 20c94c17225e5f2b5cbd4f8270aea0afadc3357ded681feaac42cd4c21c5e99e not found: ID does not exist"
Oct 10 18:16:26 crc kubenswrapper[4799]: I1010 18:16:26.156931 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa7adfb3-8f69-4867-a36d-daac33771cb0-scripts\") pod \"ceilometer-0\" (UID: \"aa7adfb3-8f69-4867-a36d-daac33771cb0\") " pod="openstack/ceilometer-0"
Oct 10 18:16:26 crc kubenswrapper[4799]: I1010 18:16:26.156996 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa7adfb3-8f69-4867-a36d-daac33771cb0-run-httpd\") pod \"ceilometer-0\" (UID: \"aa7adfb3-8f69-4867-a36d-daac33771cb0\") " pod="openstack/ceilometer-0"
Oct 10 18:16:26 crc kubenswrapper[4799]: I1010 18:16:26.157017 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa7adfb3-8f69-4867-a36d-daac33771cb0-config-data\") pod \"ceilometer-0\" (UID: \"aa7adfb3-8f69-4867-a36d-daac33771cb0\") " pod="openstack/ceilometer-0"
Oct 10 18:16:26 crc kubenswrapper[4799]: I1010 18:16:26.157070 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa7adfb3-8f69-4867-a36d-daac33771cb0-log-httpd\") pod \"ceilometer-0\" (UID: \"aa7adfb3-8f69-4867-a36d-daac33771cb0\") " pod="openstack/ceilometer-0"
Oct 10 18:16:26 crc kubenswrapper[4799]: I1010 18:16:26.157129 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa7adfb3-8f69-4867-a36d-daac33771cb0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"aa7adfb3-8f69-4867-a36d-daac33771cb0\") " pod="openstack/ceilometer-0"
Oct 10 18:16:26 crc kubenswrapper[4799]: I1010 18:16:26.157149 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/aa7adfb3-8f69-4867-a36d-daac33771cb0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"aa7adfb3-8f69-4867-a36d-daac33771cb0\") " pod="openstack/ceilometer-0"
Oct 10 18:16:26 crc kubenswrapper[4799]: I1010 18:16:26.157194 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vg49h\" (UniqueName: \"kubernetes.io/projected/aa7adfb3-8f69-4867-a36d-daac33771cb0-kube-api-access-vg49h\") pod \"ceilometer-0\" (UID: \"aa7adfb3-8f69-4867-a36d-daac33771cb0\") " pod="openstack/ceilometer-0"
Oct 10 18:16:26 crc kubenswrapper[4799]: I1010 18:16:26.157693 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa7adfb3-8f69-4867-a36d-daac33771cb0-run-httpd\") pod \"ceilometer-0\" (UID: \"aa7adfb3-8f69-4867-a36d-daac33771cb0\") " pod="openstack/ceilometer-0"
Oct 10 18:16:26 crc kubenswrapper[4799]: I1010 18:16:26.157740 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa7adfb3-8f69-4867-a36d-daac33771cb0-log-httpd\") pod \"ceilometer-0\" (UID: \"aa7adfb3-8f69-4867-a36d-daac33771cb0\") " pod="openstack/ceilometer-0"
Oct 10 18:16:26 crc kubenswrapper[4799]: I1010 18:16:26.162112 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa7adfb3-8f69-4867-a36d-daac33771cb0-scripts\") pod \"ceilometer-0\" (UID: \"aa7adfb3-8f69-4867-a36d-daac33771cb0\") " pod="openstack/ceilometer-0"
Oct 10 18:16:26 crc kubenswrapper[4799]: I1010 18:16:26.170423 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/aa7adfb3-8f69-4867-a36d-daac33771cb0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"aa7adfb3-8f69-4867-a36d-daac33771cb0\") " pod="openstack/ceilometer-0"
Oct 10 18:16:26 crc kubenswrapper[4799]: I1010 18:16:26.171073 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa7adfb3-8f69-4867-a36d-daac33771cb0-config-data\") pod \"ceilometer-0\" (UID: \"aa7adfb3-8f69-4867-a36d-daac33771cb0\") " pod="openstack/ceilometer-0"
Oct 10 18:16:26 crc kubenswrapper[4799]: I1010 18:16:26.174643 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vg49h\" (UniqueName: \"kubernetes.io/projected/aa7adfb3-8f69-4867-a36d-daac33771cb0-kube-api-access-vg49h\") pod \"ceilometer-0\" (UID: \"aa7adfb3-8f69-4867-a36d-daac33771cb0\") " pod="openstack/ceilometer-0"
Oct 10 18:16:26 crc kubenswrapper[4799]: I1010 18:16:26.175245 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa7adfb3-8f69-4867-a36d-daac33771cb0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"aa7adfb3-8f69-4867-a36d-daac33771cb0\") " pod="openstack/ceilometer-0"
Oct 10 18:16:26 crc kubenswrapper[4799]: I1010 18:16:26.284999 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 10 18:16:26 crc kubenswrapper[4799]: I1010 18:16:26.744144 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 10 18:16:26 crc kubenswrapper[4799]: I1010 18:16:26.829068 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa7adfb3-8f69-4867-a36d-daac33771cb0","Type":"ContainerStarted","Data":"a7b3d311c3e85347c3417b35def2dc9f4de4a1ca2c956380975be792fdffd24f"}
Oct 10 18:16:27 crc kubenswrapper[4799]: I1010 18:16:27.424046 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84184e7b-2222-4f92-9cdd-894da6f28546" path="/var/lib/kubelet/pods/84184e7b-2222-4f92-9cdd-894da6f28546/volumes"
Oct 10 18:16:27 crc kubenswrapper[4799]: I1010 18:16:27.843707 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa7adfb3-8f69-4867-a36d-daac33771cb0","Type":"ContainerStarted","Data":"5a69839289cd398c246a3bb2febf28aecf046403bad2a3403961cd2db0a7b1c9"}
Oct 10 18:16:27 crc kubenswrapper[4799]: I1010 18:16:27.847337 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"91a68421-466f-4eaa-a89b-59fa972a0726","Type":"ContainerStarted","Data":"d6f11f93f5f677bcc430793c741e1a1918e3106db22a9ec3a7333a4327e83587"}
Oct 10 18:16:28 crc kubenswrapper[4799]: I1010 18:16:28.859210 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"91a68421-466f-4eaa-a89b-59fa972a0726","Type":"ContainerStarted","Data":"d5e686d4860026c1d35681eb4ae04347ebd8cc3b79cc7cd890e13e521c469e88"}
Oct 10 18:16:28 crc kubenswrapper[4799]: I1010 18:16:28.865902 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa7adfb3-8f69-4867-a36d-daac33771cb0","Type":"ContainerStarted","Data":"2c89c14c0db1e7423dc94cf96d9fe6d4d8ce14b70bad05381a670607d4c644a2"}
Oct 10 18:16:28 crc kubenswrapper[4799]: I1010 18:16:28.885602 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-0" podStartSLOduration=2.6499815939999998 podStartE2EDuration="7.885580099s" podCreationTimestamp="2025-10-10 18:16:21 +0000 UTC" firstStartedPulling="2025-10-10 18:16:22.918643527 +0000 UTC m=+6276.426967642" lastFinishedPulling="2025-10-10 18:16:28.154242032 +0000 UTC m=+6281.662566147" observedRunningTime="2025-10-10 18:16:28.877981343 +0000 UTC m=+6282.386305488" watchObservedRunningTime="2025-10-10 18:16:28.885580099 +0000 UTC m=+6282.393904214"
Oct 10 18:16:29 crc kubenswrapper[4799]: I1010 18:16:29.886137 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa7adfb3-8f69-4867-a36d-daac33771cb0","Type":"ContainerStarted","Data":"ca63f5d8695242c9c7e5f9d8eac01829a3d8ea11069c9dee7e4ed9dd4fdd3cff"}
Oct 10 18:16:30 crc kubenswrapper[4799]: I1010 18:16:30.905683 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa7adfb3-8f69-4867-a36d-daac33771cb0","Type":"ContainerStarted","Data":"49685a3108a4c6ee9a3d5187929d9de2bb85dd7e17303254457dcd2766440dd2"}
Oct 10 18:16:30 crc kubenswrapper[4799]: I1010 18:16:30.906320 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Oct 10 18:16:30 crc kubenswrapper[4799]: I1010 18:16:30.944275 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.3379381 podStartE2EDuration="5.944246583s" podCreationTimestamp="2025-10-10 18:16:25 +0000 UTC" firstStartedPulling="2025-10-10 18:16:26.743898186 +0000 UTC m=+6280.252222301" lastFinishedPulling="2025-10-10 18:16:30.350206629 +0000 UTC m=+6283.858530784" observedRunningTime="2025-10-10 18:16:30.934338611 +0000 UTC m=+6284.442662766" watchObservedRunningTime="2025-10-10 18:16:30.944246583 +0000 UTC m=+6284.452570728"
Oct 10 18:16:32 crc kubenswrapper[4799]: I1010 18:16:32.038445 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-mdtk5"]
Oct 10 18:16:32 crc kubenswrapper[4799]: I1010 18:16:32.050772 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-mdtk5"]
Oct 10 18:16:33 crc kubenswrapper[4799]: I1010 18:16:33.426456 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a" path="/var/lib/kubelet/pods/a3f0852c-cf99-44c9-b9ed-fcbfb98a6c7a/volumes"
Oct 10 18:16:35 crc kubenswrapper[4799]: I1010 18:16:35.448644 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-create-v7rtp"]
Oct 10 18:16:35 crc kubenswrapper[4799]: I1010 18:16:35.451858 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-v7rtp"
Oct 10 18:16:35 crc kubenswrapper[4799]: I1010 18:16:35.458110 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-v7rtp"]
Oct 10 18:16:35 crc kubenswrapper[4799]: I1010 18:16:35.589520 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svh5t\" (UniqueName: \"kubernetes.io/projected/09be6bcc-c080-4408-b59f-b745601e9939-kube-api-access-svh5t\") pod \"manila-db-create-v7rtp\" (UID: \"09be6bcc-c080-4408-b59f-b745601e9939\") " pod="openstack/manila-db-create-v7rtp"
Oct 10 18:16:35 crc kubenswrapper[4799]: I1010 18:16:35.692424 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svh5t\" (UniqueName: \"kubernetes.io/projected/09be6bcc-c080-4408-b59f-b745601e9939-kube-api-access-svh5t\") pod \"manila-db-create-v7rtp\" (UID: \"09be6bcc-c080-4408-b59f-b745601e9939\") " pod="openstack/manila-db-create-v7rtp"
Oct 10 18:16:35 crc kubenswrapper[4799]: I1010 18:16:35.718794 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svh5t\" (UniqueName: \"kubernetes.io/projected/09be6bcc-c080-4408-b59f-b745601e9939-kube-api-access-svh5t\") pod \"manila-db-create-v7rtp\" (UID: \"09be6bcc-c080-4408-b59f-b745601e9939\") " pod="openstack/manila-db-create-v7rtp"
Oct 10 18:16:35 crc kubenswrapper[4799]: I1010 18:16:35.786600 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-v7rtp"
Oct 10 18:16:36 crc kubenswrapper[4799]: I1010 18:16:36.337924 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-v7rtp"]
Oct 10 18:16:37 crc kubenswrapper[4799]: I1010 18:16:37.003485 4799 generic.go:334] "Generic (PLEG): container finished" podID="09be6bcc-c080-4408-b59f-b745601e9939" containerID="1f9d34e22e530992d9c33c241556f25f95bd93ccb61d3d670829f850cbcb3347" exitCode=0
Oct 10 18:16:37 crc kubenswrapper[4799]: I1010 18:16:37.003558 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-v7rtp" event={"ID":"09be6bcc-c080-4408-b59f-b745601e9939","Type":"ContainerDied","Data":"1f9d34e22e530992d9c33c241556f25f95bd93ccb61d3d670829f850cbcb3347"}
Oct 10 18:16:37 crc kubenswrapper[4799]: I1010 18:16:37.004552 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-v7rtp" event={"ID":"09be6bcc-c080-4408-b59f-b745601e9939","Type":"ContainerStarted","Data":"3186f0d6bcddcc0471cc3935b2e52987436ee1d4e185c16618509ff50fb8b412"}
Oct 10 18:16:38 crc kubenswrapper[4799]: I1010 18:16:38.538914 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-v7rtp"
Oct 10 18:16:38 crc kubenswrapper[4799]: I1010 18:16:38.669971 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-svh5t\" (UniqueName: \"kubernetes.io/projected/09be6bcc-c080-4408-b59f-b745601e9939-kube-api-access-svh5t\") pod \"09be6bcc-c080-4408-b59f-b745601e9939\" (UID: \"09be6bcc-c080-4408-b59f-b745601e9939\") "
Oct 10 18:16:38 crc kubenswrapper[4799]: I1010 18:16:38.678122 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09be6bcc-c080-4408-b59f-b745601e9939-kube-api-access-svh5t" (OuterVolumeSpecName: "kube-api-access-svh5t") pod "09be6bcc-c080-4408-b59f-b745601e9939" (UID: "09be6bcc-c080-4408-b59f-b745601e9939"). InnerVolumeSpecName "kube-api-access-svh5t". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 18:16:38 crc kubenswrapper[4799]: I1010 18:16:38.772949 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-svh5t\" (UniqueName: \"kubernetes.io/projected/09be6bcc-c080-4408-b59f-b745601e9939-kube-api-access-svh5t\") on node \"crc\" DevicePath \"\""
Oct 10 18:16:39 crc kubenswrapper[4799]: I1010 18:16:39.039911 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-v7rtp" event={"ID":"09be6bcc-c080-4408-b59f-b745601e9939","Type":"ContainerDied","Data":"3186f0d6bcddcc0471cc3935b2e52987436ee1d4e185c16618509ff50fb8b412"}
Oct 10 18:16:39 crc kubenswrapper[4799]: I1010 18:16:39.040269 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3186f0d6bcddcc0471cc3935b2e52987436ee1d4e185c16618509ff50fb8b412"
Oct 10 18:16:39 crc kubenswrapper[4799]: I1010 18:16:39.039934 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-v7rtp"
Oct 10 18:16:45 crc kubenswrapper[4799]: I1010 18:16:45.249445 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 10 18:16:45 crc kubenswrapper[4799]: I1010 18:16:45.250218 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 10 18:16:45 crc kubenswrapper[4799]: I1010 18:16:45.567270 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-1b7d-account-create-bzw8s"]
Oct 10 18:16:45 crc kubenswrapper[4799]: E1010 18:16:45.568228 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09be6bcc-c080-4408-b59f-b745601e9939" containerName="mariadb-database-create"
Oct 10 18:16:45 crc kubenswrapper[4799]: I1010 18:16:45.568275 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="09be6bcc-c080-4408-b59f-b745601e9939" containerName="mariadb-database-create"
Oct 10 18:16:45 crc kubenswrapper[4799]: I1010 18:16:45.568923 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="09be6bcc-c080-4408-b59f-b745601e9939" containerName="mariadb-database-create"
Oct 10 18:16:45 crc kubenswrapper[4799]: I1010 18:16:45.570584 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-1b7d-account-create-bzw8s"
Oct 10 18:16:45 crc kubenswrapper[4799]: I1010 18:16:45.574640 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-db-secret"
Oct 10 18:16:45 crc kubenswrapper[4799]: I1010 18:16:45.602902 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-1b7d-account-create-bzw8s"]
Oct 10 18:16:45 crc kubenswrapper[4799]: I1010 18:16:45.767121 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgd78\" (UniqueName: \"kubernetes.io/projected/c5fd0979-3afa-415a-93b5-a6981524b6a8-kube-api-access-mgd78\") pod \"manila-1b7d-account-create-bzw8s\" (UID: \"c5fd0979-3afa-415a-93b5-a6981524b6a8\") " pod="openstack/manila-1b7d-account-create-bzw8s"
Oct 10 18:16:45 crc kubenswrapper[4799]: I1010 18:16:45.869826 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgd78\" (UniqueName: \"kubernetes.io/projected/c5fd0979-3afa-415a-93b5-a6981524b6a8-kube-api-access-mgd78\") pod \"manila-1b7d-account-create-bzw8s\" (UID: \"c5fd0979-3afa-415a-93b5-a6981524b6a8\") " pod="openstack/manila-1b7d-account-create-bzw8s"
Oct 10 18:16:45 crc kubenswrapper[4799]: I1010 18:16:45.906069 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgd78\" (UniqueName: \"kubernetes.io/projected/c5fd0979-3afa-415a-93b5-a6981524b6a8-kube-api-access-mgd78\") pod \"manila-1b7d-account-create-bzw8s\" (UID: \"c5fd0979-3afa-415a-93b5-a6981524b6a8\") " pod="openstack/manila-1b7d-account-create-bzw8s"
Oct 10 18:16:45 crc kubenswrapper[4799]: I1010 18:16:45.913676 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-1b7d-account-create-bzw8s"
Oct 10 18:16:46 crc kubenswrapper[4799]: W1010 18:16:46.435409 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc5fd0979_3afa_415a_93b5_a6981524b6a8.slice/crio-fb4846c87ed47dfcb7680bb8894c603178115e808a86e10f9434d76b79a938b0 WatchSource:0}: Error finding container fb4846c87ed47dfcb7680bb8894c603178115e808a86e10f9434d76b79a938b0: Status 404 returned error can't find the container with id fb4846c87ed47dfcb7680bb8894c603178115e808a86e10f9434d76b79a938b0
Oct 10 18:16:46 crc kubenswrapper[4799]: I1010 18:16:46.438286 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-1b7d-account-create-bzw8s"]
Oct 10 18:16:47 crc kubenswrapper[4799]: I1010 18:16:47.136314 4799 generic.go:334] "Generic (PLEG): container finished" podID="c5fd0979-3afa-415a-93b5-a6981524b6a8" containerID="ac061486347082cb7715350b7d7a5f48ba84d3c842e88bbfa380b73e2dfcbd6e" exitCode=0
Oct 10 18:16:47 crc kubenswrapper[4799]: I1010 18:16:47.136402 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-1b7d-account-create-bzw8s" event={"ID":"c5fd0979-3afa-415a-93b5-a6981524b6a8","Type":"ContainerDied","Data":"ac061486347082cb7715350b7d7a5f48ba84d3c842e88bbfa380b73e2dfcbd6e"}
Oct 10 18:16:47 crc kubenswrapper[4799]: I1010 18:16:47.136696 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-1b7d-account-create-bzw8s" event={"ID":"c5fd0979-3afa-415a-93b5-a6981524b6a8","Type":"ContainerStarted","Data":"fb4846c87ed47dfcb7680bb8894c603178115e808a86e10f9434d76b79a938b0"}
Oct 10 18:16:48 crc kubenswrapper[4799]: I1010 18:16:48.657018 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-1b7d-account-create-bzw8s"
Oct 10 18:16:48 crc kubenswrapper[4799]: I1010 18:16:48.841201 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mgd78\" (UniqueName: \"kubernetes.io/projected/c5fd0979-3afa-415a-93b5-a6981524b6a8-kube-api-access-mgd78\") pod \"c5fd0979-3afa-415a-93b5-a6981524b6a8\" (UID: \"c5fd0979-3afa-415a-93b5-a6981524b6a8\") "
Oct 10 18:16:48 crc kubenswrapper[4799]: I1010 18:16:48.855394 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5fd0979-3afa-415a-93b5-a6981524b6a8-kube-api-access-mgd78" (OuterVolumeSpecName: "kube-api-access-mgd78") pod "c5fd0979-3afa-415a-93b5-a6981524b6a8" (UID: "c5fd0979-3afa-415a-93b5-a6981524b6a8"). InnerVolumeSpecName "kube-api-access-mgd78". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 18:16:48 crc kubenswrapper[4799]: I1010 18:16:48.943568 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mgd78\" (UniqueName: \"kubernetes.io/projected/c5fd0979-3afa-415a-93b5-a6981524b6a8-kube-api-access-mgd78\") on node \"crc\" DevicePath \"\""
Oct 10 18:16:49 crc kubenswrapper[4799]: I1010 18:16:49.164199 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-1b7d-account-create-bzw8s"
Oct 10 18:16:49 crc kubenswrapper[4799]: I1010 18:16:49.164120 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-1b7d-account-create-bzw8s" event={"ID":"c5fd0979-3afa-415a-93b5-a6981524b6a8","Type":"ContainerDied","Data":"fb4846c87ed47dfcb7680bb8894c603178115e808a86e10f9434d76b79a938b0"}
Oct 10 18:16:49 crc kubenswrapper[4799]: I1010 18:16:49.164462 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fb4846c87ed47dfcb7680bb8894c603178115e808a86e10f9434d76b79a938b0"
Oct 10 18:16:50 crc kubenswrapper[4799]: I1010 18:16:50.925589 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-sync-w57kf"]
Oct 10 18:16:50 crc kubenswrapper[4799]: E1010 18:16:50.926789 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5fd0979-3afa-415a-93b5-a6981524b6a8" containerName="mariadb-account-create"
Oct 10 18:16:50 crc kubenswrapper[4799]: I1010 18:16:50.926816 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5fd0979-3afa-415a-93b5-a6981524b6a8" containerName="mariadb-account-create"
Oct 10 18:16:50 crc kubenswrapper[4799]: I1010 18:16:50.927272 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5fd0979-3afa-415a-93b5-a6981524b6a8" containerName="mariadb-account-create"
Oct 10 18:16:50 crc kubenswrapper[4799]: I1010 18:16:50.928594 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-w57kf"
Oct 10 18:16:50 crc kubenswrapper[4799]: I1010 18:16:50.930921 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-g8927"
Oct 10 18:16:50 crc kubenswrapper[4799]: I1010 18:16:50.932136 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data"
Oct 10 18:16:50 crc kubenswrapper[4799]: I1010 18:16:50.938628 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-w57kf"]
Oct 10 18:16:51 crc kubenswrapper[4799]: I1010 18:16:51.095115 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fskhg\" (UniqueName: \"kubernetes.io/projected/12fe564f-024c-412e-b497-b33c59ab57a6-kube-api-access-fskhg\") pod \"manila-db-sync-w57kf\" (UID: \"12fe564f-024c-412e-b497-b33c59ab57a6\") " pod="openstack/manila-db-sync-w57kf"
Oct 10 18:16:51 crc kubenswrapper[4799]: I1010 18:16:51.095198 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12fe564f-024c-412e-b497-b33c59ab57a6-combined-ca-bundle\") pod \"manila-db-sync-w57kf\" (UID: \"12fe564f-024c-412e-b497-b33c59ab57a6\") " pod="openstack/manila-db-sync-w57kf"
Oct 10 18:16:51 crc kubenswrapper[4799]: I1010 18:16:51.095247 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/12fe564f-024c-412e-b497-b33c59ab57a6-job-config-data\") pod \"manila-db-sync-w57kf\" (UID: \"12fe564f-024c-412e-b497-b33c59ab57a6\") " pod="openstack/manila-db-sync-w57kf"
Oct 10 18:16:51 crc kubenswrapper[4799]: I1010 18:16:51.095489 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12fe564f-024c-412e-b497-b33c59ab57a6-config-data\") pod \"manila-db-sync-w57kf\" (UID: \"12fe564f-024c-412e-b497-b33c59ab57a6\") " pod="openstack/manila-db-sync-w57kf"
Oct 10 18:16:51 crc kubenswrapper[4799]: I1010 18:16:51.198091 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/12fe564f-024c-412e-b497-b33c59ab57a6-job-config-data\") pod \"manila-db-sync-w57kf\" (UID: \"12fe564f-024c-412e-b497-b33c59ab57a6\") " pod="openstack/manila-db-sync-w57kf"
Oct 10 18:16:51 crc kubenswrapper[4799]: I1010 18:16:51.198328 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12fe564f-024c-412e-b497-b33c59ab57a6-config-data\") pod \"manila-db-sync-w57kf\" (UID: \"12fe564f-024c-412e-b497-b33c59ab57a6\") " pod="openstack/manila-db-sync-w57kf"
Oct 10 18:16:51 crc kubenswrapper[4799]: I1010 18:16:51.198479 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fskhg\" (UniqueName: \"kubernetes.io/projected/12fe564f-024c-412e-b497-b33c59ab57a6-kube-api-access-fskhg\") pod \"manila-db-sync-w57kf\" (UID: \"12fe564f-024c-412e-b497-b33c59ab57a6\") " pod="openstack/manila-db-sync-w57kf"
Oct 10 18:16:51 crc kubenswrapper[4799]: I1010 18:16:51.198542 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12fe564f-024c-412e-b497-b33c59ab57a6-combined-ca-bundle\") pod \"manila-db-sync-w57kf\" (UID: \"12fe564f-024c-412e-b497-b33c59ab57a6\") " pod="openstack/manila-db-sync-w57kf"
Oct 10 18:16:51 crc kubenswrapper[4799]: I1010 18:16:51.204234 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12fe564f-024c-412e-b497-b33c59ab57a6-combined-ca-bundle\") pod \"manila-db-sync-w57kf\" (UID: \"12fe564f-024c-412e-b497-b33c59ab57a6\") " pod="openstack/manila-db-sync-w57kf"
Oct 10 18:16:51 crc kubenswrapper[4799]: I1010 18:16:51.205528 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12fe564f-024c-412e-b497-b33c59ab57a6-config-data\") pod \"manila-db-sync-w57kf\" (UID: \"12fe564f-024c-412e-b497-b33c59ab57a6\") " pod="openstack/manila-db-sync-w57kf"
Oct 10 18:16:51 crc kubenswrapper[4799]: I1010 18:16:51.205876 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/12fe564f-024c-412e-b497-b33c59ab57a6-job-config-data\") pod \"manila-db-sync-w57kf\" (UID: \"12fe564f-024c-412e-b497-b33c59ab57a6\") " pod="openstack/manila-db-sync-w57kf"
Oct 10 18:16:51 crc kubenswrapper[4799]: I1010 18:16:51.220156 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fskhg\" (UniqueName: \"kubernetes.io/projected/12fe564f-024c-412e-b497-b33c59ab57a6-kube-api-access-fskhg\") pod \"manila-db-sync-w57kf\" (UID: \"12fe564f-024c-412e-b497-b33c59ab57a6\") " pod="openstack/manila-db-sync-w57kf"
Oct 10 18:16:51 crc kubenswrapper[4799]: I1010 18:16:51.287116 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-w57kf"
Oct 10 18:16:51 crc kubenswrapper[4799]: I1010 18:16:51.882711 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-w57kf"]
Oct 10 18:16:51 crc kubenswrapper[4799]: W1010 18:16:51.895041 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod12fe564f_024c_412e_b497_b33c59ab57a6.slice/crio-0af05a70994e3af34de18f42362a22805526f980a0debfd693fa70aa72253463 WatchSource:0}: Error finding container 0af05a70994e3af34de18f42362a22805526f980a0debfd693fa70aa72253463: Status 404 returned error can't find the container with id 0af05a70994e3af34de18f42362a22805526f980a0debfd693fa70aa72253463
Oct 10 18:16:52 crc kubenswrapper[4799]: I1010 18:16:52.208583 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-w57kf" event={"ID":"12fe564f-024c-412e-b497-b33c59ab57a6","Type":"ContainerStarted","Data":"0af05a70994e3af34de18f42362a22805526f980a0debfd693fa70aa72253463"}
Oct 10 18:16:56 crc kubenswrapper[4799]: I1010 18:16:56.290530 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Oct 10 18:16:57 crc kubenswrapper[4799]: I1010 18:16:57.269882 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-w57kf" event={"ID":"12fe564f-024c-412e-b497-b33c59ab57a6","Type":"ContainerStarted","Data":"5e0ff81020c44541736e3f63b3b543d3a8530c212888ae5f8e23384a1ce119b6"}
Oct 10 18:16:57 crc kubenswrapper[4799]: I1010 18:16:57.300565 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-db-sync-w57kf" podStartSLOduration=2.985739555 podStartE2EDuration="7.300534727s" podCreationTimestamp="2025-10-10 18:16:50 +0000 UTC" firstStartedPulling="2025-10-10 18:16:51.904453094 +0000 UTC m=+6305.412777209" lastFinishedPulling="2025-10-10 18:16:56.219248256 +0000 UTC m=+6309.727572381" observedRunningTime="2025-10-10 18:16:57.288710129 +0000 UTC m=+6310.797034324" watchObservedRunningTime="2025-10-10 18:16:57.300534727 +0000 UTC m=+6310.808858872"
Oct 10 18:16:59 crc kubenswrapper[4799]: I1010 18:16:59.295463 4799 generic.go:334] "Generic (PLEG): container finished" podID="12fe564f-024c-412e-b497-b33c59ab57a6" containerID="5e0ff81020c44541736e3f63b3b543d3a8530c212888ae5f8e23384a1ce119b6" exitCode=0
Oct 10 18:16:59 crc kubenswrapper[4799]: I1010 18:16:59.295677 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-w57kf" event={"ID":"12fe564f-024c-412e-b497-b33c59ab57a6","Type":"ContainerDied","Data":"5e0ff81020c44541736e3f63b3b543d3a8530c212888ae5f8e23384a1ce119b6"}
Oct 10 18:17:00 crc kubenswrapper[4799]: I1010 18:17:00.926595 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-w57kf"
Oct 10 18:17:00 crc kubenswrapper[4799]: I1010 18:17:00.961422 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fskhg\" (UniqueName: \"kubernetes.io/projected/12fe564f-024c-412e-b497-b33c59ab57a6-kube-api-access-fskhg\") pod \"12fe564f-024c-412e-b497-b33c59ab57a6\" (UID: \"12fe564f-024c-412e-b497-b33c59ab57a6\") "
Oct 10 18:17:00 crc kubenswrapper[4799]: I1010 18:17:00.961518 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12fe564f-024c-412e-b497-b33c59ab57a6-config-data\") pod \"12fe564f-024c-412e-b497-b33c59ab57a6\" (UID: \"12fe564f-024c-412e-b497-b33c59ab57a6\") "
Oct 10 18:17:00 crc kubenswrapper[4799]: I1010 18:17:00.961574 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12fe564f-024c-412e-b497-b33c59ab57a6-combined-ca-bundle\") pod \"12fe564f-024c-412e-b497-b33c59ab57a6\" (UID: \"12fe564f-024c-412e-b497-b33c59ab57a6\") "
Oct 10 18:17:00 crc kubenswrapper[4799]: I1010 18:17:00.961927 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/12fe564f-024c-412e-b497-b33c59ab57a6-job-config-data\") pod \"12fe564f-024c-412e-b497-b33c59ab57a6\" (UID: \"12fe564f-024c-412e-b497-b33c59ab57a6\") "
Oct 10 18:17:00 crc kubenswrapper[4799]: I1010 18:17:00.983156 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12fe564f-024c-412e-b497-b33c59ab57a6-kube-api-access-fskhg" (OuterVolumeSpecName: "kube-api-access-fskhg") pod "12fe564f-024c-412e-b497-b33c59ab57a6" (UID: "12fe564f-024c-412e-b497-b33c59ab57a6"). InnerVolumeSpecName "kube-api-access-fskhg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 18:17:00 crc kubenswrapper[4799]: I1010 18:17:00.983529 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12fe564f-024c-412e-b497-b33c59ab57a6-job-config-data" (OuterVolumeSpecName: "job-config-data") pod "12fe564f-024c-412e-b497-b33c59ab57a6" (UID: "12fe564f-024c-412e-b497-b33c59ab57a6"). InnerVolumeSpecName "job-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:17:00 crc kubenswrapper[4799]: I1010 18:17:00.990941 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12fe564f-024c-412e-b497-b33c59ab57a6-config-data" (OuterVolumeSpecName: "config-data") pod "12fe564f-024c-412e-b497-b33c59ab57a6" (UID: "12fe564f-024c-412e-b497-b33c59ab57a6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:17:01 crc kubenswrapper[4799]: I1010 18:17:01.021930 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12fe564f-024c-412e-b497-b33c59ab57a6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "12fe564f-024c-412e-b497-b33c59ab57a6" (UID: "12fe564f-024c-412e-b497-b33c59ab57a6"). InnerVolumeSpecName "combined-ca-bundle".
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:17:01 crc kubenswrapper[4799]: I1010 18:17:01.065658 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fskhg\" (UniqueName: \"kubernetes.io/projected/12fe564f-024c-412e-b497-b33c59ab57a6-kube-api-access-fskhg\") on node \"crc\" DevicePath \"\"" Oct 10 18:17:01 crc kubenswrapper[4799]: I1010 18:17:01.065730 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12fe564f-024c-412e-b497-b33c59ab57a6-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 18:17:01 crc kubenswrapper[4799]: I1010 18:17:01.065785 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12fe564f-024c-412e-b497-b33c59ab57a6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:17:01 crc kubenswrapper[4799]: I1010 18:17:01.065809 4799 reconciler_common.go:293] "Volume detached for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/12fe564f-024c-412e-b497-b33c59ab57a6-job-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 18:17:01 crc kubenswrapper[4799]: I1010 18:17:01.325045 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-w57kf" event={"ID":"12fe564f-024c-412e-b497-b33c59ab57a6","Type":"ContainerDied","Data":"0af05a70994e3af34de18f42362a22805526f980a0debfd693fa70aa72253463"} Oct 10 18:17:01 crc kubenswrapper[4799]: I1010 18:17:01.325121 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0af05a70994e3af34de18f42362a22805526f980a0debfd693fa70aa72253463" Oct 10 18:17:01 crc kubenswrapper[4799]: I1010 18:17:01.325141 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-w57kf" Oct 10 18:17:01 crc kubenswrapper[4799]: I1010 18:17:01.832134 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-84c8d469bf-8gjr7"] Oct 10 18:17:01 crc kubenswrapper[4799]: E1010 18:17:01.840036 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12fe564f-024c-412e-b497-b33c59ab57a6" containerName="manila-db-sync" Oct 10 18:17:01 crc kubenswrapper[4799]: I1010 18:17:01.840082 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="12fe564f-024c-412e-b497-b33c59ab57a6" containerName="manila-db-sync" Oct 10 18:17:01 crc kubenswrapper[4799]: I1010 18:17:01.840569 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="12fe564f-024c-412e-b497-b33c59ab57a6" containerName="manila-db-sync" Oct 10 18:17:01 crc kubenswrapper[4799]: I1010 18:17:01.842004 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84c8d469bf-8gjr7" Oct 10 18:17:01 crc kubenswrapper[4799]: I1010 18:17:01.859228 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Oct 10 18:17:01 crc kubenswrapper[4799]: I1010 18:17:01.866344 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Oct 10 18:17:01 crc kubenswrapper[4799]: I1010 18:17:01.868138 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scripts" Oct 10 18:17:01 crc kubenswrapper[4799]: I1010 18:17:01.868362 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-g8927" Oct 10 18:17:01 crc kubenswrapper[4799]: I1010 18:17:01.869017 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Oct 10 18:17:01 crc kubenswrapper[4799]: I1010 18:17:01.869424 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Oct 10 18:17:01 crc kubenswrapper[4799]: I1010 18:17:01.870556 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84c8d469bf-8gjr7"] Oct 10 18:17:01 crc kubenswrapper[4799]: I1010 18:17:01.918430 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Oct 10 18:17:01 crc kubenswrapper[4799]: I1010 18:17:01.925076 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/27d181f5-9c3d-4193-80d5-8a45ea9a282f-ovsdbserver-nb\") pod \"dnsmasq-dns-84c8d469bf-8gjr7\" (UID: \"27d181f5-9c3d-4193-80d5-8a45ea9a282f\") " pod="openstack/dnsmasq-dns-84c8d469bf-8gjr7" Oct 10 18:17:01 crc kubenswrapper[4799]: I1010 18:17:01.925191 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8j4jv\" (UniqueName: \"kubernetes.io/projected/27d181f5-9c3d-4193-80d5-8a45ea9a282f-kube-api-access-8j4jv\") pod \"dnsmasq-dns-84c8d469bf-8gjr7\" (UID: \"27d181f5-9c3d-4193-80d5-8a45ea9a282f\") " pod="openstack/dnsmasq-dns-84c8d469bf-8gjr7" Oct 10 18:17:01 crc kubenswrapper[4799]: I1010 18:17:01.925304 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/27d181f5-9c3d-4193-80d5-8a45ea9a282f-dns-svc\") pod \"dnsmasq-dns-84c8d469bf-8gjr7\" (UID: \"27d181f5-9c3d-4193-80d5-8a45ea9a282f\") " pod="openstack/dnsmasq-dns-84c8d469bf-8gjr7" Oct 10 18:17:01 crc kubenswrapper[4799]: I1010 18:17:01.925406 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27d181f5-9c3d-4193-80d5-8a45ea9a282f-config\") pod \"dnsmasq-dns-84c8d469bf-8gjr7\" (UID: \"27d181f5-9c3d-4193-80d5-8a45ea9a282f\") " pod="openstack/dnsmasq-dns-84c8d469bf-8gjr7" Oct 10 18:17:01 crc kubenswrapper[4799]: I1010 18:17:01.925453 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/27d181f5-9c3d-4193-80d5-8a45ea9a282f-ovsdbserver-sb\") pod \"dnsmasq-dns-84c8d469bf-8gjr7\" (UID: \"27d181f5-9c3d-4193-80d5-8a45ea9a282f\") " pod="openstack/dnsmasq-dns-84c8d469bf-8gjr7" Oct 10 18:17:01 crc kubenswrapper[4799]: I1010 18:17:01.930596 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Oct 10 18:17:01 crc kubenswrapper[4799]: I1010 18:17:01.938525 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Oct 10 18:17:01 crc kubenswrapper[4799]: I1010 18:17:01.944102 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Oct 10 18:17:01 crc kubenswrapper[4799]: I1010 18:17:01.993439 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.027475 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wfcdq\" (UniqueName: \"kubernetes.io/projected/fc512908-5386-4b26-8563-1bb18eeb5a7b-kube-api-access-wfcdq\") pod \"manila-share-share1-0\" (UID: \"fc512908-5386-4b26-8563-1bb18eeb5a7b\") " pod="openstack/manila-share-share1-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.027533 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d5974bf-9794-4599-92ab-7d9f833aa967-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"6d5974bf-9794-4599-92ab-7d9f833aa967\") " pod="openstack/manila-scheduler-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.027552 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nbpnw\" (UniqueName: \"kubernetes.io/projected/6d5974bf-9794-4599-92ab-7d9f833aa967-kube-api-access-nbpnw\") pod \"manila-scheduler-0\" (UID: \"6d5974bf-9794-4599-92ab-7d9f833aa967\") " pod="openstack/manila-scheduler-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.027590 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc512908-5386-4b26-8563-1bb18eeb5a7b-config-data\") pod \"manila-share-share1-0\" (UID: \"fc512908-5386-4b26-8563-1bb18eeb5a7b\") " pod="openstack/manila-share-share1-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.027606 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fc512908-5386-4b26-8563-1bb18eeb5a7b-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"fc512908-5386-4b26-8563-1bb18eeb5a7b\") " pod="openstack/manila-share-share1-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.027638 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/27d181f5-9c3d-4193-80d5-8a45ea9a282f-ovsdbserver-nb\") pod \"dnsmasq-dns-84c8d469bf-8gjr7\" (UID: \"27d181f5-9c3d-4193-80d5-8a45ea9a282f\") " pod="openstack/dnsmasq-dns-84c8d469bf-8gjr7" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.027653 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6d5974bf-9794-4599-92ab-7d9f833aa967-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"6d5974bf-9794-4599-92ab-7d9f833aa967\") " pod="openstack/manila-scheduler-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.027675 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: 
\"kubernetes.io/host-path/fc512908-5386-4b26-8563-1bb18eeb5a7b-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"fc512908-5386-4b26-8563-1bb18eeb5a7b\") " pod="openstack/manila-share-share1-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.027700 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6d5974bf-9794-4599-92ab-7d9f833aa967-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"6d5974bf-9794-4599-92ab-7d9f833aa967\") " pod="openstack/manila-scheduler-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.027713 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc512908-5386-4b26-8563-1bb18eeb5a7b-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"fc512908-5386-4b26-8563-1bb18eeb5a7b\") " pod="openstack/manila-share-share1-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.027749 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8j4jv\" (UniqueName: \"kubernetes.io/projected/27d181f5-9c3d-4193-80d5-8a45ea9a282f-kube-api-access-8j4jv\") pod \"dnsmasq-dns-84c8d469bf-8gjr7\" (UID: \"27d181f5-9c3d-4193-80d5-8a45ea9a282f\") " pod="openstack/dnsmasq-dns-84c8d469bf-8gjr7" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.027785 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d5974bf-9794-4599-92ab-7d9f833aa967-config-data\") pod \"manila-scheduler-0\" (UID: \"6d5974bf-9794-4599-92ab-7d9f833aa967\") " pod="openstack/manila-scheduler-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.027829 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/27d181f5-9c3d-4193-80d5-8a45ea9a282f-dns-svc\") pod \"dnsmasq-dns-84c8d469bf-8gjr7\" (UID: \"27d181f5-9c3d-4193-80d5-8a45ea9a282f\") " pod="openstack/dnsmasq-dns-84c8d469bf-8gjr7" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.027846 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc512908-5386-4b26-8563-1bb18eeb5a7b-scripts\") pod \"manila-share-share1-0\" (UID: \"fc512908-5386-4b26-8563-1bb18eeb5a7b\") " pod="openstack/manila-share-share1-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.027870 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/fc512908-5386-4b26-8563-1bb18eeb5a7b-ceph\") pod \"manila-share-share1-0\" (UID: \"fc512908-5386-4b26-8563-1bb18eeb5a7b\") " pod="openstack/manila-share-share1-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.027896 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fc512908-5386-4b26-8563-1bb18eeb5a7b-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"fc512908-5386-4b26-8563-1bb18eeb5a7b\") " pod="openstack/manila-share-share1-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.027928 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27d181f5-9c3d-4193-80d5-8a45ea9a282f-config\") pod 
\"dnsmasq-dns-84c8d469bf-8gjr7\" (UID: \"27d181f5-9c3d-4193-80d5-8a45ea9a282f\") " pod="openstack/dnsmasq-dns-84c8d469bf-8gjr7" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.027953 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/27d181f5-9c3d-4193-80d5-8a45ea9a282f-ovsdbserver-sb\") pod \"dnsmasq-dns-84c8d469bf-8gjr7\" (UID: \"27d181f5-9c3d-4193-80d5-8a45ea9a282f\") " pod="openstack/dnsmasq-dns-84c8d469bf-8gjr7" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.027994 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d5974bf-9794-4599-92ab-7d9f833aa967-scripts\") pod \"manila-scheduler-0\" (UID: \"6d5974bf-9794-4599-92ab-7d9f833aa967\") " pod="openstack/manila-scheduler-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.028773 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/27d181f5-9c3d-4193-80d5-8a45ea9a282f-ovsdbserver-nb\") pod \"dnsmasq-dns-84c8d469bf-8gjr7\" (UID: \"27d181f5-9c3d-4193-80d5-8a45ea9a282f\") " pod="openstack/dnsmasq-dns-84c8d469bf-8gjr7" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.029590 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/27d181f5-9c3d-4193-80d5-8a45ea9a282f-dns-svc\") pod \"dnsmasq-dns-84c8d469bf-8gjr7\" (UID: \"27d181f5-9c3d-4193-80d5-8a45ea9a282f\") " pod="openstack/dnsmasq-dns-84c8d469bf-8gjr7" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.030253 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27d181f5-9c3d-4193-80d5-8a45ea9a282f-config\") pod \"dnsmasq-dns-84c8d469bf-8gjr7\" (UID: \"27d181f5-9c3d-4193-80d5-8a45ea9a282f\") " pod="openstack/dnsmasq-dns-84c8d469bf-8gjr7" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.030560 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/27d181f5-9c3d-4193-80d5-8a45ea9a282f-ovsdbserver-sb\") pod \"dnsmasq-dns-84c8d469bf-8gjr7\" (UID: \"27d181f5-9c3d-4193-80d5-8a45ea9a282f\") " pod="openstack/dnsmasq-dns-84c8d469bf-8gjr7" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.053414 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8j4jv\" (UniqueName: \"kubernetes.io/projected/27d181f5-9c3d-4193-80d5-8a45ea9a282f-kube-api-access-8j4jv\") pod \"dnsmasq-dns-84c8d469bf-8gjr7\" (UID: \"27d181f5-9c3d-4193-80d5-8a45ea9a282f\") " pod="openstack/dnsmasq-dns-84c8d469bf-8gjr7" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.056958 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-api-0"] Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.059048 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.062127 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.062592 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.129861 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d5974bf-9794-4599-92ab-7d9f833aa967-config-data\") pod \"manila-scheduler-0\" (UID: \"6d5974bf-9794-4599-92ab-7d9f833aa967\") " pod="openstack/manila-scheduler-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.129909 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d2ccf03-5031-462b-8dd8-7b716885ec78-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"9d2ccf03-5031-462b-8dd8-7b716885ec78\") " pod="openstack/manila-api-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.129943 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d2ccf03-5031-462b-8dd8-7b716885ec78-scripts\") pod \"manila-api-0\" (UID: \"9d2ccf03-5031-462b-8dd8-7b716885ec78\") " pod="openstack/manila-api-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.129972 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc512908-5386-4b26-8563-1bb18eeb5a7b-scripts\") pod \"manila-share-share1-0\" (UID: \"fc512908-5386-4b26-8563-1bb18eeb5a7b\") " pod="openstack/manila-share-share1-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.130009 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/fc512908-5386-4b26-8563-1bb18eeb5a7b-ceph\") pod \"manila-share-share1-0\" (UID: \"fc512908-5386-4b26-8563-1bb18eeb5a7b\") " pod="openstack/manila-share-share1-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.130121 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fc512908-5386-4b26-8563-1bb18eeb5a7b-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"fc512908-5386-4b26-8563-1bb18eeb5a7b\") " pod="openstack/manila-share-share1-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.130253 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d2ccf03-5031-462b-8dd8-7b716885ec78-logs\") pod \"manila-api-0\" (UID: \"9d2ccf03-5031-462b-8dd8-7b716885ec78\") " pod="openstack/manila-api-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.130297 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9d2ccf03-5031-462b-8dd8-7b716885ec78-etc-machine-id\") pod \"manila-api-0\" (UID: \"9d2ccf03-5031-462b-8dd8-7b716885ec78\") " pod="openstack/manila-api-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.130418 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d5974bf-9794-4599-92ab-7d9f833aa967-scripts\") pod 
\"manila-scheduler-0\" (UID: \"6d5974bf-9794-4599-92ab-7d9f833aa967\") " pod="openstack/manila-scheduler-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.130453 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wfcdq\" (UniqueName: \"kubernetes.io/projected/fc512908-5386-4b26-8563-1bb18eeb5a7b-kube-api-access-wfcdq\") pod \"manila-share-share1-0\" (UID: \"fc512908-5386-4b26-8563-1bb18eeb5a7b\") " pod="openstack/manila-share-share1-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.130498 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d5974bf-9794-4599-92ab-7d9f833aa967-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"6d5974bf-9794-4599-92ab-7d9f833aa967\") " pod="openstack/manila-scheduler-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.130518 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbpnw\" (UniqueName: \"kubernetes.io/projected/6d5974bf-9794-4599-92ab-7d9f833aa967-kube-api-access-nbpnw\") pod \"manila-scheduler-0\" (UID: \"6d5974bf-9794-4599-92ab-7d9f833aa967\") " pod="openstack/manila-scheduler-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.130546 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdzlq\" (UniqueName: \"kubernetes.io/projected/9d2ccf03-5031-462b-8dd8-7b716885ec78-kube-api-access-pdzlq\") pod \"manila-api-0\" (UID: \"9d2ccf03-5031-462b-8dd8-7b716885ec78\") " pod="openstack/manila-api-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.130610 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc512908-5386-4b26-8563-1bb18eeb5a7b-config-data\") pod \"manila-share-share1-0\" (UID: \"fc512908-5386-4b26-8563-1bb18eeb5a7b\") " pod="openstack/manila-share-share1-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.130632 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d2ccf03-5031-462b-8dd8-7b716885ec78-config-data\") pod \"manila-api-0\" (UID: \"9d2ccf03-5031-462b-8dd8-7b716885ec78\") " pod="openstack/manila-api-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.130652 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fc512908-5386-4b26-8563-1bb18eeb5a7b-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"fc512908-5386-4b26-8563-1bb18eeb5a7b\") " pod="openstack/manila-share-share1-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.130698 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6d5974bf-9794-4599-92ab-7d9f833aa967-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"6d5974bf-9794-4599-92ab-7d9f833aa967\") " pod="openstack/manila-scheduler-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.130729 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9d2ccf03-5031-462b-8dd8-7b716885ec78-config-data-custom\") pod \"manila-api-0\" (UID: \"9d2ccf03-5031-462b-8dd8-7b716885ec78\") " pod="openstack/manila-api-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 
18:17:02.130771 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/fc512908-5386-4b26-8563-1bb18eeb5a7b-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"fc512908-5386-4b26-8563-1bb18eeb5a7b\") " pod="openstack/manila-share-share1-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.130832 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6d5974bf-9794-4599-92ab-7d9f833aa967-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"6d5974bf-9794-4599-92ab-7d9f833aa967\") " pod="openstack/manila-scheduler-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.130849 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc512908-5386-4b26-8563-1bb18eeb5a7b-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"fc512908-5386-4b26-8563-1bb18eeb5a7b\") " pod="openstack/manila-share-share1-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.131549 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fc512908-5386-4b26-8563-1bb18eeb5a7b-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"fc512908-5386-4b26-8563-1bb18eeb5a7b\") " pod="openstack/manila-share-share1-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.131603 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6d5974bf-9794-4599-92ab-7d9f833aa967-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"6d5974bf-9794-4599-92ab-7d9f833aa967\") " pod="openstack/manila-scheduler-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.131694 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/fc512908-5386-4b26-8563-1bb18eeb5a7b-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"fc512908-5386-4b26-8563-1bb18eeb5a7b\") " pod="openstack/manila-share-share1-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.136650 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d5974bf-9794-4599-92ab-7d9f833aa967-scripts\") pod \"manila-scheduler-0\" (UID: \"6d5974bf-9794-4599-92ab-7d9f833aa967\") " pod="openstack/manila-scheduler-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.136679 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/fc512908-5386-4b26-8563-1bb18eeb5a7b-ceph\") pod \"manila-share-share1-0\" (UID: \"fc512908-5386-4b26-8563-1bb18eeb5a7b\") " pod="openstack/manila-share-share1-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.136947 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc512908-5386-4b26-8563-1bb18eeb5a7b-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"fc512908-5386-4b26-8563-1bb18eeb5a7b\") " pod="openstack/manila-share-share1-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.137338 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc512908-5386-4b26-8563-1bb18eeb5a7b-config-data\") pod \"manila-share-share1-0\" (UID: 
\"fc512908-5386-4b26-8563-1bb18eeb5a7b\") " pod="openstack/manila-share-share1-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.139218 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc512908-5386-4b26-8563-1bb18eeb5a7b-scripts\") pod \"manila-share-share1-0\" (UID: \"fc512908-5386-4b26-8563-1bb18eeb5a7b\") " pod="openstack/manila-share-share1-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.139304 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6d5974bf-9794-4599-92ab-7d9f833aa967-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"6d5974bf-9794-4599-92ab-7d9f833aa967\") " pod="openstack/manila-scheduler-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.139661 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fc512908-5386-4b26-8563-1bb18eeb5a7b-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"fc512908-5386-4b26-8563-1bb18eeb5a7b\") " pod="openstack/manila-share-share1-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.141997 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d5974bf-9794-4599-92ab-7d9f833aa967-config-data\") pod \"manila-scheduler-0\" (UID: \"6d5974bf-9794-4599-92ab-7d9f833aa967\") " pod="openstack/manila-scheduler-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.150268 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wfcdq\" (UniqueName: \"kubernetes.io/projected/fc512908-5386-4b26-8563-1bb18eeb5a7b-kube-api-access-wfcdq\") pod \"manila-share-share1-0\" (UID: \"fc512908-5386-4b26-8563-1bb18eeb5a7b\") " pod="openstack/manila-share-share1-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.152911 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d5974bf-9794-4599-92ab-7d9f833aa967-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"6d5974bf-9794-4599-92ab-7d9f833aa967\") " pod="openstack/manila-scheduler-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.155393 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nbpnw\" (UniqueName: \"kubernetes.io/projected/6d5974bf-9794-4599-92ab-7d9f833aa967-kube-api-access-nbpnw\") pod \"manila-scheduler-0\" (UID: \"6d5974bf-9794-4599-92ab-7d9f833aa967\") " pod="openstack/manila-scheduler-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.175846 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84c8d469bf-8gjr7" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.208639 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.233736 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdzlq\" (UniqueName: \"kubernetes.io/projected/9d2ccf03-5031-462b-8dd8-7b716885ec78-kube-api-access-pdzlq\") pod \"manila-api-0\" (UID: \"9d2ccf03-5031-462b-8dd8-7b716885ec78\") " pod="openstack/manila-api-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.233847 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d2ccf03-5031-462b-8dd8-7b716885ec78-config-data\") pod \"manila-api-0\" (UID: \"9d2ccf03-5031-462b-8dd8-7b716885ec78\") " pod="openstack/manila-api-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.233890 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9d2ccf03-5031-462b-8dd8-7b716885ec78-config-data-custom\") pod \"manila-api-0\" (UID: \"9d2ccf03-5031-462b-8dd8-7b716885ec78\") " pod="openstack/manila-api-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.233953 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d2ccf03-5031-462b-8dd8-7b716885ec78-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"9d2ccf03-5031-462b-8dd8-7b716885ec78\") " pod="openstack/manila-api-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.233984 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d2ccf03-5031-462b-8dd8-7b716885ec78-scripts\") pod \"manila-api-0\" (UID: \"9d2ccf03-5031-462b-8dd8-7b716885ec78\") " pod="openstack/manila-api-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.234048 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d2ccf03-5031-462b-8dd8-7b716885ec78-logs\") pod \"manila-api-0\" (UID: \"9d2ccf03-5031-462b-8dd8-7b716885ec78\") " pod="openstack/manila-api-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.234066 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9d2ccf03-5031-462b-8dd8-7b716885ec78-etc-machine-id\") pod \"manila-api-0\" (UID: \"9d2ccf03-5031-462b-8dd8-7b716885ec78\") " pod="openstack/manila-api-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.234172 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9d2ccf03-5031-462b-8dd8-7b716885ec78-etc-machine-id\") pod \"manila-api-0\" (UID: \"9d2ccf03-5031-462b-8dd8-7b716885ec78\") " pod="openstack/manila-api-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.237546 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d2ccf03-5031-462b-8dd8-7b716885ec78-logs\") pod \"manila-api-0\" (UID: \"9d2ccf03-5031-462b-8dd8-7b716885ec78\") " pod="openstack/manila-api-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.238039 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9d2ccf03-5031-462b-8dd8-7b716885ec78-config-data-custom\") pod \"manila-api-0\" (UID: \"9d2ccf03-5031-462b-8dd8-7b716885ec78\") " 
pod="openstack/manila-api-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.238500 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d2ccf03-5031-462b-8dd8-7b716885ec78-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"9d2ccf03-5031-462b-8dd8-7b716885ec78\") " pod="openstack/manila-api-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.239038 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d2ccf03-5031-462b-8dd8-7b716885ec78-config-data\") pod \"manila-api-0\" (UID: \"9d2ccf03-5031-462b-8dd8-7b716885ec78\") " pod="openstack/manila-api-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.252102 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d2ccf03-5031-462b-8dd8-7b716885ec78-scripts\") pod \"manila-api-0\" (UID: \"9d2ccf03-5031-462b-8dd8-7b716885ec78\") " pod="openstack/manila-api-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.252304 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdzlq\" (UniqueName: \"kubernetes.io/projected/9d2ccf03-5031-462b-8dd8-7b716885ec78-kube-api-access-pdzlq\") pod \"manila-api-0\" (UID: \"9d2ccf03-5031-462b-8dd8-7b716885ec78\") " pod="openstack/manila-api-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.260647 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.430396 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.783528 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84c8d469bf-8gjr7"] Oct 10 18:17:02 crc kubenswrapper[4799]: I1010 18:17:02.921171 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Oct 10 18:17:03 crc kubenswrapper[4799]: I1010 18:17:03.079710 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Oct 10 18:17:03 crc kubenswrapper[4799]: W1010 18:17:03.095622 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9d2ccf03_5031_462b_8dd8_7b716885ec78.slice/crio-ff8dff671991e00752b5a9dbff2c016123943d66ad80e7d95434ba87b3cfb7f1 WatchSource:0}: Error finding container ff8dff671991e00752b5a9dbff2c016123943d66ad80e7d95434ba87b3cfb7f1: Status 404 returned error can't find the container with id ff8dff671991e00752b5a9dbff2c016123943d66ad80e7d95434ba87b3cfb7f1 Oct 10 18:17:03 crc kubenswrapper[4799]: W1010 18:17:03.180247 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfc512908_5386_4b26_8563_1bb18eeb5a7b.slice/crio-a50a55872b67f287af674ead4bf60d16b6e60930fbd0378957f7ddb96c595942 WatchSource:0}: Error finding container a50a55872b67f287af674ead4bf60d16b6e60930fbd0378957f7ddb96c595942: Status 404 returned error can't find the container with id a50a55872b67f287af674ead4bf60d16b6e60930fbd0378957f7ddb96c595942 Oct 10 18:17:03 crc kubenswrapper[4799]: I1010 18:17:03.187749 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Oct 10 18:17:03 crc kubenswrapper[4799]: I1010 18:17:03.353233 4799 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"6d5974bf-9794-4599-92ab-7d9f833aa967","Type":"ContainerStarted","Data":"51aaf8432a48327cfa388c81cfeda149652b06538b297f5fc8b3b299a3de184a"} Oct 10 18:17:03 crc kubenswrapper[4799]: I1010 18:17:03.354263 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"fc512908-5386-4b26-8563-1bb18eeb5a7b","Type":"ContainerStarted","Data":"a50a55872b67f287af674ead4bf60d16b6e60930fbd0378957f7ddb96c595942"} Oct 10 18:17:03 crc kubenswrapper[4799]: I1010 18:17:03.355042 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"9d2ccf03-5031-462b-8dd8-7b716885ec78","Type":"ContainerStarted","Data":"ff8dff671991e00752b5a9dbff2c016123943d66ad80e7d95434ba87b3cfb7f1"} Oct 10 18:17:03 crc kubenswrapper[4799]: I1010 18:17:03.356830 4799 generic.go:334] "Generic (PLEG): container finished" podID="27d181f5-9c3d-4193-80d5-8a45ea9a282f" containerID="1fdf1270b12ca1643b111f3c1065a02c142581338d6ed4a43ce7bbcb48ae43c8" exitCode=0 Oct 10 18:17:03 crc kubenswrapper[4799]: I1010 18:17:03.356894 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84c8d469bf-8gjr7" event={"ID":"27d181f5-9c3d-4193-80d5-8a45ea9a282f","Type":"ContainerDied","Data":"1fdf1270b12ca1643b111f3c1065a02c142581338d6ed4a43ce7bbcb48ae43c8"} Oct 10 18:17:03 crc kubenswrapper[4799]: I1010 18:17:03.357132 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84c8d469bf-8gjr7" event={"ID":"27d181f5-9c3d-4193-80d5-8a45ea9a282f","Type":"ContainerStarted","Data":"80bf6383e5956b2934d453bf620ae69945d45905cd9e0d77eb9680b123d1b671"} Oct 10 18:17:04 crc kubenswrapper[4799]: I1010 18:17:04.377517 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84c8d469bf-8gjr7" event={"ID":"27d181f5-9c3d-4193-80d5-8a45ea9a282f","Type":"ContainerStarted","Data":"ce17069f8136d8df1650c04a5f150f8d8f4aef4f726b69d077096cdbce2ec164"} Oct 10 18:17:04 crc kubenswrapper[4799]: I1010 18:17:04.377960 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-84c8d469bf-8gjr7" Oct 10 18:17:04 crc kubenswrapper[4799]: I1010 18:17:04.387580 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"6d5974bf-9794-4599-92ab-7d9f833aa967","Type":"ContainerStarted","Data":"b38d36712375510f15d70090dfb2d264c5a47b40232e34a97fd6b1245e381eae"} Oct 10 18:17:04 crc kubenswrapper[4799]: I1010 18:17:04.389025 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"9d2ccf03-5031-462b-8dd8-7b716885ec78","Type":"ContainerStarted","Data":"6bb62dd9bd165c9dcafe7e0bc710e18e67de9c11fafc1475c5a16cb217fa59f6"} Oct 10 18:17:04 crc kubenswrapper[4799]: I1010 18:17:04.389071 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"9d2ccf03-5031-462b-8dd8-7b716885ec78","Type":"ContainerStarted","Data":"7b028d7757338b098421c3f2a7822a3da9e39eac52a5461017425d4f754343eb"} Oct 10 18:17:04 crc kubenswrapper[4799]: I1010 18:17:04.389215 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/manila-api-0" Oct 10 18:17:04 crc kubenswrapper[4799]: I1010 18:17:04.467952 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-84c8d469bf-8gjr7" podStartSLOduration=3.467929159 podStartE2EDuration="3.467929159s" podCreationTimestamp="2025-10-10 
18:17:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:17:04.428141088 +0000 UTC m=+6317.936465203" watchObservedRunningTime="2025-10-10 18:17:04.467929159 +0000 UTC m=+6317.976253274" Oct 10 18:17:05 crc kubenswrapper[4799]: I1010 18:17:05.439043 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"6d5974bf-9794-4599-92ab-7d9f833aa967","Type":"ContainerStarted","Data":"adbc6ab5d7fd3a9e9dbcdc03d8e692fe1661c79c3c6298940ec01e835705d2dd"} Oct 10 18:17:05 crc kubenswrapper[4799]: I1010 18:17:05.442577 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-api-0" podStartSLOduration=3.442565916 podStartE2EDuration="3.442565916s" podCreationTimestamp="2025-10-10 18:17:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:17:04.466138215 +0000 UTC m=+6317.974462340" watchObservedRunningTime="2025-10-10 18:17:05.442565916 +0000 UTC m=+6318.950890031" Oct 10 18:17:05 crc kubenswrapper[4799]: I1010 18:17:05.449788 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=3.7280159790000003 podStartE2EDuration="4.449779842s" podCreationTimestamp="2025-10-10 18:17:01 +0000 UTC" firstStartedPulling="2025-10-10 18:17:02.937125982 +0000 UTC m=+6316.445450097" lastFinishedPulling="2025-10-10 18:17:03.658889845 +0000 UTC m=+6317.167213960" observedRunningTime="2025-10-10 18:17:05.441789307 +0000 UTC m=+6318.950113422" watchObservedRunningTime="2025-10-10 18:17:05.449779842 +0000 UTC m=+6318.958103957" Oct 10 18:17:10 crc kubenswrapper[4799]: I1010 18:17:10.478168 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"fc512908-5386-4b26-8563-1bb18eeb5a7b","Type":"ContainerStarted","Data":"b0467ac4ddf31b1664ceff059e28a3b489bc6770d3655f4a6a4789109ec16404"} Oct 10 18:17:10 crc kubenswrapper[4799]: I1010 18:17:10.478478 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"fc512908-5386-4b26-8563-1bb18eeb5a7b","Type":"ContainerStarted","Data":"e2505a4d64a882cb293f643a049aea23cfe22211bd21884b393f4a910303cbce"} Oct 10 18:17:10 crc kubenswrapper[4799]: I1010 18:17:10.511753 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-share-share1-0" podStartSLOduration=3.581262237 podStartE2EDuration="9.511733707s" podCreationTimestamp="2025-10-10 18:17:01 +0000 UTC" firstStartedPulling="2025-10-10 18:17:03.184653616 +0000 UTC m=+6316.692977731" lastFinishedPulling="2025-10-10 18:17:09.115125086 +0000 UTC m=+6322.623449201" observedRunningTime="2025-10-10 18:17:10.511254415 +0000 UTC m=+6324.019578590" watchObservedRunningTime="2025-10-10 18:17:10.511733707 +0000 UTC m=+6324.020057822" Oct 10 18:17:12 crc kubenswrapper[4799]: I1010 18:17:12.178135 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-84c8d469bf-8gjr7" Oct 10 18:17:12 crc kubenswrapper[4799]: I1010 18:17:12.209440 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0" Oct 10 18:17:12 crc kubenswrapper[4799]: I1010 18:17:12.261951 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0" Oct 10 18:17:12 
crc kubenswrapper[4799]: I1010 18:17:12.275750 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78956d764c-x7sls"] Oct 10 18:17:12 crc kubenswrapper[4799]: I1010 18:17:12.276388 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-78956d764c-x7sls" podUID="c899e0ee-7598-4182-8e81-a8d3c7681559" containerName="dnsmasq-dns" containerID="cri-o://0d5343f8d899ee64151835d58b53690e618bf8c7e55c6167ad315047b2bc510c" gracePeriod=10 Oct 10 18:17:12 crc kubenswrapper[4799]: I1010 18:17:12.512461 4799 generic.go:334] "Generic (PLEG): container finished" podID="c899e0ee-7598-4182-8e81-a8d3c7681559" containerID="0d5343f8d899ee64151835d58b53690e618bf8c7e55c6167ad315047b2bc510c" exitCode=0 Oct 10 18:17:12 crc kubenswrapper[4799]: I1010 18:17:12.513493 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78956d764c-x7sls" event={"ID":"c899e0ee-7598-4182-8e81-a8d3c7681559","Type":"ContainerDied","Data":"0d5343f8d899ee64151835d58b53690e618bf8c7e55c6167ad315047b2bc510c"} Oct 10 18:17:12 crc kubenswrapper[4799]: I1010 18:17:12.845125 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78956d764c-x7sls" Oct 10 18:17:12 crc kubenswrapper[4799]: I1010 18:17:12.888586 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c899e0ee-7598-4182-8e81-a8d3c7681559-ovsdbserver-sb\") pod \"c899e0ee-7598-4182-8e81-a8d3c7681559\" (UID: \"c899e0ee-7598-4182-8e81-a8d3c7681559\") " Oct 10 18:17:12 crc kubenswrapper[4799]: I1010 18:17:12.888646 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c899e0ee-7598-4182-8e81-a8d3c7681559-dns-svc\") pod \"c899e0ee-7598-4182-8e81-a8d3c7681559\" (UID: \"c899e0ee-7598-4182-8e81-a8d3c7681559\") " Oct 10 18:17:12 crc kubenswrapper[4799]: I1010 18:17:12.888719 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rb92v\" (UniqueName: \"kubernetes.io/projected/c899e0ee-7598-4182-8e81-a8d3c7681559-kube-api-access-rb92v\") pod \"c899e0ee-7598-4182-8e81-a8d3c7681559\" (UID: \"c899e0ee-7598-4182-8e81-a8d3c7681559\") " Oct 10 18:17:12 crc kubenswrapper[4799]: I1010 18:17:12.888738 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c899e0ee-7598-4182-8e81-a8d3c7681559-ovsdbserver-nb\") pod \"c899e0ee-7598-4182-8e81-a8d3c7681559\" (UID: \"c899e0ee-7598-4182-8e81-a8d3c7681559\") " Oct 10 18:17:12 crc kubenswrapper[4799]: I1010 18:17:12.888781 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c899e0ee-7598-4182-8e81-a8d3c7681559-config\") pod \"c899e0ee-7598-4182-8e81-a8d3c7681559\" (UID: \"c899e0ee-7598-4182-8e81-a8d3c7681559\") " Oct 10 18:17:12 crc kubenswrapper[4799]: I1010 18:17:12.900433 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c899e0ee-7598-4182-8e81-a8d3c7681559-kube-api-access-rb92v" (OuterVolumeSpecName: "kube-api-access-rb92v") pod "c899e0ee-7598-4182-8e81-a8d3c7681559" (UID: "c899e0ee-7598-4182-8e81-a8d3c7681559"). InnerVolumeSpecName "kube-api-access-rb92v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:17:12 crc kubenswrapper[4799]: I1010 18:17:12.969697 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c899e0ee-7598-4182-8e81-a8d3c7681559-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c899e0ee-7598-4182-8e81-a8d3c7681559" (UID: "c899e0ee-7598-4182-8e81-a8d3c7681559"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:17:12 crc kubenswrapper[4799]: I1010 18:17:12.981346 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c899e0ee-7598-4182-8e81-a8d3c7681559-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c899e0ee-7598-4182-8e81-a8d3c7681559" (UID: "c899e0ee-7598-4182-8e81-a8d3c7681559"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:17:12 crc kubenswrapper[4799]: I1010 18:17:12.981682 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c899e0ee-7598-4182-8e81-a8d3c7681559-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c899e0ee-7598-4182-8e81-a8d3c7681559" (UID: "c899e0ee-7598-4182-8e81-a8d3c7681559"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:17:12 crc kubenswrapper[4799]: I1010 18:17:12.991126 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c899e0ee-7598-4182-8e81-a8d3c7681559-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 10 18:17:12 crc kubenswrapper[4799]: I1010 18:17:12.992237 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rb92v\" (UniqueName: \"kubernetes.io/projected/c899e0ee-7598-4182-8e81-a8d3c7681559-kube-api-access-rb92v\") on node \"crc\" DevicePath \"\"" Oct 10 18:17:12 crc kubenswrapper[4799]: I1010 18:17:12.992249 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c899e0ee-7598-4182-8e81-a8d3c7681559-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 10 18:17:12 crc kubenswrapper[4799]: I1010 18:17:12.982278 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c899e0ee-7598-4182-8e81-a8d3c7681559-config" (OuterVolumeSpecName: "config") pod "c899e0ee-7598-4182-8e81-a8d3c7681559" (UID: "c899e0ee-7598-4182-8e81-a8d3c7681559"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:17:13 crc kubenswrapper[4799]: I1010 18:17:13.094350 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c899e0ee-7598-4182-8e81-a8d3c7681559-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 10 18:17:13 crc kubenswrapper[4799]: I1010 18:17:13.094380 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c899e0ee-7598-4182-8e81-a8d3c7681559-config\") on node \"crc\" DevicePath \"\"" Oct 10 18:17:13 crc kubenswrapper[4799]: I1010 18:17:13.522456 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78956d764c-x7sls" event={"ID":"c899e0ee-7598-4182-8e81-a8d3c7681559","Type":"ContainerDied","Data":"9c240b82fd2e1d66913f227f11d6e0c5aa1d799c2ccff951b8e382640c05d927"} Oct 10 18:17:13 crc kubenswrapper[4799]: I1010 18:17:13.522741 4799 scope.go:117] "RemoveContainer" containerID="0d5343f8d899ee64151835d58b53690e618bf8c7e55c6167ad315047b2bc510c" Oct 10 18:17:13 crc kubenswrapper[4799]: I1010 18:17:13.522569 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78956d764c-x7sls" Oct 10 18:17:13 crc kubenswrapper[4799]: I1010 18:17:13.562469 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78956d764c-x7sls"] Oct 10 18:17:13 crc kubenswrapper[4799]: I1010 18:17:13.567225 4799 scope.go:117] "RemoveContainer" containerID="5b80f94f09964b8a24711764d9b6f73412dd7bcee480297585a51b32f732d564" Oct 10 18:17:13 crc kubenswrapper[4799]: I1010 18:17:13.573957 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78956d764c-x7sls"] Oct 10 18:17:13 crc kubenswrapper[4799]: I1010 18:17:13.716309 4799 scope.go:117] "RemoveContainer" containerID="5c1235cfc25c047585184fe9857284ee44a564ceeac07e6fdda7f76b420bf0d3" Oct 10 18:17:13 crc kubenswrapper[4799]: I1010 18:17:13.741315 4799 scope.go:117] "RemoveContainer" containerID="cd39580451f04b80f9732d680d7015623810441c35a8112d7987a850a9879b5a" Oct 10 18:17:13 crc kubenswrapper[4799]: I1010 18:17:13.795011 4799 scope.go:117] "RemoveContainer" containerID="c5ac2f9cd280a52b31ae283a6199cecc76935c5dc55c79db22defd9b26528efc" Oct 10 18:17:14 crc kubenswrapper[4799]: I1010 18:17:14.882952 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 10 18:17:14 crc kubenswrapper[4799]: I1010 18:17:14.883548 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="aa7adfb3-8f69-4867-a36d-daac33771cb0" containerName="ceilometer-central-agent" containerID="cri-o://5a69839289cd398c246a3bb2febf28aecf046403bad2a3403961cd2db0a7b1c9" gracePeriod=30 Oct 10 18:17:14 crc kubenswrapper[4799]: I1010 18:17:14.884001 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="aa7adfb3-8f69-4867-a36d-daac33771cb0" containerName="proxy-httpd" containerID="cri-o://49685a3108a4c6ee9a3d5187929d9de2bb85dd7e17303254457dcd2766440dd2" gracePeriod=30 Oct 10 18:17:14 crc kubenswrapper[4799]: I1010 18:17:14.884050 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="aa7adfb3-8f69-4867-a36d-daac33771cb0" containerName="sg-core" containerID="cri-o://ca63f5d8695242c9c7e5f9d8eac01829a3d8ea11069c9dee7e4ed9dd4fdd3cff" gracePeriod=30 Oct 10 18:17:14 crc kubenswrapper[4799]: I1010 18:17:14.884084 4799 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="aa7adfb3-8f69-4867-a36d-daac33771cb0" containerName="ceilometer-notification-agent" containerID="cri-o://2c89c14c0db1e7423dc94cf96d9fe6d4d8ce14b70bad05381a670607d4c644a2" gracePeriod=30 Oct 10 18:17:15 crc kubenswrapper[4799]: I1010 18:17:15.249062 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 18:17:15 crc kubenswrapper[4799]: I1010 18:17:15.249410 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 18:17:15 crc kubenswrapper[4799]: I1010 18:17:15.249451 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 18:17:15 crc kubenswrapper[4799]: I1010 18:17:15.250471 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1f0ceb697c256a28cbd9c8e1e3aa08e1dc732ac4382bc8944609e36db615c835"} pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 10 18:17:15 crc kubenswrapper[4799]: I1010 18:17:15.250558 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" containerID="cri-o://1f0ceb697c256a28cbd9c8e1e3aa08e1dc732ac4382bc8944609e36db615c835" gracePeriod=600 Oct 10 18:17:15 crc kubenswrapper[4799]: E1010 18:17:15.374163 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:17:15 crc kubenswrapper[4799]: I1010 18:17:15.456340 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c899e0ee-7598-4182-8e81-a8d3c7681559" path="/var/lib/kubelet/pods/c899e0ee-7598-4182-8e81-a8d3c7681559/volumes" Oct 10 18:17:15 crc kubenswrapper[4799]: I1010 18:17:15.543828 4799 generic.go:334] "Generic (PLEG): container finished" podID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerID="1f0ceb697c256a28cbd9c8e1e3aa08e1dc732ac4382bc8944609e36db615c835" exitCode=0 Oct 10 18:17:15 crc kubenswrapper[4799]: I1010 18:17:15.543891 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerDied","Data":"1f0ceb697c256a28cbd9c8e1e3aa08e1dc732ac4382bc8944609e36db615c835"} Oct 10 18:17:15 crc kubenswrapper[4799]: I1010 18:17:15.543927 4799 scope.go:117] "RemoveContainer" 
containerID="78eb2a5dbad4fabd2d68def3efca3798c9b19e24aad44cc0581450cbe14e2a76" Oct 10 18:17:15 crc kubenswrapper[4799]: I1010 18:17:15.544291 4799 scope.go:117] "RemoveContainer" containerID="1f0ceb697c256a28cbd9c8e1e3aa08e1dc732ac4382bc8944609e36db615c835" Oct 10 18:17:15 crc kubenswrapper[4799]: E1010 18:17:15.544529 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:17:15 crc kubenswrapper[4799]: I1010 18:17:15.548729 4799 generic.go:334] "Generic (PLEG): container finished" podID="aa7adfb3-8f69-4867-a36d-daac33771cb0" containerID="49685a3108a4c6ee9a3d5187929d9de2bb85dd7e17303254457dcd2766440dd2" exitCode=0 Oct 10 18:17:15 crc kubenswrapper[4799]: I1010 18:17:15.548749 4799 generic.go:334] "Generic (PLEG): container finished" podID="aa7adfb3-8f69-4867-a36d-daac33771cb0" containerID="ca63f5d8695242c9c7e5f9d8eac01829a3d8ea11069c9dee7e4ed9dd4fdd3cff" exitCode=2 Oct 10 18:17:15 crc kubenswrapper[4799]: I1010 18:17:15.548773 4799 generic.go:334] "Generic (PLEG): container finished" podID="aa7adfb3-8f69-4867-a36d-daac33771cb0" containerID="5a69839289cd398c246a3bb2febf28aecf046403bad2a3403961cd2db0a7b1c9" exitCode=0 Oct 10 18:17:15 crc kubenswrapper[4799]: I1010 18:17:15.548786 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa7adfb3-8f69-4867-a36d-daac33771cb0","Type":"ContainerDied","Data":"49685a3108a4c6ee9a3d5187929d9de2bb85dd7e17303254457dcd2766440dd2"} Oct 10 18:17:15 crc kubenswrapper[4799]: I1010 18:17:15.548804 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa7adfb3-8f69-4867-a36d-daac33771cb0","Type":"ContainerDied","Data":"ca63f5d8695242c9c7e5f9d8eac01829a3d8ea11069c9dee7e4ed9dd4fdd3cff"} Oct 10 18:17:15 crc kubenswrapper[4799]: I1010 18:17:15.548814 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa7adfb3-8f69-4867-a36d-daac33771cb0","Type":"ContainerDied","Data":"5a69839289cd398c246a3bb2febf28aecf046403bad2a3403961cd2db0a7b1c9"} Oct 10 18:17:20 crc kubenswrapper[4799]: I1010 18:17:20.670396 4799 generic.go:334] "Generic (PLEG): container finished" podID="aa7adfb3-8f69-4867-a36d-daac33771cb0" containerID="2c89c14c0db1e7423dc94cf96d9fe6d4d8ce14b70bad05381a670607d4c644a2" exitCode=0 Oct 10 18:17:20 crc kubenswrapper[4799]: I1010 18:17:20.670470 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa7adfb3-8f69-4867-a36d-daac33771cb0","Type":"ContainerDied","Data":"2c89c14c0db1e7423dc94cf96d9fe6d4d8ce14b70bad05381a670607d4c644a2"} Oct 10 18:17:20 crc kubenswrapper[4799]: I1010 18:17:20.670965 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa7adfb3-8f69-4867-a36d-daac33771cb0","Type":"ContainerDied","Data":"a7b3d311c3e85347c3417b35def2dc9f4de4a1ca2c956380975be792fdffd24f"} Oct 10 18:17:20 crc kubenswrapper[4799]: I1010 18:17:20.670982 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a7b3d311c3e85347c3417b35def2dc9f4de4a1ca2c956380975be792fdffd24f" Oct 10 18:17:20 crc kubenswrapper[4799]: I1010 18:17:20.673988 4799 util.go:48] 
"No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 10 18:17:20 crc kubenswrapper[4799]: I1010 18:17:20.865635 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa7adfb3-8f69-4867-a36d-daac33771cb0-combined-ca-bundle\") pod \"aa7adfb3-8f69-4867-a36d-daac33771cb0\" (UID: \"aa7adfb3-8f69-4867-a36d-daac33771cb0\") " Oct 10 18:17:20 crc kubenswrapper[4799]: I1010 18:17:20.865772 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa7adfb3-8f69-4867-a36d-daac33771cb0-run-httpd\") pod \"aa7adfb3-8f69-4867-a36d-daac33771cb0\" (UID: \"aa7adfb3-8f69-4867-a36d-daac33771cb0\") " Oct 10 18:17:20 crc kubenswrapper[4799]: I1010 18:17:20.865806 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa7adfb3-8f69-4867-a36d-daac33771cb0-config-data\") pod \"aa7adfb3-8f69-4867-a36d-daac33771cb0\" (UID: \"aa7adfb3-8f69-4867-a36d-daac33771cb0\") " Oct 10 18:17:20 crc kubenswrapper[4799]: I1010 18:17:20.865825 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/aa7adfb3-8f69-4867-a36d-daac33771cb0-sg-core-conf-yaml\") pod \"aa7adfb3-8f69-4867-a36d-daac33771cb0\" (UID: \"aa7adfb3-8f69-4867-a36d-daac33771cb0\") " Oct 10 18:17:20 crc kubenswrapper[4799]: I1010 18:17:20.865938 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vg49h\" (UniqueName: \"kubernetes.io/projected/aa7adfb3-8f69-4867-a36d-daac33771cb0-kube-api-access-vg49h\") pod \"aa7adfb3-8f69-4867-a36d-daac33771cb0\" (UID: \"aa7adfb3-8f69-4867-a36d-daac33771cb0\") " Oct 10 18:17:20 crc kubenswrapper[4799]: I1010 18:17:20.866019 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa7adfb3-8f69-4867-a36d-daac33771cb0-log-httpd\") pod \"aa7adfb3-8f69-4867-a36d-daac33771cb0\" (UID: \"aa7adfb3-8f69-4867-a36d-daac33771cb0\") " Oct 10 18:17:20 crc kubenswrapper[4799]: I1010 18:17:20.866064 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa7adfb3-8f69-4867-a36d-daac33771cb0-scripts\") pod \"aa7adfb3-8f69-4867-a36d-daac33771cb0\" (UID: \"aa7adfb3-8f69-4867-a36d-daac33771cb0\") " Oct 10 18:17:20 crc kubenswrapper[4799]: I1010 18:17:20.866205 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa7adfb3-8f69-4867-a36d-daac33771cb0-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "aa7adfb3-8f69-4867-a36d-daac33771cb0" (UID: "aa7adfb3-8f69-4867-a36d-daac33771cb0"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:17:20 crc kubenswrapper[4799]: I1010 18:17:20.866459 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa7adfb3-8f69-4867-a36d-daac33771cb0-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "aa7adfb3-8f69-4867-a36d-daac33771cb0" (UID: "aa7adfb3-8f69-4867-a36d-daac33771cb0"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:17:20 crc kubenswrapper[4799]: I1010 18:17:20.866502 4799 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa7adfb3-8f69-4867-a36d-daac33771cb0-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 10 18:17:20 crc kubenswrapper[4799]: I1010 18:17:20.891372 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa7adfb3-8f69-4867-a36d-daac33771cb0-scripts" (OuterVolumeSpecName: "scripts") pod "aa7adfb3-8f69-4867-a36d-daac33771cb0" (UID: "aa7adfb3-8f69-4867-a36d-daac33771cb0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:17:20 crc kubenswrapper[4799]: I1010 18:17:20.897621 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa7adfb3-8f69-4867-a36d-daac33771cb0-kube-api-access-vg49h" (OuterVolumeSpecName: "kube-api-access-vg49h") pod "aa7adfb3-8f69-4867-a36d-daac33771cb0" (UID: "aa7adfb3-8f69-4867-a36d-daac33771cb0"). InnerVolumeSpecName "kube-api-access-vg49h". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:17:20 crc kubenswrapper[4799]: I1010 18:17:20.905402 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa7adfb3-8f69-4867-a36d-daac33771cb0-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "aa7adfb3-8f69-4867-a36d-daac33771cb0" (UID: "aa7adfb3-8f69-4867-a36d-daac33771cb0"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:17:20 crc kubenswrapper[4799]: I1010 18:17:20.951979 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa7adfb3-8f69-4867-a36d-daac33771cb0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aa7adfb3-8f69-4867-a36d-daac33771cb0" (UID: "aa7adfb3-8f69-4867-a36d-daac33771cb0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:17:20 crc kubenswrapper[4799]: I1010 18:17:20.965273 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa7adfb3-8f69-4867-a36d-daac33771cb0-config-data" (OuterVolumeSpecName: "config-data") pod "aa7adfb3-8f69-4867-a36d-daac33771cb0" (UID: "aa7adfb3-8f69-4867-a36d-daac33771cb0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:17:20 crc kubenswrapper[4799]: I1010 18:17:20.968219 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa7adfb3-8f69-4867-a36d-daac33771cb0-scripts\") on node \"crc\" DevicePath \"\"" Oct 10 18:17:20 crc kubenswrapper[4799]: I1010 18:17:20.968280 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa7adfb3-8f69-4867-a36d-daac33771cb0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:17:20 crc kubenswrapper[4799]: I1010 18:17:20.968296 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa7adfb3-8f69-4867-a36d-daac33771cb0-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 18:17:20 crc kubenswrapper[4799]: I1010 18:17:20.968309 4799 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/aa7adfb3-8f69-4867-a36d-daac33771cb0-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 10 18:17:20 crc kubenswrapper[4799]: I1010 18:17:20.968320 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vg49h\" (UniqueName: \"kubernetes.io/projected/aa7adfb3-8f69-4867-a36d-daac33771cb0-kube-api-access-vg49h\") on node \"crc\" DevicePath \"\"" Oct 10 18:17:20 crc kubenswrapper[4799]: I1010 18:17:20.968332 4799 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa7adfb3-8f69-4867-a36d-daac33771cb0-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.680990 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.714389 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.738649 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.755145 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 10 18:17:21 crc kubenswrapper[4799]: E1010 18:17:21.755712 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c899e0ee-7598-4182-8e81-a8d3c7681559" containerName="dnsmasq-dns" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.755735 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="c899e0ee-7598-4182-8e81-a8d3c7681559" containerName="dnsmasq-dns" Oct 10 18:17:21 crc kubenswrapper[4799]: E1010 18:17:21.755783 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa7adfb3-8f69-4867-a36d-daac33771cb0" containerName="ceilometer-central-agent" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.755792 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa7adfb3-8f69-4867-a36d-daac33771cb0" containerName="ceilometer-central-agent" Oct 10 18:17:21 crc kubenswrapper[4799]: E1010 18:17:21.755809 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa7adfb3-8f69-4867-a36d-daac33771cb0" containerName="sg-core" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.755833 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa7adfb3-8f69-4867-a36d-daac33771cb0" containerName="sg-core" Oct 10 18:17:21 crc kubenswrapper[4799]: E1010 18:17:21.755842 4799 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="c899e0ee-7598-4182-8e81-a8d3c7681559" containerName="init" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.755851 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="c899e0ee-7598-4182-8e81-a8d3c7681559" containerName="init" Oct 10 18:17:21 crc kubenswrapper[4799]: E1010 18:17:21.755893 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa7adfb3-8f69-4867-a36d-daac33771cb0" containerName="ceilometer-notification-agent" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.755902 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa7adfb3-8f69-4867-a36d-daac33771cb0" containerName="ceilometer-notification-agent" Oct 10 18:17:21 crc kubenswrapper[4799]: E1010 18:17:21.755923 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa7adfb3-8f69-4867-a36d-daac33771cb0" containerName="proxy-httpd" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.755931 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa7adfb3-8f69-4867-a36d-daac33771cb0" containerName="proxy-httpd" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.756188 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="c899e0ee-7598-4182-8e81-a8d3c7681559" containerName="dnsmasq-dns" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.756217 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa7adfb3-8f69-4867-a36d-daac33771cb0" containerName="sg-core" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.756244 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa7adfb3-8f69-4867-a36d-daac33771cb0" containerName="ceilometer-central-agent" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.756257 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa7adfb3-8f69-4867-a36d-daac33771cb0" containerName="ceilometer-notification-agent" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.756298 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa7adfb3-8f69-4867-a36d-daac33771cb0" containerName="proxy-httpd" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.760125 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.762167 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.763116 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.765127 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.788733 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf924bb3-b276-45c5-bcaa-bb6dc669a569-config-data\") pod \"ceilometer-0\" (UID: \"cf924bb3-b276-45c5-bcaa-bb6dc669a569\") " pod="openstack/ceilometer-0" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.789010 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cf924bb3-b276-45c5-bcaa-bb6dc669a569-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cf924bb3-b276-45c5-bcaa-bb6dc669a569\") " pod="openstack/ceilometer-0" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.789171 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rfkg\" (UniqueName: \"kubernetes.io/projected/cf924bb3-b276-45c5-bcaa-bb6dc669a569-kube-api-access-4rfkg\") pod \"ceilometer-0\" (UID: \"cf924bb3-b276-45c5-bcaa-bb6dc669a569\") " pod="openstack/ceilometer-0" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.789322 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf924bb3-b276-45c5-bcaa-bb6dc669a569-log-httpd\") pod \"ceilometer-0\" (UID: \"cf924bb3-b276-45c5-bcaa-bb6dc669a569\") " pod="openstack/ceilometer-0" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.789496 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf924bb3-b276-45c5-bcaa-bb6dc669a569-scripts\") pod \"ceilometer-0\" (UID: \"cf924bb3-b276-45c5-bcaa-bb6dc669a569\") " pod="openstack/ceilometer-0" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.789656 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf924bb3-b276-45c5-bcaa-bb6dc669a569-run-httpd\") pod \"ceilometer-0\" (UID: \"cf924bb3-b276-45c5-bcaa-bb6dc669a569\") " pod="openstack/ceilometer-0" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.789893 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf924bb3-b276-45c5-bcaa-bb6dc669a569-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cf924bb3-b276-45c5-bcaa-bb6dc669a569\") " pod="openstack/ceilometer-0" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.892296 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf924bb3-b276-45c5-bcaa-bb6dc669a569-config-data\") pod \"ceilometer-0\" (UID: \"cf924bb3-b276-45c5-bcaa-bb6dc669a569\") " pod="openstack/ceilometer-0" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.892451 
4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cf924bb3-b276-45c5-bcaa-bb6dc669a569-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cf924bb3-b276-45c5-bcaa-bb6dc669a569\") " pod="openstack/ceilometer-0" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.892538 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rfkg\" (UniqueName: \"kubernetes.io/projected/cf924bb3-b276-45c5-bcaa-bb6dc669a569-kube-api-access-4rfkg\") pod \"ceilometer-0\" (UID: \"cf924bb3-b276-45c5-bcaa-bb6dc669a569\") " pod="openstack/ceilometer-0" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.892590 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf924bb3-b276-45c5-bcaa-bb6dc669a569-log-httpd\") pod \"ceilometer-0\" (UID: \"cf924bb3-b276-45c5-bcaa-bb6dc669a569\") " pod="openstack/ceilometer-0" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.892672 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf924bb3-b276-45c5-bcaa-bb6dc669a569-scripts\") pod \"ceilometer-0\" (UID: \"cf924bb3-b276-45c5-bcaa-bb6dc669a569\") " pod="openstack/ceilometer-0" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.892817 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf924bb3-b276-45c5-bcaa-bb6dc669a569-run-httpd\") pod \"ceilometer-0\" (UID: \"cf924bb3-b276-45c5-bcaa-bb6dc669a569\") " pod="openstack/ceilometer-0" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.893006 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf924bb3-b276-45c5-bcaa-bb6dc669a569-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cf924bb3-b276-45c5-bcaa-bb6dc669a569\") " pod="openstack/ceilometer-0" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.893749 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf924bb3-b276-45c5-bcaa-bb6dc669a569-log-httpd\") pod \"ceilometer-0\" (UID: \"cf924bb3-b276-45c5-bcaa-bb6dc669a569\") " pod="openstack/ceilometer-0" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.893823 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf924bb3-b276-45c5-bcaa-bb6dc669a569-run-httpd\") pod \"ceilometer-0\" (UID: \"cf924bb3-b276-45c5-bcaa-bb6dc669a569\") " pod="openstack/ceilometer-0" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.902115 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf924bb3-b276-45c5-bcaa-bb6dc669a569-scripts\") pod \"ceilometer-0\" (UID: \"cf924bb3-b276-45c5-bcaa-bb6dc669a569\") " pod="openstack/ceilometer-0" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.906314 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cf924bb3-b276-45c5-bcaa-bb6dc669a569-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cf924bb3-b276-45c5-bcaa-bb6dc669a569\") " pod="openstack/ceilometer-0" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.906487 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf924bb3-b276-45c5-bcaa-bb6dc669a569-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cf924bb3-b276-45c5-bcaa-bb6dc669a569\") " pod="openstack/ceilometer-0" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.916019 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf924bb3-b276-45c5-bcaa-bb6dc669a569-config-data\") pod \"ceilometer-0\" (UID: \"cf924bb3-b276-45c5-bcaa-bb6dc669a569\") " pod="openstack/ceilometer-0" Oct 10 18:17:21 crc kubenswrapper[4799]: I1010 18:17:21.921815 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rfkg\" (UniqueName: \"kubernetes.io/projected/cf924bb3-b276-45c5-bcaa-bb6dc669a569-kube-api-access-4rfkg\") pod \"ceilometer-0\" (UID: \"cf924bb3-b276-45c5-bcaa-bb6dc669a569\") " pod="openstack/ceilometer-0" Oct 10 18:17:22 crc kubenswrapper[4799]: I1010 18:17:22.108989 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 10 18:17:22 crc kubenswrapper[4799]: W1010 18:17:22.700653 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcf924bb3_b276_45c5_bcaa_bb6dc669a569.slice/crio-b391f79878a22f0abc636ae072638a7e9231f5897b219e37ac0d904db3e3b416 WatchSource:0}: Error finding container b391f79878a22f0abc636ae072638a7e9231f5897b219e37ac0d904db3e3b416: Status 404 returned error can't find the container with id b391f79878a22f0abc636ae072638a7e9231f5897b219e37ac0d904db3e3b416 Oct 10 18:17:22 crc kubenswrapper[4799]: I1010 18:17:22.755865 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 10 18:17:23 crc kubenswrapper[4799]: I1010 18:17:23.437703 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa7adfb3-8f69-4867-a36d-daac33771cb0" path="/var/lib/kubelet/pods/aa7adfb3-8f69-4867-a36d-daac33771cb0/volumes" Oct 10 18:17:23 crc kubenswrapper[4799]: I1010 18:17:23.705773 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf924bb3-b276-45c5-bcaa-bb6dc669a569","Type":"ContainerStarted","Data":"85934ca564895a726aa0ce306c7673dc19a660cf990a288208627f79aec9dc56"} Oct 10 18:17:23 crc kubenswrapper[4799]: I1010 18:17:23.706033 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf924bb3-b276-45c5-bcaa-bb6dc669a569","Type":"ContainerStarted","Data":"b391f79878a22f0abc636ae072638a7e9231f5897b219e37ac0d904db3e3b416"} Oct 10 18:17:23 crc kubenswrapper[4799]: I1010 18:17:23.717813 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0" Oct 10 18:17:23 crc kubenswrapper[4799]: I1010 18:17:23.789537 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/manila-api-0" Oct 10 18:17:23 crc kubenswrapper[4799]: I1010 18:17:23.928133 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0" Oct 10 18:17:24 crc kubenswrapper[4799]: I1010 18:17:24.716376 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf924bb3-b276-45c5-bcaa-bb6dc669a569","Type":"ContainerStarted","Data":"a5a6e06a688757c6140eb0af3d92b151e0dfbd697825315a216e46b457276b3e"} Oct 10 18:17:25 crc kubenswrapper[4799]: I1010 18:17:25.731838 4799 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/ceilometer-0" event={"ID":"cf924bb3-b276-45c5-bcaa-bb6dc669a569","Type":"ContainerStarted","Data":"41e6527325d77ad72a68d48500ef5aebf7e5b9fcd23a9e6888bde15bad0e0aa3"} Oct 10 18:17:27 crc kubenswrapper[4799]: I1010 18:17:27.418046 4799 scope.go:117] "RemoveContainer" containerID="1f0ceb697c256a28cbd9c8e1e3aa08e1dc732ac4382bc8944609e36db615c835" Oct 10 18:17:27 crc kubenswrapper[4799]: E1010 18:17:27.419407 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:17:27 crc kubenswrapper[4799]: I1010 18:17:27.755629 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf924bb3-b276-45c5-bcaa-bb6dc669a569","Type":"ContainerStarted","Data":"1fd072daaaa7e2422c7b6d9e17aafbadbe12ca12770d47f7d36cd7d27fe42388"} Oct 10 18:17:27 crc kubenswrapper[4799]: I1010 18:17:27.756998 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 10 18:17:27 crc kubenswrapper[4799]: I1010 18:17:27.791398 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.99510975 podStartE2EDuration="6.791373152s" podCreationTimestamp="2025-10-10 18:17:21 +0000 UTC" firstStartedPulling="2025-10-10 18:17:22.708966128 +0000 UTC m=+6336.217290243" lastFinishedPulling="2025-10-10 18:17:26.50522952 +0000 UTC m=+6340.013553645" observedRunningTime="2025-10-10 18:17:27.782809683 +0000 UTC m=+6341.291133858" watchObservedRunningTime="2025-10-10 18:17:27.791373152 +0000 UTC m=+6341.299697277" Oct 10 18:17:40 crc kubenswrapper[4799]: I1010 18:17:40.403191 4799 scope.go:117] "RemoveContainer" containerID="1f0ceb697c256a28cbd9c8e1e3aa08e1dc732ac4382bc8944609e36db615c835" Oct 10 18:17:40 crc kubenswrapper[4799]: E1010 18:17:40.404399 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:17:52 crc kubenswrapper[4799]: I1010 18:17:52.120838 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 10 18:17:52 crc kubenswrapper[4799]: I1010 18:17:52.402340 4799 scope.go:117] "RemoveContainer" containerID="1f0ceb697c256a28cbd9c8e1e3aa08e1dc732ac4382bc8944609e36db615c835" Oct 10 18:17:52 crc kubenswrapper[4799]: E1010 18:17:52.402775 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:18:05 crc kubenswrapper[4799]: I1010 18:18:05.407086 4799 scope.go:117] "RemoveContainer" 
containerID="1f0ceb697c256a28cbd9c8e1e3aa08e1dc732ac4382bc8944609e36db615c835" Oct 10 18:18:05 crc kubenswrapper[4799]: E1010 18:18:05.408173 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:18:13 crc kubenswrapper[4799]: I1010 18:18:13.765181 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6d4c5c46b9-hw5n7"] Oct 10 18:18:13 crc kubenswrapper[4799]: I1010 18:18:13.767799 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d4c5c46b9-hw5n7" Oct 10 18:18:13 crc kubenswrapper[4799]: I1010 18:18:13.771565 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1" Oct 10 18:18:13 crc kubenswrapper[4799]: I1010 18:18:13.788724 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d4c5c46b9-hw5n7"] Oct 10 18:18:13 crc kubenswrapper[4799]: I1010 18:18:13.933009 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8mtpg\" (UniqueName: \"kubernetes.io/projected/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-kube-api-access-8mtpg\") pod \"dnsmasq-dns-6d4c5c46b9-hw5n7\" (UID: \"dca6ec6b-b3be-4391-8f9b-1125167bc0b1\") " pod="openstack/dnsmasq-dns-6d4c5c46b9-hw5n7" Oct 10 18:18:13 crc kubenswrapper[4799]: I1010 18:18:13.933392 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-ovsdbserver-nb\") pod \"dnsmasq-dns-6d4c5c46b9-hw5n7\" (UID: \"dca6ec6b-b3be-4391-8f9b-1125167bc0b1\") " pod="openstack/dnsmasq-dns-6d4c5c46b9-hw5n7" Oct 10 18:18:13 crc kubenswrapper[4799]: I1010 18:18:13.933632 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-ovsdbserver-sb\") pod \"dnsmasq-dns-6d4c5c46b9-hw5n7\" (UID: \"dca6ec6b-b3be-4391-8f9b-1125167bc0b1\") " pod="openstack/dnsmasq-dns-6d4c5c46b9-hw5n7" Oct 10 18:18:13 crc kubenswrapper[4799]: I1010 18:18:13.933704 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-config\") pod \"dnsmasq-dns-6d4c5c46b9-hw5n7\" (UID: \"dca6ec6b-b3be-4391-8f9b-1125167bc0b1\") " pod="openstack/dnsmasq-dns-6d4c5c46b9-hw5n7" Oct 10 18:18:13 crc kubenswrapper[4799]: I1010 18:18:13.933883 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-dns-svc\") pod \"dnsmasq-dns-6d4c5c46b9-hw5n7\" (UID: \"dca6ec6b-b3be-4391-8f9b-1125167bc0b1\") " pod="openstack/dnsmasq-dns-6d4c5c46b9-hw5n7" Oct 10 18:18:13 crc kubenswrapper[4799]: I1010 18:18:13.934212 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-openstack-cell1\") pod 
\"dnsmasq-dns-6d4c5c46b9-hw5n7\" (UID: \"dca6ec6b-b3be-4391-8f9b-1125167bc0b1\") " pod="openstack/dnsmasq-dns-6d4c5c46b9-hw5n7" Oct 10 18:18:14 crc kubenswrapper[4799]: I1010 18:18:14.036442 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-openstack-cell1\") pod \"dnsmasq-dns-6d4c5c46b9-hw5n7\" (UID: \"dca6ec6b-b3be-4391-8f9b-1125167bc0b1\") " pod="openstack/dnsmasq-dns-6d4c5c46b9-hw5n7" Oct 10 18:18:14 crc kubenswrapper[4799]: I1010 18:18:14.036618 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8mtpg\" (UniqueName: \"kubernetes.io/projected/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-kube-api-access-8mtpg\") pod \"dnsmasq-dns-6d4c5c46b9-hw5n7\" (UID: \"dca6ec6b-b3be-4391-8f9b-1125167bc0b1\") " pod="openstack/dnsmasq-dns-6d4c5c46b9-hw5n7" Oct 10 18:18:14 crc kubenswrapper[4799]: I1010 18:18:14.036646 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-ovsdbserver-nb\") pod \"dnsmasq-dns-6d4c5c46b9-hw5n7\" (UID: \"dca6ec6b-b3be-4391-8f9b-1125167bc0b1\") " pod="openstack/dnsmasq-dns-6d4c5c46b9-hw5n7" Oct 10 18:18:14 crc kubenswrapper[4799]: I1010 18:18:14.036703 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-ovsdbserver-sb\") pod \"dnsmasq-dns-6d4c5c46b9-hw5n7\" (UID: \"dca6ec6b-b3be-4391-8f9b-1125167bc0b1\") " pod="openstack/dnsmasq-dns-6d4c5c46b9-hw5n7" Oct 10 18:18:14 crc kubenswrapper[4799]: I1010 18:18:14.036733 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-config\") pod \"dnsmasq-dns-6d4c5c46b9-hw5n7\" (UID: \"dca6ec6b-b3be-4391-8f9b-1125167bc0b1\") " pod="openstack/dnsmasq-dns-6d4c5c46b9-hw5n7" Oct 10 18:18:14 crc kubenswrapper[4799]: I1010 18:18:14.036955 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-dns-svc\") pod \"dnsmasq-dns-6d4c5c46b9-hw5n7\" (UID: \"dca6ec6b-b3be-4391-8f9b-1125167bc0b1\") " pod="openstack/dnsmasq-dns-6d4c5c46b9-hw5n7" Oct 10 18:18:14 crc kubenswrapper[4799]: I1010 18:18:14.037304 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-openstack-cell1\") pod \"dnsmasq-dns-6d4c5c46b9-hw5n7\" (UID: \"dca6ec6b-b3be-4391-8f9b-1125167bc0b1\") " pod="openstack/dnsmasq-dns-6d4c5c46b9-hw5n7" Oct 10 18:18:14 crc kubenswrapper[4799]: I1010 18:18:14.037646 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-ovsdbserver-nb\") pod \"dnsmasq-dns-6d4c5c46b9-hw5n7\" (UID: \"dca6ec6b-b3be-4391-8f9b-1125167bc0b1\") " pod="openstack/dnsmasq-dns-6d4c5c46b9-hw5n7" Oct 10 18:18:14 crc kubenswrapper[4799]: I1010 18:18:14.037812 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-config\") pod \"dnsmasq-dns-6d4c5c46b9-hw5n7\" (UID: \"dca6ec6b-b3be-4391-8f9b-1125167bc0b1\") " 
pod="openstack/dnsmasq-dns-6d4c5c46b9-hw5n7" Oct 10 18:18:14 crc kubenswrapper[4799]: I1010 18:18:14.037949 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-ovsdbserver-sb\") pod \"dnsmasq-dns-6d4c5c46b9-hw5n7\" (UID: \"dca6ec6b-b3be-4391-8f9b-1125167bc0b1\") " pod="openstack/dnsmasq-dns-6d4c5c46b9-hw5n7" Oct 10 18:18:14 crc kubenswrapper[4799]: I1010 18:18:14.038041 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-dns-svc\") pod \"dnsmasq-dns-6d4c5c46b9-hw5n7\" (UID: \"dca6ec6b-b3be-4391-8f9b-1125167bc0b1\") " pod="openstack/dnsmasq-dns-6d4c5c46b9-hw5n7" Oct 10 18:18:14 crc kubenswrapper[4799]: I1010 18:18:14.066924 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8mtpg\" (UniqueName: \"kubernetes.io/projected/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-kube-api-access-8mtpg\") pod \"dnsmasq-dns-6d4c5c46b9-hw5n7\" (UID: \"dca6ec6b-b3be-4391-8f9b-1125167bc0b1\") " pod="openstack/dnsmasq-dns-6d4c5c46b9-hw5n7" Oct 10 18:18:14 crc kubenswrapper[4799]: I1010 18:18:14.089046 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d4c5c46b9-hw5n7" Oct 10 18:18:14 crc kubenswrapper[4799]: I1010 18:18:14.571022 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d4c5c46b9-hw5n7"] Oct 10 18:18:14 crc kubenswrapper[4799]: W1010 18:18:14.578915 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddca6ec6b_b3be_4391_8f9b_1125167bc0b1.slice/crio-1d89bc74ba924eb1e4c23688183f0ea65c02ab876d955c0d9246e59bd9e5bd57 WatchSource:0}: Error finding container 1d89bc74ba924eb1e4c23688183f0ea65c02ab876d955c0d9246e59bd9e5bd57: Status 404 returned error can't find the container with id 1d89bc74ba924eb1e4c23688183f0ea65c02ab876d955c0d9246e59bd9e5bd57 Oct 10 18:18:15 crc kubenswrapper[4799]: I1010 18:18:15.489016 4799 generic.go:334] "Generic (PLEG): container finished" podID="dca6ec6b-b3be-4391-8f9b-1125167bc0b1" containerID="f7ab34d127a1a4cf6e42ad2b79a9f8e94cf5b9d5baf5588a286a0c7d1ea0b75b" exitCode=0 Oct 10 18:18:15 crc kubenswrapper[4799]: I1010 18:18:15.489094 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d4c5c46b9-hw5n7" event={"ID":"dca6ec6b-b3be-4391-8f9b-1125167bc0b1","Type":"ContainerDied","Data":"f7ab34d127a1a4cf6e42ad2b79a9f8e94cf5b9d5baf5588a286a0c7d1ea0b75b"} Oct 10 18:18:15 crc kubenswrapper[4799]: I1010 18:18:15.489422 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d4c5c46b9-hw5n7" event={"ID":"dca6ec6b-b3be-4391-8f9b-1125167bc0b1","Type":"ContainerStarted","Data":"1d89bc74ba924eb1e4c23688183f0ea65c02ab876d955c0d9246e59bd9e5bd57"} Oct 10 18:18:16 crc kubenswrapper[4799]: I1010 18:18:16.507441 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d4c5c46b9-hw5n7" event={"ID":"dca6ec6b-b3be-4391-8f9b-1125167bc0b1","Type":"ContainerStarted","Data":"1b52ddb130219486ef68a6f9813a7ef4a511e2e97e5c9634284568b97a1f8765"} Oct 10 18:18:16 crc kubenswrapper[4799]: I1010 18:18:16.507745 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6d4c5c46b9-hw5n7" Oct 10 18:18:16 crc kubenswrapper[4799]: I1010 18:18:16.556530 4799 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6d4c5c46b9-hw5n7" podStartSLOduration=3.556465549 podStartE2EDuration="3.556465549s" podCreationTimestamp="2025-10-10 18:18:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:18:16.54093169 +0000 UTC m=+6390.049255835" watchObservedRunningTime="2025-10-10 18:18:16.556465549 +0000 UTC m=+6390.064789704" Oct 10 18:18:20 crc kubenswrapper[4799]: I1010 18:18:20.402809 4799 scope.go:117] "RemoveContainer" containerID="1f0ceb697c256a28cbd9c8e1e3aa08e1dc732ac4382bc8944609e36db615c835" Oct 10 18:18:20 crc kubenswrapper[4799]: E1010 18:18:20.403596 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:18:24 crc kubenswrapper[4799]: I1010 18:18:24.091989 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6d4c5c46b9-hw5n7" Oct 10 18:18:24 crc kubenswrapper[4799]: I1010 18:18:24.166139 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84c8d469bf-8gjr7"] Oct 10 18:18:24 crc kubenswrapper[4799]: I1010 18:18:24.166375 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-84c8d469bf-8gjr7" podUID="27d181f5-9c3d-4193-80d5-8a45ea9a282f" containerName="dnsmasq-dns" containerID="cri-o://ce17069f8136d8df1650c04a5f150f8d8f4aef4f726b69d077096cdbce2ec164" gracePeriod=10 Oct 10 18:18:24 crc kubenswrapper[4799]: I1010 18:18:24.308739 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-cdbd4bc47-gjsnm"] Oct 10 18:18:24 crc kubenswrapper[4799]: I1010 18:18:24.311080 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-cdbd4bc47-gjsnm" Oct 10 18:18:24 crc kubenswrapper[4799]: I1010 18:18:24.331393 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cdbd4bc47-gjsnm"] Oct 10 18:18:24 crc kubenswrapper[4799]: I1010 18:18:24.412334 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6c6k8\" (UniqueName: \"kubernetes.io/projected/42f9f525-e7cc-4e82-8a3f-49e481628714-kube-api-access-6c6k8\") pod \"dnsmasq-dns-cdbd4bc47-gjsnm\" (UID: \"42f9f525-e7cc-4e82-8a3f-49e481628714\") " pod="openstack/dnsmasq-dns-cdbd4bc47-gjsnm" Oct 10 18:18:24 crc kubenswrapper[4799]: I1010 18:18:24.412670 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/42f9f525-e7cc-4e82-8a3f-49e481628714-ovsdbserver-nb\") pod \"dnsmasq-dns-cdbd4bc47-gjsnm\" (UID: \"42f9f525-e7cc-4e82-8a3f-49e481628714\") " pod="openstack/dnsmasq-dns-cdbd4bc47-gjsnm" Oct 10 18:18:24 crc kubenswrapper[4799]: I1010 18:18:24.412723 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/42f9f525-e7cc-4e82-8a3f-49e481628714-ovsdbserver-sb\") pod \"dnsmasq-dns-cdbd4bc47-gjsnm\" (UID: \"42f9f525-e7cc-4e82-8a3f-49e481628714\") " pod="openstack/dnsmasq-dns-cdbd4bc47-gjsnm" Oct 10 18:18:24 crc kubenswrapper[4799]: I1010 18:18:24.412779 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42f9f525-e7cc-4e82-8a3f-49e481628714-dns-svc\") pod \"dnsmasq-dns-cdbd4bc47-gjsnm\" (UID: \"42f9f525-e7cc-4e82-8a3f-49e481628714\") " pod="openstack/dnsmasq-dns-cdbd4bc47-gjsnm" Oct 10 18:18:24 crc kubenswrapper[4799]: I1010 18:18:24.412849 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/42f9f525-e7cc-4e82-8a3f-49e481628714-openstack-cell1\") pod \"dnsmasq-dns-cdbd4bc47-gjsnm\" (UID: \"42f9f525-e7cc-4e82-8a3f-49e481628714\") " pod="openstack/dnsmasq-dns-cdbd4bc47-gjsnm" Oct 10 18:18:24 crc kubenswrapper[4799]: I1010 18:18:24.412875 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42f9f525-e7cc-4e82-8a3f-49e481628714-config\") pod \"dnsmasq-dns-cdbd4bc47-gjsnm\" (UID: \"42f9f525-e7cc-4e82-8a3f-49e481628714\") " pod="openstack/dnsmasq-dns-cdbd4bc47-gjsnm" Oct 10 18:18:24 crc kubenswrapper[4799]: I1010 18:18:24.514906 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/42f9f525-e7cc-4e82-8a3f-49e481628714-ovsdbserver-nb\") pod \"dnsmasq-dns-cdbd4bc47-gjsnm\" (UID: \"42f9f525-e7cc-4e82-8a3f-49e481628714\") " pod="openstack/dnsmasq-dns-cdbd4bc47-gjsnm" Oct 10 18:18:24 crc kubenswrapper[4799]: I1010 18:18:24.515065 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/42f9f525-e7cc-4e82-8a3f-49e481628714-ovsdbserver-sb\") pod \"dnsmasq-dns-cdbd4bc47-gjsnm\" (UID: \"42f9f525-e7cc-4e82-8a3f-49e481628714\") " pod="openstack/dnsmasq-dns-cdbd4bc47-gjsnm" Oct 10 18:18:24 crc kubenswrapper[4799]: I1010 18:18:24.515143 4799 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42f9f525-e7cc-4e82-8a3f-49e481628714-dns-svc\") pod \"dnsmasq-dns-cdbd4bc47-gjsnm\" (UID: \"42f9f525-e7cc-4e82-8a3f-49e481628714\") " pod="openstack/dnsmasq-dns-cdbd4bc47-gjsnm" Oct 10 18:18:24 crc kubenswrapper[4799]: I1010 18:18:24.515312 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/42f9f525-e7cc-4e82-8a3f-49e481628714-openstack-cell1\") pod \"dnsmasq-dns-cdbd4bc47-gjsnm\" (UID: \"42f9f525-e7cc-4e82-8a3f-49e481628714\") " pod="openstack/dnsmasq-dns-cdbd4bc47-gjsnm" Oct 10 18:18:24 crc kubenswrapper[4799]: I1010 18:18:24.515338 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42f9f525-e7cc-4e82-8a3f-49e481628714-config\") pod \"dnsmasq-dns-cdbd4bc47-gjsnm\" (UID: \"42f9f525-e7cc-4e82-8a3f-49e481628714\") " pod="openstack/dnsmasq-dns-cdbd4bc47-gjsnm" Oct 10 18:18:24 crc kubenswrapper[4799]: I1010 18:18:24.515456 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6c6k8\" (UniqueName: \"kubernetes.io/projected/42f9f525-e7cc-4e82-8a3f-49e481628714-kube-api-access-6c6k8\") pod \"dnsmasq-dns-cdbd4bc47-gjsnm\" (UID: \"42f9f525-e7cc-4e82-8a3f-49e481628714\") " pod="openstack/dnsmasq-dns-cdbd4bc47-gjsnm" Oct 10 18:18:24 crc kubenswrapper[4799]: I1010 18:18:24.515796 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/42f9f525-e7cc-4e82-8a3f-49e481628714-ovsdbserver-nb\") pod \"dnsmasq-dns-cdbd4bc47-gjsnm\" (UID: \"42f9f525-e7cc-4e82-8a3f-49e481628714\") " pod="openstack/dnsmasq-dns-cdbd4bc47-gjsnm" Oct 10 18:18:24 crc kubenswrapper[4799]: I1010 18:18:24.517172 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42f9f525-e7cc-4e82-8a3f-49e481628714-dns-svc\") pod \"dnsmasq-dns-cdbd4bc47-gjsnm\" (UID: \"42f9f525-e7cc-4e82-8a3f-49e481628714\") " pod="openstack/dnsmasq-dns-cdbd4bc47-gjsnm" Oct 10 18:18:24 crc kubenswrapper[4799]: I1010 18:18:24.517229 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/42f9f525-e7cc-4e82-8a3f-49e481628714-openstack-cell1\") pod \"dnsmasq-dns-cdbd4bc47-gjsnm\" (UID: \"42f9f525-e7cc-4e82-8a3f-49e481628714\") " pod="openstack/dnsmasq-dns-cdbd4bc47-gjsnm" Oct 10 18:18:24 crc kubenswrapper[4799]: I1010 18:18:24.517337 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42f9f525-e7cc-4e82-8a3f-49e481628714-config\") pod \"dnsmasq-dns-cdbd4bc47-gjsnm\" (UID: \"42f9f525-e7cc-4e82-8a3f-49e481628714\") " pod="openstack/dnsmasq-dns-cdbd4bc47-gjsnm" Oct 10 18:18:24 crc kubenswrapper[4799]: I1010 18:18:24.519516 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/42f9f525-e7cc-4e82-8a3f-49e481628714-ovsdbserver-sb\") pod \"dnsmasq-dns-cdbd4bc47-gjsnm\" (UID: \"42f9f525-e7cc-4e82-8a3f-49e481628714\") " pod="openstack/dnsmasq-dns-cdbd4bc47-gjsnm" Oct 10 18:18:24 crc kubenswrapper[4799]: I1010 18:18:24.532918 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6c6k8\" (UniqueName: \"kubernetes.io/projected/42f9f525-e7cc-4e82-8a3f-49e481628714-kube-api-access-6c6k8\") pod 
\"dnsmasq-dns-cdbd4bc47-gjsnm\" (UID: \"42f9f525-e7cc-4e82-8a3f-49e481628714\") " pod="openstack/dnsmasq-dns-cdbd4bc47-gjsnm" Oct 10 18:18:24 crc kubenswrapper[4799]: I1010 18:18:24.622338 4799 generic.go:334] "Generic (PLEG): container finished" podID="27d181f5-9c3d-4193-80d5-8a45ea9a282f" containerID="ce17069f8136d8df1650c04a5f150f8d8f4aef4f726b69d077096cdbce2ec164" exitCode=0 Oct 10 18:18:24 crc kubenswrapper[4799]: I1010 18:18:24.622384 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84c8d469bf-8gjr7" event={"ID":"27d181f5-9c3d-4193-80d5-8a45ea9a282f","Type":"ContainerDied","Data":"ce17069f8136d8df1650c04a5f150f8d8f4aef4f726b69d077096cdbce2ec164"} Oct 10 18:18:24 crc kubenswrapper[4799]: I1010 18:18:24.653415 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cdbd4bc47-gjsnm" Oct 10 18:18:24 crc kubenswrapper[4799]: I1010 18:18:24.783726 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84c8d469bf-8gjr7" Oct 10 18:18:24 crc kubenswrapper[4799]: I1010 18:18:24.928636 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/27d181f5-9c3d-4193-80d5-8a45ea9a282f-ovsdbserver-nb\") pod \"27d181f5-9c3d-4193-80d5-8a45ea9a282f\" (UID: \"27d181f5-9c3d-4193-80d5-8a45ea9a282f\") " Oct 10 18:18:24 crc kubenswrapper[4799]: I1010 18:18:24.929047 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/27d181f5-9c3d-4193-80d5-8a45ea9a282f-ovsdbserver-sb\") pod \"27d181f5-9c3d-4193-80d5-8a45ea9a282f\" (UID: \"27d181f5-9c3d-4193-80d5-8a45ea9a282f\") " Oct 10 18:18:24 crc kubenswrapper[4799]: I1010 18:18:24.929145 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/27d181f5-9c3d-4193-80d5-8a45ea9a282f-dns-svc\") pod \"27d181f5-9c3d-4193-80d5-8a45ea9a282f\" (UID: \"27d181f5-9c3d-4193-80d5-8a45ea9a282f\") " Oct 10 18:18:24 crc kubenswrapper[4799]: I1010 18:18:24.929220 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8j4jv\" (UniqueName: \"kubernetes.io/projected/27d181f5-9c3d-4193-80d5-8a45ea9a282f-kube-api-access-8j4jv\") pod \"27d181f5-9c3d-4193-80d5-8a45ea9a282f\" (UID: \"27d181f5-9c3d-4193-80d5-8a45ea9a282f\") " Oct 10 18:18:24 crc kubenswrapper[4799]: I1010 18:18:24.929249 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27d181f5-9c3d-4193-80d5-8a45ea9a282f-config\") pod \"27d181f5-9c3d-4193-80d5-8a45ea9a282f\" (UID: \"27d181f5-9c3d-4193-80d5-8a45ea9a282f\") " Oct 10 18:18:24 crc kubenswrapper[4799]: I1010 18:18:24.940362 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27d181f5-9c3d-4193-80d5-8a45ea9a282f-kube-api-access-8j4jv" (OuterVolumeSpecName: "kube-api-access-8j4jv") pod "27d181f5-9c3d-4193-80d5-8a45ea9a282f" (UID: "27d181f5-9c3d-4193-80d5-8a45ea9a282f"). InnerVolumeSpecName "kube-api-access-8j4jv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:18:24 crc kubenswrapper[4799]: I1010 18:18:24.993970 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27d181f5-9c3d-4193-80d5-8a45ea9a282f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "27d181f5-9c3d-4193-80d5-8a45ea9a282f" (UID: "27d181f5-9c3d-4193-80d5-8a45ea9a282f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:18:25 crc kubenswrapper[4799]: I1010 18:18:25.000477 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27d181f5-9c3d-4193-80d5-8a45ea9a282f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "27d181f5-9c3d-4193-80d5-8a45ea9a282f" (UID: "27d181f5-9c3d-4193-80d5-8a45ea9a282f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:18:25 crc kubenswrapper[4799]: I1010 18:18:25.003288 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27d181f5-9c3d-4193-80d5-8a45ea9a282f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "27d181f5-9c3d-4193-80d5-8a45ea9a282f" (UID: "27d181f5-9c3d-4193-80d5-8a45ea9a282f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:18:25 crc kubenswrapper[4799]: I1010 18:18:25.006423 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27d181f5-9c3d-4193-80d5-8a45ea9a282f-config" (OuterVolumeSpecName: "config") pod "27d181f5-9c3d-4193-80d5-8a45ea9a282f" (UID: "27d181f5-9c3d-4193-80d5-8a45ea9a282f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:18:25 crc kubenswrapper[4799]: I1010 18:18:25.032732 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/27d181f5-9c3d-4193-80d5-8a45ea9a282f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 10 18:18:25 crc kubenswrapper[4799]: I1010 18:18:25.032823 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/27d181f5-9c3d-4193-80d5-8a45ea9a282f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 10 18:18:25 crc kubenswrapper[4799]: I1010 18:18:25.032833 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/27d181f5-9c3d-4193-80d5-8a45ea9a282f-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 10 18:18:25 crc kubenswrapper[4799]: I1010 18:18:25.032884 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8j4jv\" (UniqueName: \"kubernetes.io/projected/27d181f5-9c3d-4193-80d5-8a45ea9a282f-kube-api-access-8j4jv\") on node \"crc\" DevicePath \"\"" Oct 10 18:18:25 crc kubenswrapper[4799]: I1010 18:18:25.032896 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27d181f5-9c3d-4193-80d5-8a45ea9a282f-config\") on node \"crc\" DevicePath \"\"" Oct 10 18:18:25 crc kubenswrapper[4799]: I1010 18:18:25.284994 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cdbd4bc47-gjsnm"] Oct 10 18:18:25 crc kubenswrapper[4799]: W1010 18:18:25.289865 4799 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod42f9f525_e7cc_4e82_8a3f_49e481628714.slice/crio-739980818e4f7e497f66c4ef57a1c66682fd5e71c4b5a809bdea6a3d05ceeb15 WatchSource:0}: Error finding container 739980818e4f7e497f66c4ef57a1c66682fd5e71c4b5a809bdea6a3d05ceeb15: Status 404 returned error can't find the container with id 739980818e4f7e497f66c4ef57a1c66682fd5e71c4b5a809bdea6a3d05ceeb15 Oct 10 18:18:25 crc kubenswrapper[4799]: I1010 18:18:25.633407 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84c8d469bf-8gjr7" event={"ID":"27d181f5-9c3d-4193-80d5-8a45ea9a282f","Type":"ContainerDied","Data":"80bf6383e5956b2934d453bf620ae69945d45905cd9e0d77eb9680b123d1b671"} Oct 10 18:18:25 crc kubenswrapper[4799]: I1010 18:18:25.633722 4799 scope.go:117] "RemoveContainer" containerID="ce17069f8136d8df1650c04a5f150f8d8f4aef4f726b69d077096cdbce2ec164" Oct 10 18:18:25 crc kubenswrapper[4799]: I1010 18:18:25.633521 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84c8d469bf-8gjr7" Oct 10 18:18:25 crc kubenswrapper[4799]: I1010 18:18:25.636161 4799 generic.go:334] "Generic (PLEG): container finished" podID="42f9f525-e7cc-4e82-8a3f-49e481628714" containerID="2061092cd51a11610eb3a4678c9a89b9d01de180b68e73a1011af416e1c029f6" exitCode=0 Oct 10 18:18:25 crc kubenswrapper[4799]: I1010 18:18:25.636212 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cdbd4bc47-gjsnm" event={"ID":"42f9f525-e7cc-4e82-8a3f-49e481628714","Type":"ContainerDied","Data":"2061092cd51a11610eb3a4678c9a89b9d01de180b68e73a1011af416e1c029f6"} Oct 10 18:18:25 crc kubenswrapper[4799]: I1010 18:18:25.636242 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cdbd4bc47-gjsnm" event={"ID":"42f9f525-e7cc-4e82-8a3f-49e481628714","Type":"ContainerStarted","Data":"739980818e4f7e497f66c4ef57a1c66682fd5e71c4b5a809bdea6a3d05ceeb15"} Oct 10 18:18:25 crc kubenswrapper[4799]: I1010 18:18:25.658047 4799 scope.go:117] "RemoveContainer" containerID="1fdf1270b12ca1643b111f3c1065a02c142581338d6ed4a43ce7bbcb48ae43c8" Oct 10 18:18:25 crc kubenswrapper[4799]: I1010 18:18:25.678655 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84c8d469bf-8gjr7"] Oct 10 18:18:25 crc kubenswrapper[4799]: I1010 18:18:25.687989 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-84c8d469bf-8gjr7"] Oct 10 18:18:26 crc kubenswrapper[4799]: I1010 18:18:26.651105 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cdbd4bc47-gjsnm" event={"ID":"42f9f525-e7cc-4e82-8a3f-49e481628714","Type":"ContainerStarted","Data":"5197e42518c2d0e9d866fb121bcdf759bb0bf83d82f588478d0702d4232fa1d9"} Oct 10 18:18:26 crc kubenswrapper[4799]: I1010 18:18:26.651413 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-cdbd4bc47-gjsnm" Oct 10 18:18:26 crc kubenswrapper[4799]: I1010 18:18:26.681445 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-cdbd4bc47-gjsnm" podStartSLOduration=2.681418733 podStartE2EDuration="2.681418733s" podCreationTimestamp="2025-10-10 18:18:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:18:26.672721671 +0000 UTC m=+6400.181045796" watchObservedRunningTime="2025-10-10 18:18:26.681418733 +0000 UTC m=+6400.189742878" Oct 10 
18:18:27 crc kubenswrapper[4799]: I1010 18:18:27.426368 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27d181f5-9c3d-4193-80d5-8a45ea9a282f" path="/var/lib/kubelet/pods/27d181f5-9c3d-4193-80d5-8a45ea9a282f/volumes" Oct 10 18:18:34 crc kubenswrapper[4799]: I1010 18:18:34.403144 4799 scope.go:117] "RemoveContainer" containerID="1f0ceb697c256a28cbd9c8e1e3aa08e1dc732ac4382bc8944609e36db615c835" Oct 10 18:18:34 crc kubenswrapper[4799]: E1010 18:18:34.404198 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:18:34 crc kubenswrapper[4799]: I1010 18:18:34.655036 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-cdbd4bc47-gjsnm" Oct 10 18:18:34 crc kubenswrapper[4799]: I1010 18:18:34.768992 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d4c5c46b9-hw5n7"] Oct 10 18:18:34 crc kubenswrapper[4799]: I1010 18:18:34.769267 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6d4c5c46b9-hw5n7" podUID="dca6ec6b-b3be-4391-8f9b-1125167bc0b1" containerName="dnsmasq-dns" containerID="cri-o://1b52ddb130219486ef68a6f9813a7ef4a511e2e97e5c9634284568b97a1f8765" gracePeriod=10 Oct 10 18:18:35 crc kubenswrapper[4799]: I1010 18:18:35.360298 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d4c5c46b9-hw5n7" Oct 10 18:18:35 crc kubenswrapper[4799]: I1010 18:18:35.511071 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-dns-svc\") pod \"dca6ec6b-b3be-4391-8f9b-1125167bc0b1\" (UID: \"dca6ec6b-b3be-4391-8f9b-1125167bc0b1\") " Oct 10 18:18:35 crc kubenswrapper[4799]: I1010 18:18:35.511457 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-ovsdbserver-sb\") pod \"dca6ec6b-b3be-4391-8f9b-1125167bc0b1\" (UID: \"dca6ec6b-b3be-4391-8f9b-1125167bc0b1\") " Oct 10 18:18:35 crc kubenswrapper[4799]: I1010 18:18:35.511509 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-ovsdbserver-nb\") pod \"dca6ec6b-b3be-4391-8f9b-1125167bc0b1\" (UID: \"dca6ec6b-b3be-4391-8f9b-1125167bc0b1\") " Oct 10 18:18:35 crc kubenswrapper[4799]: I1010 18:18:35.511581 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-openstack-cell1\") pod \"dca6ec6b-b3be-4391-8f9b-1125167bc0b1\" (UID: \"dca6ec6b-b3be-4391-8f9b-1125167bc0b1\") " Oct 10 18:18:35 crc kubenswrapper[4799]: I1010 18:18:35.511654 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8mtpg\" (UniqueName: \"kubernetes.io/projected/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-kube-api-access-8mtpg\") pod \"dca6ec6b-b3be-4391-8f9b-1125167bc0b1\" (UID: 
\"dca6ec6b-b3be-4391-8f9b-1125167bc0b1\") " Oct 10 18:18:35 crc kubenswrapper[4799]: I1010 18:18:35.511899 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-config\") pod \"dca6ec6b-b3be-4391-8f9b-1125167bc0b1\" (UID: \"dca6ec6b-b3be-4391-8f9b-1125167bc0b1\") " Oct 10 18:18:35 crc kubenswrapper[4799]: I1010 18:18:35.521398 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-kube-api-access-8mtpg" (OuterVolumeSpecName: "kube-api-access-8mtpg") pod "dca6ec6b-b3be-4391-8f9b-1125167bc0b1" (UID: "dca6ec6b-b3be-4391-8f9b-1125167bc0b1"). InnerVolumeSpecName "kube-api-access-8mtpg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:18:35 crc kubenswrapper[4799]: I1010 18:18:35.566928 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "dca6ec6b-b3be-4391-8f9b-1125167bc0b1" (UID: "dca6ec6b-b3be-4391-8f9b-1125167bc0b1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:18:35 crc kubenswrapper[4799]: I1010 18:18:35.575265 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "dca6ec6b-b3be-4391-8f9b-1125167bc0b1" (UID: "dca6ec6b-b3be-4391-8f9b-1125167bc0b1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:18:35 crc kubenswrapper[4799]: I1010 18:18:35.578368 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "dca6ec6b-b3be-4391-8f9b-1125167bc0b1" (UID: "dca6ec6b-b3be-4391-8f9b-1125167bc0b1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:18:35 crc kubenswrapper[4799]: I1010 18:18:35.588461 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-config" (OuterVolumeSpecName: "config") pod "dca6ec6b-b3be-4391-8f9b-1125167bc0b1" (UID: "dca6ec6b-b3be-4391-8f9b-1125167bc0b1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:18:35 crc kubenswrapper[4799]: I1010 18:18:35.603880 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-openstack-cell1" (OuterVolumeSpecName: "openstack-cell1") pod "dca6ec6b-b3be-4391-8f9b-1125167bc0b1" (UID: "dca6ec6b-b3be-4391-8f9b-1125167bc0b1"). InnerVolumeSpecName "openstack-cell1". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:18:35 crc kubenswrapper[4799]: I1010 18:18:35.615784 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 10 18:18:35 crc kubenswrapper[4799]: I1010 18:18:35.615827 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 10 18:18:35 crc kubenswrapper[4799]: I1010 18:18:35.615849 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 10 18:18:35 crc kubenswrapper[4799]: I1010 18:18:35.615870 4799 reconciler_common.go:293] "Volume detached for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-openstack-cell1\") on node \"crc\" DevicePath \"\"" Oct 10 18:18:35 crc kubenswrapper[4799]: I1010 18:18:35.615890 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8mtpg\" (UniqueName: \"kubernetes.io/projected/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-kube-api-access-8mtpg\") on node \"crc\" DevicePath \"\"" Oct 10 18:18:35 crc kubenswrapper[4799]: I1010 18:18:35.615909 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dca6ec6b-b3be-4391-8f9b-1125167bc0b1-config\") on node \"crc\" DevicePath \"\"" Oct 10 18:18:35 crc kubenswrapper[4799]: I1010 18:18:35.778046 4799 generic.go:334] "Generic (PLEG): container finished" podID="dca6ec6b-b3be-4391-8f9b-1125167bc0b1" containerID="1b52ddb130219486ef68a6f9813a7ef4a511e2e97e5c9634284568b97a1f8765" exitCode=0 Oct 10 18:18:35 crc kubenswrapper[4799]: I1010 18:18:35.778111 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d4c5c46b9-hw5n7" event={"ID":"dca6ec6b-b3be-4391-8f9b-1125167bc0b1","Type":"ContainerDied","Data":"1b52ddb130219486ef68a6f9813a7ef4a511e2e97e5c9634284568b97a1f8765"} Oct 10 18:18:35 crc kubenswrapper[4799]: I1010 18:18:35.778151 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d4c5c46b9-hw5n7" event={"ID":"dca6ec6b-b3be-4391-8f9b-1125167bc0b1","Type":"ContainerDied","Data":"1d89bc74ba924eb1e4c23688183f0ea65c02ab876d955c0d9246e59bd9e5bd57"} Oct 10 18:18:35 crc kubenswrapper[4799]: I1010 18:18:35.778183 4799 scope.go:117] "RemoveContainer" containerID="1b52ddb130219486ef68a6f9813a7ef4a511e2e97e5c9634284568b97a1f8765" Oct 10 18:18:35 crc kubenswrapper[4799]: I1010 18:18:35.778406 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6d4c5c46b9-hw5n7" Oct 10 18:18:35 crc kubenswrapper[4799]: I1010 18:18:35.828911 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d4c5c46b9-hw5n7"] Oct 10 18:18:35 crc kubenswrapper[4799]: I1010 18:18:35.838816 4799 scope.go:117] "RemoveContainer" containerID="f7ab34d127a1a4cf6e42ad2b79a9f8e94cf5b9d5baf5588a286a0c7d1ea0b75b" Oct 10 18:18:35 crc kubenswrapper[4799]: I1010 18:18:35.838862 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6d4c5c46b9-hw5n7"] Oct 10 18:18:35 crc kubenswrapper[4799]: I1010 18:18:35.867977 4799 scope.go:117] "RemoveContainer" containerID="1b52ddb130219486ef68a6f9813a7ef4a511e2e97e5c9634284568b97a1f8765" Oct 10 18:18:35 crc kubenswrapper[4799]: E1010 18:18:35.869461 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b52ddb130219486ef68a6f9813a7ef4a511e2e97e5c9634284568b97a1f8765\": container with ID starting with 1b52ddb130219486ef68a6f9813a7ef4a511e2e97e5c9634284568b97a1f8765 not found: ID does not exist" containerID="1b52ddb130219486ef68a6f9813a7ef4a511e2e97e5c9634284568b97a1f8765" Oct 10 18:18:35 crc kubenswrapper[4799]: I1010 18:18:35.869555 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b52ddb130219486ef68a6f9813a7ef4a511e2e97e5c9634284568b97a1f8765"} err="failed to get container status \"1b52ddb130219486ef68a6f9813a7ef4a511e2e97e5c9634284568b97a1f8765\": rpc error: code = NotFound desc = could not find container \"1b52ddb130219486ef68a6f9813a7ef4a511e2e97e5c9634284568b97a1f8765\": container with ID starting with 1b52ddb130219486ef68a6f9813a7ef4a511e2e97e5c9634284568b97a1f8765 not found: ID does not exist" Oct 10 18:18:35 crc kubenswrapper[4799]: I1010 18:18:35.869616 4799 scope.go:117] "RemoveContainer" containerID="f7ab34d127a1a4cf6e42ad2b79a9f8e94cf5b9d5baf5588a286a0c7d1ea0b75b" Oct 10 18:18:35 crc kubenswrapper[4799]: E1010 18:18:35.870792 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7ab34d127a1a4cf6e42ad2b79a9f8e94cf5b9d5baf5588a286a0c7d1ea0b75b\": container with ID starting with f7ab34d127a1a4cf6e42ad2b79a9f8e94cf5b9d5baf5588a286a0c7d1ea0b75b not found: ID does not exist" containerID="f7ab34d127a1a4cf6e42ad2b79a9f8e94cf5b9d5baf5588a286a0c7d1ea0b75b" Oct 10 18:18:35 crc kubenswrapper[4799]: I1010 18:18:35.870868 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7ab34d127a1a4cf6e42ad2b79a9f8e94cf5b9d5baf5588a286a0c7d1ea0b75b"} err="failed to get container status \"f7ab34d127a1a4cf6e42ad2b79a9f8e94cf5b9d5baf5588a286a0c7d1ea0b75b\": rpc error: code = NotFound desc = could not find container \"f7ab34d127a1a4cf6e42ad2b79a9f8e94cf5b9d5baf5588a286a0c7d1ea0b75b\": container with ID starting with f7ab34d127a1a4cf6e42ad2b79a9f8e94cf5b9d5baf5588a286a0c7d1ea0b75b not found: ID does not exist" Oct 10 18:18:37 crc kubenswrapper[4799]: I1010 18:18:37.431143 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dca6ec6b-b3be-4391-8f9b-1125167bc0b1" path="/var/lib/kubelet/pods/dca6ec6b-b3be-4391-8f9b-1125167bc0b1/volumes" Oct 10 18:18:46 crc kubenswrapper[4799]: I1010 18:18:46.386144 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7"] Oct 10 18:18:46 crc kubenswrapper[4799]: E1010 
18:18:46.387370 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dca6ec6b-b3be-4391-8f9b-1125167bc0b1" containerName="init" Oct 10 18:18:46 crc kubenswrapper[4799]: I1010 18:18:46.387391 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="dca6ec6b-b3be-4391-8f9b-1125167bc0b1" containerName="init" Oct 10 18:18:46 crc kubenswrapper[4799]: E1010 18:18:46.387432 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27d181f5-9c3d-4193-80d5-8a45ea9a282f" containerName="dnsmasq-dns" Oct 10 18:18:46 crc kubenswrapper[4799]: I1010 18:18:46.387445 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="27d181f5-9c3d-4193-80d5-8a45ea9a282f" containerName="dnsmasq-dns" Oct 10 18:18:46 crc kubenswrapper[4799]: E1010 18:18:46.387471 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27d181f5-9c3d-4193-80d5-8a45ea9a282f" containerName="init" Oct 10 18:18:46 crc kubenswrapper[4799]: I1010 18:18:46.387483 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="27d181f5-9c3d-4193-80d5-8a45ea9a282f" containerName="init" Oct 10 18:18:46 crc kubenswrapper[4799]: E1010 18:18:46.387548 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dca6ec6b-b3be-4391-8f9b-1125167bc0b1" containerName="dnsmasq-dns" Oct 10 18:18:46 crc kubenswrapper[4799]: I1010 18:18:46.387561 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="dca6ec6b-b3be-4391-8f9b-1125167bc0b1" containerName="dnsmasq-dns" Oct 10 18:18:46 crc kubenswrapper[4799]: I1010 18:18:46.387968 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="dca6ec6b-b3be-4391-8f9b-1125167bc0b1" containerName="dnsmasq-dns" Oct 10 18:18:46 crc kubenswrapper[4799]: I1010 18:18:46.387992 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="27d181f5-9c3d-4193-80d5-8a45ea9a282f" containerName="dnsmasq-dns" Oct 10 18:18:46 crc kubenswrapper[4799]: I1010 18:18:46.389232 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7" Oct 10 18:18:46 crc kubenswrapper[4799]: I1010 18:18:46.391369 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-rdlhr" Oct 10 18:18:46 crc kubenswrapper[4799]: I1010 18:18:46.392274 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 10 18:18:46 crc kubenswrapper[4799]: I1010 18:18:46.392526 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 10 18:18:46 crc kubenswrapper[4799]: I1010 18:18:46.392728 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 10 18:18:46 crc kubenswrapper[4799]: I1010 18:18:46.405844 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7"] Oct 10 18:18:46 crc kubenswrapper[4799]: I1010 18:18:46.491208 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7d5d2b84-7192-4c14-83c7-2fe15a984da5-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7\" (UID: \"7d5d2b84-7192-4c14-83c7-2fe15a984da5\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7" Oct 10 18:18:46 crc kubenswrapper[4799]: I1010 18:18:46.491290 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87654\" (UniqueName: \"kubernetes.io/projected/7d5d2b84-7192-4c14-83c7-2fe15a984da5-kube-api-access-87654\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7\" (UID: \"7d5d2b84-7192-4c14-83c7-2fe15a984da5\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7" Oct 10 18:18:46 crc kubenswrapper[4799]: I1010 18:18:46.491327 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7d5d2b84-7192-4c14-83c7-2fe15a984da5-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7\" (UID: \"7d5d2b84-7192-4c14-83c7-2fe15a984da5\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7" Oct 10 18:18:46 crc kubenswrapper[4799]: I1010 18:18:46.491402 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d5d2b84-7192-4c14-83c7-2fe15a984da5-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7\" (UID: \"7d5d2b84-7192-4c14-83c7-2fe15a984da5\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7" Oct 10 18:18:46 crc kubenswrapper[4799]: I1010 18:18:46.491427 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7d5d2b84-7192-4c14-83c7-2fe15a984da5-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7\" (UID: \"7d5d2b84-7192-4c14-83c7-2fe15a984da5\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7" Oct 10 18:18:46 crc kubenswrapper[4799]: I1010 18:18:46.594139 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-87654\" (UniqueName: \"kubernetes.io/projected/7d5d2b84-7192-4c14-83c7-2fe15a984da5-kube-api-access-87654\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7\" (UID: \"7d5d2b84-7192-4c14-83c7-2fe15a984da5\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7" Oct 10 18:18:46 crc kubenswrapper[4799]: I1010 18:18:46.594271 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7d5d2b84-7192-4c14-83c7-2fe15a984da5-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7\" (UID: \"7d5d2b84-7192-4c14-83c7-2fe15a984da5\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7" Oct 10 18:18:46 crc kubenswrapper[4799]: I1010 18:18:46.594454 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d5d2b84-7192-4c14-83c7-2fe15a984da5-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7\" (UID: \"7d5d2b84-7192-4c14-83c7-2fe15a984da5\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7" Oct 10 18:18:46 crc kubenswrapper[4799]: I1010 18:18:46.594550 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7d5d2b84-7192-4c14-83c7-2fe15a984da5-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7\" (UID: \"7d5d2b84-7192-4c14-83c7-2fe15a984da5\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7" Oct 10 18:18:46 crc kubenswrapper[4799]: I1010 18:18:46.594935 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7d5d2b84-7192-4c14-83c7-2fe15a984da5-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7\" (UID: \"7d5d2b84-7192-4c14-83c7-2fe15a984da5\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7" Oct 10 18:18:46 crc kubenswrapper[4799]: I1010 18:18:46.600689 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7d5d2b84-7192-4c14-83c7-2fe15a984da5-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7\" (UID: \"7d5d2b84-7192-4c14-83c7-2fe15a984da5\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7" Oct 10 18:18:46 crc kubenswrapper[4799]: I1010 18:18:46.602817 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d5d2b84-7192-4c14-83c7-2fe15a984da5-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7\" (UID: \"7d5d2b84-7192-4c14-83c7-2fe15a984da5\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7" Oct 10 18:18:46 crc kubenswrapper[4799]: I1010 18:18:46.608360 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7d5d2b84-7192-4c14-83c7-2fe15a984da5-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7\" (UID: \"7d5d2b84-7192-4c14-83c7-2fe15a984da5\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7" Oct 10 18:18:46 crc 
kubenswrapper[4799]: I1010 18:18:46.613348 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7d5d2b84-7192-4c14-83c7-2fe15a984da5-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7\" (UID: \"7d5d2b84-7192-4c14-83c7-2fe15a984da5\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7" Oct 10 18:18:46 crc kubenswrapper[4799]: I1010 18:18:46.616096 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87654\" (UniqueName: \"kubernetes.io/projected/7d5d2b84-7192-4c14-83c7-2fe15a984da5-kube-api-access-87654\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7\" (UID: \"7d5d2b84-7192-4c14-83c7-2fe15a984da5\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7" Oct 10 18:18:46 crc kubenswrapper[4799]: I1010 18:18:46.709592 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7" Oct 10 18:18:47 crc kubenswrapper[4799]: I1010 18:18:47.289563 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7"] Oct 10 18:18:47 crc kubenswrapper[4799]: W1010 18:18:47.299972 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7d5d2b84_7192_4c14_83c7_2fe15a984da5.slice/crio-feb5d467c01619b12f1021253cedcaa164e740155f4722160bbcba77c4647a00 WatchSource:0}: Error finding container feb5d467c01619b12f1021253cedcaa164e740155f4722160bbcba77c4647a00: Status 404 returned error can't find the container with id feb5d467c01619b12f1021253cedcaa164e740155f4722160bbcba77c4647a00 Oct 10 18:18:47 crc kubenswrapper[4799]: I1010 18:18:47.412647 4799 scope.go:117] "RemoveContainer" containerID="1f0ceb697c256a28cbd9c8e1e3aa08e1dc732ac4382bc8944609e36db615c835" Oct 10 18:18:47 crc kubenswrapper[4799]: E1010 18:18:47.413410 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:18:47 crc kubenswrapper[4799]: I1010 18:18:47.942679 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7" event={"ID":"7d5d2b84-7192-4c14-83c7-2fe15a984da5","Type":"ContainerStarted","Data":"feb5d467c01619b12f1021253cedcaa164e740155f4722160bbcba77c4647a00"} Oct 10 18:18:56 crc kubenswrapper[4799]: I1010 18:18:56.114346 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 10 18:18:57 crc kubenswrapper[4799]: I1010 18:18:57.053924 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7" event={"ID":"7d5d2b84-7192-4c14-83c7-2fe15a984da5","Type":"ContainerStarted","Data":"19b1ff9a138ba55b5d7d85759b74ee66d0e060058119480dfb606f2b11241a9e"} Oct 10 18:18:59 crc kubenswrapper[4799]: I1010 18:18:59.404227 4799 scope.go:117] "RemoveContainer" 
containerID="1f0ceb697c256a28cbd9c8e1e3aa08e1dc732ac4382bc8944609e36db615c835" Oct 10 18:18:59 crc kubenswrapper[4799]: E1010 18:18:59.405836 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:19:10 crc kubenswrapper[4799]: I1010 18:19:10.221993 4799 generic.go:334] "Generic (PLEG): container finished" podID="7d5d2b84-7192-4c14-83c7-2fe15a984da5" containerID="19b1ff9a138ba55b5d7d85759b74ee66d0e060058119480dfb606f2b11241a9e" exitCode=0 Oct 10 18:19:10 crc kubenswrapper[4799]: I1010 18:19:10.222130 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7" event={"ID":"7d5d2b84-7192-4c14-83c7-2fe15a984da5","Type":"ContainerDied","Data":"19b1ff9a138ba55b5d7d85759b74ee66d0e060058119480dfb606f2b11241a9e"} Oct 10 18:19:11 crc kubenswrapper[4799]: I1010 18:19:11.402386 4799 scope.go:117] "RemoveContainer" containerID="1f0ceb697c256a28cbd9c8e1e3aa08e1dc732ac4382bc8944609e36db615c835" Oct 10 18:19:11 crc kubenswrapper[4799]: E1010 18:19:11.402903 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:19:11 crc kubenswrapper[4799]: I1010 18:19:11.828082 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7" Oct 10 18:19:11 crc kubenswrapper[4799]: I1010 18:19:11.943672 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7d5d2b84-7192-4c14-83c7-2fe15a984da5-ceph\") pod \"7d5d2b84-7192-4c14-83c7-2fe15a984da5\" (UID: \"7d5d2b84-7192-4c14-83c7-2fe15a984da5\") " Oct 10 18:19:11 crc kubenswrapper[4799]: I1010 18:19:11.943809 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-87654\" (UniqueName: \"kubernetes.io/projected/7d5d2b84-7192-4c14-83c7-2fe15a984da5-kube-api-access-87654\") pod \"7d5d2b84-7192-4c14-83c7-2fe15a984da5\" (UID: \"7d5d2b84-7192-4c14-83c7-2fe15a984da5\") " Oct 10 18:19:11 crc kubenswrapper[4799]: I1010 18:19:11.943933 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7d5d2b84-7192-4c14-83c7-2fe15a984da5-ssh-key\") pod \"7d5d2b84-7192-4c14-83c7-2fe15a984da5\" (UID: \"7d5d2b84-7192-4c14-83c7-2fe15a984da5\") " Oct 10 18:19:11 crc kubenswrapper[4799]: I1010 18:19:11.943965 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7d5d2b84-7192-4c14-83c7-2fe15a984da5-inventory\") pod \"7d5d2b84-7192-4c14-83c7-2fe15a984da5\" (UID: \"7d5d2b84-7192-4c14-83c7-2fe15a984da5\") " Oct 10 18:19:11 crc kubenswrapper[4799]: I1010 18:19:11.944025 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d5d2b84-7192-4c14-83c7-2fe15a984da5-pre-adoption-validation-combined-ca-bundle\") pod \"7d5d2b84-7192-4c14-83c7-2fe15a984da5\" (UID: \"7d5d2b84-7192-4c14-83c7-2fe15a984da5\") " Oct 10 18:19:11 crc kubenswrapper[4799]: I1010 18:19:11.951053 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d5d2b84-7192-4c14-83c7-2fe15a984da5-pre-adoption-validation-combined-ca-bundle" (OuterVolumeSpecName: "pre-adoption-validation-combined-ca-bundle") pod "7d5d2b84-7192-4c14-83c7-2fe15a984da5" (UID: "7d5d2b84-7192-4c14-83c7-2fe15a984da5"). InnerVolumeSpecName "pre-adoption-validation-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:19:11 crc kubenswrapper[4799]: I1010 18:19:11.963684 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d5d2b84-7192-4c14-83c7-2fe15a984da5-kube-api-access-87654" (OuterVolumeSpecName: "kube-api-access-87654") pod "7d5d2b84-7192-4c14-83c7-2fe15a984da5" (UID: "7d5d2b84-7192-4c14-83c7-2fe15a984da5"). InnerVolumeSpecName "kube-api-access-87654". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:19:11 crc kubenswrapper[4799]: I1010 18:19:11.963692 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d5d2b84-7192-4c14-83c7-2fe15a984da5-ceph" (OuterVolumeSpecName: "ceph") pod "7d5d2b84-7192-4c14-83c7-2fe15a984da5" (UID: "7d5d2b84-7192-4c14-83c7-2fe15a984da5"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:19:11 crc kubenswrapper[4799]: I1010 18:19:11.990124 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d5d2b84-7192-4c14-83c7-2fe15a984da5-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "7d5d2b84-7192-4c14-83c7-2fe15a984da5" (UID: "7d5d2b84-7192-4c14-83c7-2fe15a984da5"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:19:12 crc kubenswrapper[4799]: I1010 18:19:12.003948 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d5d2b84-7192-4c14-83c7-2fe15a984da5-inventory" (OuterVolumeSpecName: "inventory") pod "7d5d2b84-7192-4c14-83c7-2fe15a984da5" (UID: "7d5d2b84-7192-4c14-83c7-2fe15a984da5"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:19:12 crc kubenswrapper[4799]: I1010 18:19:12.048519 4799 reconciler_common.go:293] "Volume detached for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d5d2b84-7192-4c14-83c7-2fe15a984da5-pre-adoption-validation-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:19:12 crc kubenswrapper[4799]: I1010 18:19:12.048570 4799 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7d5d2b84-7192-4c14-83c7-2fe15a984da5-ceph\") on node \"crc\" DevicePath \"\"" Oct 10 18:19:12 crc kubenswrapper[4799]: I1010 18:19:12.048592 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-87654\" (UniqueName: \"kubernetes.io/projected/7d5d2b84-7192-4c14-83c7-2fe15a984da5-kube-api-access-87654\") on node \"crc\" DevicePath \"\"" Oct 10 18:19:12 crc kubenswrapper[4799]: I1010 18:19:12.048610 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7d5d2b84-7192-4c14-83c7-2fe15a984da5-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 10 18:19:12 crc kubenswrapper[4799]: I1010 18:19:12.048629 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7d5d2b84-7192-4c14-83c7-2fe15a984da5-inventory\") on node \"crc\" DevicePath \"\"" Oct 10 18:19:12 crc kubenswrapper[4799]: I1010 18:19:12.249530 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7" event={"ID":"7d5d2b84-7192-4c14-83c7-2fe15a984da5","Type":"ContainerDied","Data":"feb5d467c01619b12f1021253cedcaa164e740155f4722160bbcba77c4647a00"} Oct 10 18:19:12 crc kubenswrapper[4799]: I1010 18:19:12.249589 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="feb5d467c01619b12f1021253cedcaa164e740155f4722160bbcba77c4647a00" Oct 10 18:19:12 crc kubenswrapper[4799]: I1010 18:19:12.249615 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7" Oct 10 18:19:13 crc kubenswrapper[4799]: I1010 18:19:13.958460 4799 scope.go:117] "RemoveContainer" containerID="6326bb1d1e25431d162328177b4897e073738f078e156cda7b3b08ab999f811f" Oct 10 18:19:13 crc kubenswrapper[4799]: I1010 18:19:13.995881 4799 scope.go:117] "RemoveContainer" containerID="ee9fc7fc3e78ef13cd3247e11f7f009113d4fec2f9f8f35f0e2024b069163e39" Oct 10 18:19:20 crc kubenswrapper[4799]: I1010 18:19:20.110237 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4"] Oct 10 18:19:20 crc kubenswrapper[4799]: E1010 18:19:20.111372 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d5d2b84-7192-4c14-83c7-2fe15a984da5" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Oct 10 18:19:20 crc kubenswrapper[4799]: I1010 18:19:20.111390 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d5d2b84-7192-4c14-83c7-2fe15a984da5" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Oct 10 18:19:20 crc kubenswrapper[4799]: I1010 18:19:20.111713 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d5d2b84-7192-4c14-83c7-2fe15a984da5" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Oct 10 18:19:20 crc kubenswrapper[4799]: I1010 18:19:20.112677 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4" Oct 10 18:19:20 crc kubenswrapper[4799]: I1010 18:19:20.115108 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 10 18:19:20 crc kubenswrapper[4799]: I1010 18:19:20.115467 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 10 18:19:20 crc kubenswrapper[4799]: I1010 18:19:20.115806 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-rdlhr" Oct 10 18:19:20 crc kubenswrapper[4799]: I1010 18:19:20.115940 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 10 18:19:20 crc kubenswrapper[4799]: I1010 18:19:20.123182 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4"] Oct 10 18:19:20 crc kubenswrapper[4799]: I1010 18:19:20.239693 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-csp4n\" (UniqueName: \"kubernetes.io/projected/c4e679ee-ac3b-4e3c-9869-b86de400033e-kube-api-access-csp4n\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4\" (UID: \"c4e679ee-ac3b-4e3c-9869-b86de400033e\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4" Oct 10 18:19:20 crc kubenswrapper[4799]: I1010 18:19:20.239825 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4e679ee-ac3b-4e3c-9869-b86de400033e-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4\" (UID: \"c4e679ee-ac3b-4e3c-9869-b86de400033e\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4" Oct 10 18:19:20 crc kubenswrapper[4799]: I1010 18:19:20.240047 4799 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c4e679ee-ac3b-4e3c-9869-b86de400033e-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4\" (UID: \"c4e679ee-ac3b-4e3c-9869-b86de400033e\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4" Oct 10 18:19:20 crc kubenswrapper[4799]: I1010 18:19:20.240089 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c4e679ee-ac3b-4e3c-9869-b86de400033e-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4\" (UID: \"c4e679ee-ac3b-4e3c-9869-b86de400033e\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4" Oct 10 18:19:20 crc kubenswrapper[4799]: I1010 18:19:20.240114 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c4e679ee-ac3b-4e3c-9869-b86de400033e-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4\" (UID: \"c4e679ee-ac3b-4e3c-9869-b86de400033e\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4" Oct 10 18:19:20 crc kubenswrapper[4799]: I1010 18:19:20.341114 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4e679ee-ac3b-4e3c-9869-b86de400033e-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4\" (UID: \"c4e679ee-ac3b-4e3c-9869-b86de400033e\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4" Oct 10 18:19:20 crc kubenswrapper[4799]: I1010 18:19:20.341210 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c4e679ee-ac3b-4e3c-9869-b86de400033e-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4\" (UID: \"c4e679ee-ac3b-4e3c-9869-b86de400033e\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4" Oct 10 18:19:20 crc kubenswrapper[4799]: I1010 18:19:20.341248 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c4e679ee-ac3b-4e3c-9869-b86de400033e-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4\" (UID: \"c4e679ee-ac3b-4e3c-9869-b86de400033e\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4" Oct 10 18:19:20 crc kubenswrapper[4799]: I1010 18:19:20.341266 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c4e679ee-ac3b-4e3c-9869-b86de400033e-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4\" (UID: \"c4e679ee-ac3b-4e3c-9869-b86de400033e\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4" Oct 10 18:19:20 crc kubenswrapper[4799]: I1010 18:19:20.341345 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csp4n\" (UniqueName: \"kubernetes.io/projected/c4e679ee-ac3b-4e3c-9869-b86de400033e-kube-api-access-csp4n\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4\" (UID: \"c4e679ee-ac3b-4e3c-9869-b86de400033e\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4" Oct 10 18:19:20 crc kubenswrapper[4799]: I1010 18:19:20.349479 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4e679ee-ac3b-4e3c-9869-b86de400033e-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4\" (UID: \"c4e679ee-ac3b-4e3c-9869-b86de400033e\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4" Oct 10 18:19:20 crc kubenswrapper[4799]: I1010 18:19:20.351846 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c4e679ee-ac3b-4e3c-9869-b86de400033e-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4\" (UID: \"c4e679ee-ac3b-4e3c-9869-b86de400033e\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4" Oct 10 18:19:20 crc kubenswrapper[4799]: I1010 18:19:20.354473 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c4e679ee-ac3b-4e3c-9869-b86de400033e-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4\" (UID: \"c4e679ee-ac3b-4e3c-9869-b86de400033e\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4" Oct 10 18:19:20 crc kubenswrapper[4799]: I1010 18:19:20.357231 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c4e679ee-ac3b-4e3c-9869-b86de400033e-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4\" (UID: \"c4e679ee-ac3b-4e3c-9869-b86de400033e\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4" Oct 10 18:19:20 crc kubenswrapper[4799]: I1010 18:19:20.363324 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-csp4n\" (UniqueName: \"kubernetes.io/projected/c4e679ee-ac3b-4e3c-9869-b86de400033e-kube-api-access-csp4n\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4\" (UID: \"c4e679ee-ac3b-4e3c-9869-b86de400033e\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4" Oct 10 18:19:20 crc kubenswrapper[4799]: I1010 18:19:20.467870 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4" Oct 10 18:19:21 crc kubenswrapper[4799]: I1010 18:19:21.063993 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-db-create-sms8r"] Oct 10 18:19:21 crc kubenswrapper[4799]: I1010 18:19:21.079381 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-db-create-sms8r"] Oct 10 18:19:21 crc kubenswrapper[4799]: W1010 18:19:21.086972 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc4e679ee_ac3b_4e3c_9869_b86de400033e.slice/crio-65ff8f62cbeaa94c9ff76cbb40be2b84df278a698b1fec3129cf9bf099168076 WatchSource:0}: Error finding container 65ff8f62cbeaa94c9ff76cbb40be2b84df278a698b1fec3129cf9bf099168076: Status 404 returned error can't find the container with id 65ff8f62cbeaa94c9ff76cbb40be2b84df278a698b1fec3129cf9bf099168076 Oct 10 18:19:21 crc kubenswrapper[4799]: I1010 18:19:21.092121 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4"] Oct 10 18:19:21 crc kubenswrapper[4799]: I1010 18:19:21.368670 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4" event={"ID":"c4e679ee-ac3b-4e3c-9869-b86de400033e","Type":"ContainerStarted","Data":"65ff8f62cbeaa94c9ff76cbb40be2b84df278a698b1fec3129cf9bf099168076"} Oct 10 18:19:21 crc kubenswrapper[4799]: I1010 18:19:21.423263 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68c3ec49-6c3d-4520-a5d9-a2e0340854bd" path="/var/lib/kubelet/pods/68c3ec49-6c3d-4520-a5d9-a2e0340854bd/volumes" Oct 10 18:19:22 crc kubenswrapper[4799]: I1010 18:19:22.101526 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2d26t"] Oct 10 18:19:22 crc kubenswrapper[4799]: I1010 18:19:22.103690 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2d26t" Oct 10 18:19:22 crc kubenswrapper[4799]: I1010 18:19:22.131708 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2d26t"] Oct 10 18:19:22 crc kubenswrapper[4799]: I1010 18:19:22.195512 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d2cfa80-6751-4c4d-acd1-213dd64c059d-catalog-content\") pod \"redhat-operators-2d26t\" (UID: \"7d2cfa80-6751-4c4d-acd1-213dd64c059d\") " pod="openshift-marketplace/redhat-operators-2d26t" Oct 10 18:19:22 crc kubenswrapper[4799]: I1010 18:19:22.195958 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdwhv\" (UniqueName: \"kubernetes.io/projected/7d2cfa80-6751-4c4d-acd1-213dd64c059d-kube-api-access-jdwhv\") pod \"redhat-operators-2d26t\" (UID: \"7d2cfa80-6751-4c4d-acd1-213dd64c059d\") " pod="openshift-marketplace/redhat-operators-2d26t" Oct 10 18:19:22 crc kubenswrapper[4799]: I1010 18:19:22.196216 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d2cfa80-6751-4c4d-acd1-213dd64c059d-utilities\") pod \"redhat-operators-2d26t\" (UID: \"7d2cfa80-6751-4c4d-acd1-213dd64c059d\") " pod="openshift-marketplace/redhat-operators-2d26t" Oct 10 18:19:22 crc kubenswrapper[4799]: I1010 18:19:22.298718 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d2cfa80-6751-4c4d-acd1-213dd64c059d-utilities\") pod \"redhat-operators-2d26t\" (UID: \"7d2cfa80-6751-4c4d-acd1-213dd64c059d\") " pod="openshift-marketplace/redhat-operators-2d26t" Oct 10 18:19:22 crc kubenswrapper[4799]: I1010 18:19:22.298895 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d2cfa80-6751-4c4d-acd1-213dd64c059d-catalog-content\") pod \"redhat-operators-2d26t\" (UID: \"7d2cfa80-6751-4c4d-acd1-213dd64c059d\") " pod="openshift-marketplace/redhat-operators-2d26t" Oct 10 18:19:22 crc kubenswrapper[4799]: I1010 18:19:22.298957 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdwhv\" (UniqueName: \"kubernetes.io/projected/7d2cfa80-6751-4c4d-acd1-213dd64c059d-kube-api-access-jdwhv\") pod \"redhat-operators-2d26t\" (UID: \"7d2cfa80-6751-4c4d-acd1-213dd64c059d\") " pod="openshift-marketplace/redhat-operators-2d26t" Oct 10 18:19:22 crc kubenswrapper[4799]: I1010 18:19:22.299575 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d2cfa80-6751-4c4d-acd1-213dd64c059d-catalog-content\") pod \"redhat-operators-2d26t\" (UID: \"7d2cfa80-6751-4c4d-acd1-213dd64c059d\") " pod="openshift-marketplace/redhat-operators-2d26t" Oct 10 18:19:22 crc kubenswrapper[4799]: I1010 18:19:22.299583 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d2cfa80-6751-4c4d-acd1-213dd64c059d-utilities\") pod \"redhat-operators-2d26t\" (UID: \"7d2cfa80-6751-4c4d-acd1-213dd64c059d\") " pod="openshift-marketplace/redhat-operators-2d26t" Oct 10 18:19:22 crc kubenswrapper[4799]: I1010 18:19:22.321745 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-jdwhv\" (UniqueName: \"kubernetes.io/projected/7d2cfa80-6751-4c4d-acd1-213dd64c059d-kube-api-access-jdwhv\") pod \"redhat-operators-2d26t\" (UID: \"7d2cfa80-6751-4c4d-acd1-213dd64c059d\") " pod="openshift-marketplace/redhat-operators-2d26t" Oct 10 18:19:22 crc kubenswrapper[4799]: I1010 18:19:22.378546 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4" event={"ID":"c4e679ee-ac3b-4e3c-9869-b86de400033e","Type":"ContainerStarted","Data":"de61c9fb0e935077f74e48082343ff6b78f505584c56473ce956dedca0492bbe"} Oct 10 18:19:22 crc kubenswrapper[4799]: I1010 18:19:22.396761 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4" podStartSLOduration=1.848317619 podStartE2EDuration="2.396727409s" podCreationTimestamp="2025-10-10 18:19:20 +0000 UTC" firstStartedPulling="2025-10-10 18:19:21.089997854 +0000 UTC m=+6454.598321979" lastFinishedPulling="2025-10-10 18:19:21.638407614 +0000 UTC m=+6455.146731769" observedRunningTime="2025-10-10 18:19:22.394022093 +0000 UTC m=+6455.902346218" watchObservedRunningTime="2025-10-10 18:19:22.396727409 +0000 UTC m=+6455.905051534" Oct 10 18:19:22 crc kubenswrapper[4799]: I1010 18:19:22.402181 4799 scope.go:117] "RemoveContainer" containerID="1f0ceb697c256a28cbd9c8e1e3aa08e1dc732ac4382bc8944609e36db615c835" Oct 10 18:19:22 crc kubenswrapper[4799]: E1010 18:19:22.402613 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:19:22 crc kubenswrapper[4799]: I1010 18:19:22.423573 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2d26t" Oct 10 18:19:22 crc kubenswrapper[4799]: I1010 18:19:22.964476 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2d26t"] Oct 10 18:19:22 crc kubenswrapper[4799]: W1010 18:19:22.973315 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7d2cfa80_6751_4c4d_acd1_213dd64c059d.slice/crio-f4ac0e1c7f7da2e9217a807ef84f63673123c4b73820924c0cd1087cee2f327e WatchSource:0}: Error finding container f4ac0e1c7f7da2e9217a807ef84f63673123c4b73820924c0cd1087cee2f327e: Status 404 returned error can't find the container with id f4ac0e1c7f7da2e9217a807ef84f63673123c4b73820924c0cd1087cee2f327e Oct 10 18:19:23 crc kubenswrapper[4799]: I1010 18:19:23.393188 4799 generic.go:334] "Generic (PLEG): container finished" podID="7d2cfa80-6751-4c4d-acd1-213dd64c059d" containerID="e94ce23a41cfceb425d4937800d014065b34df082facbec67d19cf749ae97548" exitCode=0 Oct 10 18:19:23 crc kubenswrapper[4799]: I1010 18:19:23.393255 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2d26t" event={"ID":"7d2cfa80-6751-4c4d-acd1-213dd64c059d","Type":"ContainerDied","Data":"e94ce23a41cfceb425d4937800d014065b34df082facbec67d19cf749ae97548"} Oct 10 18:19:23 crc kubenswrapper[4799]: I1010 18:19:23.394858 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2d26t" event={"ID":"7d2cfa80-6751-4c4d-acd1-213dd64c059d","Type":"ContainerStarted","Data":"f4ac0e1c7f7da2e9217a807ef84f63673123c4b73820924c0cd1087cee2f327e"} Oct 10 18:19:24 crc kubenswrapper[4799]: I1010 18:19:24.408564 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2d26t" event={"ID":"7d2cfa80-6751-4c4d-acd1-213dd64c059d","Type":"ContainerStarted","Data":"f688b57c82f9531986e9a2b4eaae7c9b744f5fa88d46d83f274dc56f1535d52d"} Oct 10 18:19:28 crc kubenswrapper[4799]: I1010 18:19:28.481564 4799 generic.go:334] "Generic (PLEG): container finished" podID="7d2cfa80-6751-4c4d-acd1-213dd64c059d" containerID="f688b57c82f9531986e9a2b4eaae7c9b744f5fa88d46d83f274dc56f1535d52d" exitCode=0 Oct 10 18:19:28 crc kubenswrapper[4799]: I1010 18:19:28.481687 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2d26t" event={"ID":"7d2cfa80-6751-4c4d-acd1-213dd64c059d","Type":"ContainerDied","Data":"f688b57c82f9531986e9a2b4eaae7c9b744f5fa88d46d83f274dc56f1535d52d"} Oct 10 18:19:29 crc kubenswrapper[4799]: I1010 18:19:29.511498 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2d26t" event={"ID":"7d2cfa80-6751-4c4d-acd1-213dd64c059d","Type":"ContainerStarted","Data":"5a1a8d7fe40d2bf9cdc602729fe4fc6943500a7303bddca7e6e4d107b467a10a"} Oct 10 18:19:29 crc kubenswrapper[4799]: I1010 18:19:29.559897 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2d26t" podStartSLOduration=2.075446918 podStartE2EDuration="7.559870655s" podCreationTimestamp="2025-10-10 18:19:22 +0000 UTC" firstStartedPulling="2025-10-10 18:19:23.396932621 +0000 UTC m=+6456.905256756" lastFinishedPulling="2025-10-10 18:19:28.881356368 +0000 UTC m=+6462.389680493" observedRunningTime="2025-10-10 18:19:29.537900009 +0000 UTC m=+6463.046224164" watchObservedRunningTime="2025-10-10 18:19:29.559870655 +0000 UTC m=+6463.068194780" Oct 10 18:19:32 crc 
kubenswrapper[4799]: I1010 18:19:32.028963 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-85a1-account-create-nq577"] Oct 10 18:19:32 crc kubenswrapper[4799]: I1010 18:19:32.038596 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-85a1-account-create-nq577"] Oct 10 18:19:32 crc kubenswrapper[4799]: I1010 18:19:32.423812 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2d26t" Oct 10 18:19:32 crc kubenswrapper[4799]: I1010 18:19:32.423873 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2d26t" Oct 10 18:19:33 crc kubenswrapper[4799]: I1010 18:19:33.417011 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80a0077a-7750-4ae6-b57b-b248f493764a" path="/var/lib/kubelet/pods/80a0077a-7750-4ae6-b57b-b248f493764a/volumes" Oct 10 18:19:33 crc kubenswrapper[4799]: I1010 18:19:33.488015 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-2d26t" podUID="7d2cfa80-6751-4c4d-acd1-213dd64c059d" containerName="registry-server" probeResult="failure" output=< Oct 10 18:19:33 crc kubenswrapper[4799]: timeout: failed to connect service ":50051" within 1s Oct 10 18:19:33 crc kubenswrapper[4799]: > Oct 10 18:19:34 crc kubenswrapper[4799]: I1010 18:19:34.402539 4799 scope.go:117] "RemoveContainer" containerID="1f0ceb697c256a28cbd9c8e1e3aa08e1dc732ac4382bc8944609e36db615c835" Oct 10 18:19:34 crc kubenswrapper[4799]: E1010 18:19:34.402929 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:19:39 crc kubenswrapper[4799]: I1010 18:19:39.061737 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-persistence-db-create-dqhfx"] Oct 10 18:19:39 crc kubenswrapper[4799]: I1010 18:19:39.066583 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-persistence-db-create-dqhfx"] Oct 10 18:19:39 crc kubenswrapper[4799]: I1010 18:19:39.415220 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e69d12b-eaf4-4325-aab1-a9d6bfaca44b" path="/var/lib/kubelet/pods/0e69d12b-eaf4-4325-aab1-a9d6bfaca44b/volumes" Oct 10 18:19:42 crc kubenswrapper[4799]: I1010 18:19:42.512650 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2d26t" Oct 10 18:19:42 crc kubenswrapper[4799]: I1010 18:19:42.607404 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2d26t" Oct 10 18:19:45 crc kubenswrapper[4799]: I1010 18:19:45.945278 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2d26t"] Oct 10 18:19:45 crc kubenswrapper[4799]: I1010 18:19:45.946522 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-2d26t" podUID="7d2cfa80-6751-4c4d-acd1-213dd64c059d" containerName="registry-server" containerID="cri-o://5a1a8d7fe40d2bf9cdc602729fe4fc6943500a7303bddca7e6e4d107b467a10a" gracePeriod=2 Oct 10 18:19:46 crc 
kubenswrapper[4799]: I1010 18:19:46.504993 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2d26t" Oct 10 18:19:46 crc kubenswrapper[4799]: I1010 18:19:46.627631 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d2cfa80-6751-4c4d-acd1-213dd64c059d-catalog-content\") pod \"7d2cfa80-6751-4c4d-acd1-213dd64c059d\" (UID: \"7d2cfa80-6751-4c4d-acd1-213dd64c059d\") " Oct 10 18:19:46 crc kubenswrapper[4799]: I1010 18:19:46.627791 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jdwhv\" (UniqueName: \"kubernetes.io/projected/7d2cfa80-6751-4c4d-acd1-213dd64c059d-kube-api-access-jdwhv\") pod \"7d2cfa80-6751-4c4d-acd1-213dd64c059d\" (UID: \"7d2cfa80-6751-4c4d-acd1-213dd64c059d\") " Oct 10 18:19:46 crc kubenswrapper[4799]: I1010 18:19:46.628218 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d2cfa80-6751-4c4d-acd1-213dd64c059d-utilities\") pod \"7d2cfa80-6751-4c4d-acd1-213dd64c059d\" (UID: \"7d2cfa80-6751-4c4d-acd1-213dd64c059d\") " Oct 10 18:19:46 crc kubenswrapper[4799]: I1010 18:19:46.628953 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7d2cfa80-6751-4c4d-acd1-213dd64c059d-utilities" (OuterVolumeSpecName: "utilities") pod "7d2cfa80-6751-4c4d-acd1-213dd64c059d" (UID: "7d2cfa80-6751-4c4d-acd1-213dd64c059d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:19:46 crc kubenswrapper[4799]: I1010 18:19:46.629192 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d2cfa80-6751-4c4d-acd1-213dd64c059d-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 18:19:46 crc kubenswrapper[4799]: I1010 18:19:46.635611 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d2cfa80-6751-4c4d-acd1-213dd64c059d-kube-api-access-jdwhv" (OuterVolumeSpecName: "kube-api-access-jdwhv") pod "7d2cfa80-6751-4c4d-acd1-213dd64c059d" (UID: "7d2cfa80-6751-4c4d-acd1-213dd64c059d"). InnerVolumeSpecName "kube-api-access-jdwhv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:19:46 crc kubenswrapper[4799]: I1010 18:19:46.700527 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7d2cfa80-6751-4c4d-acd1-213dd64c059d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7d2cfa80-6751-4c4d-acd1-213dd64c059d" (UID: "7d2cfa80-6751-4c4d-acd1-213dd64c059d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:19:46 crc kubenswrapper[4799]: I1010 18:19:46.734665 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d2cfa80-6751-4c4d-acd1-213dd64c059d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 18:19:46 crc kubenswrapper[4799]: I1010 18:19:46.734746 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jdwhv\" (UniqueName: \"kubernetes.io/projected/7d2cfa80-6751-4c4d-acd1-213dd64c059d-kube-api-access-jdwhv\") on node \"crc\" DevicePath \"\"" Oct 10 18:19:46 crc kubenswrapper[4799]: I1010 18:19:46.749268 4799 generic.go:334] "Generic (PLEG): container finished" podID="7d2cfa80-6751-4c4d-acd1-213dd64c059d" containerID="5a1a8d7fe40d2bf9cdc602729fe4fc6943500a7303bddca7e6e4d107b467a10a" exitCode=0 Oct 10 18:19:46 crc kubenswrapper[4799]: I1010 18:19:46.749327 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2d26t" event={"ID":"7d2cfa80-6751-4c4d-acd1-213dd64c059d","Type":"ContainerDied","Data":"5a1a8d7fe40d2bf9cdc602729fe4fc6943500a7303bddca7e6e4d107b467a10a"} Oct 10 18:19:46 crc kubenswrapper[4799]: I1010 18:19:46.749369 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2d26t" event={"ID":"7d2cfa80-6751-4c4d-acd1-213dd64c059d","Type":"ContainerDied","Data":"f4ac0e1c7f7da2e9217a807ef84f63673123c4b73820924c0cd1087cee2f327e"} Oct 10 18:19:46 crc kubenswrapper[4799]: I1010 18:19:46.749379 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2d26t" Oct 10 18:19:46 crc kubenswrapper[4799]: I1010 18:19:46.749392 4799 scope.go:117] "RemoveContainer" containerID="5a1a8d7fe40d2bf9cdc602729fe4fc6943500a7303bddca7e6e4d107b467a10a" Oct 10 18:19:46 crc kubenswrapper[4799]: I1010 18:19:46.794066 4799 scope.go:117] "RemoveContainer" containerID="f688b57c82f9531986e9a2b4eaae7c9b744f5fa88d46d83f274dc56f1535d52d" Oct 10 18:19:46 crc kubenswrapper[4799]: I1010 18:19:46.807538 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2d26t"] Oct 10 18:19:46 crc kubenswrapper[4799]: I1010 18:19:46.823224 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-2d26t"] Oct 10 18:19:46 crc kubenswrapper[4799]: I1010 18:19:46.829107 4799 scope.go:117] "RemoveContainer" containerID="e94ce23a41cfceb425d4937800d014065b34df082facbec67d19cf749ae97548" Oct 10 18:19:46 crc kubenswrapper[4799]: I1010 18:19:46.887480 4799 scope.go:117] "RemoveContainer" containerID="5a1a8d7fe40d2bf9cdc602729fe4fc6943500a7303bddca7e6e4d107b467a10a" Oct 10 18:19:46 crc kubenswrapper[4799]: E1010 18:19:46.888737 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a1a8d7fe40d2bf9cdc602729fe4fc6943500a7303bddca7e6e4d107b467a10a\": container with ID starting with 5a1a8d7fe40d2bf9cdc602729fe4fc6943500a7303bddca7e6e4d107b467a10a not found: ID does not exist" containerID="5a1a8d7fe40d2bf9cdc602729fe4fc6943500a7303bddca7e6e4d107b467a10a" Oct 10 18:19:46 crc kubenswrapper[4799]: I1010 18:19:46.888888 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a1a8d7fe40d2bf9cdc602729fe4fc6943500a7303bddca7e6e4d107b467a10a"} err="failed to get container status \"5a1a8d7fe40d2bf9cdc602729fe4fc6943500a7303bddca7e6e4d107b467a10a\": 
rpc error: code = NotFound desc = could not find container \"5a1a8d7fe40d2bf9cdc602729fe4fc6943500a7303bddca7e6e4d107b467a10a\": container with ID starting with 5a1a8d7fe40d2bf9cdc602729fe4fc6943500a7303bddca7e6e4d107b467a10a not found: ID does not exist" Oct 10 18:19:46 crc kubenswrapper[4799]: I1010 18:19:46.888943 4799 scope.go:117] "RemoveContainer" containerID="f688b57c82f9531986e9a2b4eaae7c9b744f5fa88d46d83f274dc56f1535d52d" Oct 10 18:19:46 crc kubenswrapper[4799]: E1010 18:19:46.891545 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f688b57c82f9531986e9a2b4eaae7c9b744f5fa88d46d83f274dc56f1535d52d\": container with ID starting with f688b57c82f9531986e9a2b4eaae7c9b744f5fa88d46d83f274dc56f1535d52d not found: ID does not exist" containerID="f688b57c82f9531986e9a2b4eaae7c9b744f5fa88d46d83f274dc56f1535d52d" Oct 10 18:19:46 crc kubenswrapper[4799]: I1010 18:19:46.893278 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f688b57c82f9531986e9a2b4eaae7c9b744f5fa88d46d83f274dc56f1535d52d"} err="failed to get container status \"f688b57c82f9531986e9a2b4eaae7c9b744f5fa88d46d83f274dc56f1535d52d\": rpc error: code = NotFound desc = could not find container \"f688b57c82f9531986e9a2b4eaae7c9b744f5fa88d46d83f274dc56f1535d52d\": container with ID starting with f688b57c82f9531986e9a2b4eaae7c9b744f5fa88d46d83f274dc56f1535d52d not found: ID does not exist" Oct 10 18:19:46 crc kubenswrapper[4799]: I1010 18:19:46.893415 4799 scope.go:117] "RemoveContainer" containerID="e94ce23a41cfceb425d4937800d014065b34df082facbec67d19cf749ae97548" Oct 10 18:19:46 crc kubenswrapper[4799]: E1010 18:19:46.894010 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e94ce23a41cfceb425d4937800d014065b34df082facbec67d19cf749ae97548\": container with ID starting with e94ce23a41cfceb425d4937800d014065b34df082facbec67d19cf749ae97548 not found: ID does not exist" containerID="e94ce23a41cfceb425d4937800d014065b34df082facbec67d19cf749ae97548" Oct 10 18:19:46 crc kubenswrapper[4799]: I1010 18:19:46.894081 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e94ce23a41cfceb425d4937800d014065b34df082facbec67d19cf749ae97548"} err="failed to get container status \"e94ce23a41cfceb425d4937800d014065b34df082facbec67d19cf749ae97548\": rpc error: code = NotFound desc = could not find container \"e94ce23a41cfceb425d4937800d014065b34df082facbec67d19cf749ae97548\": container with ID starting with e94ce23a41cfceb425d4937800d014065b34df082facbec67d19cf749ae97548 not found: ID does not exist" Oct 10 18:19:47 crc kubenswrapper[4799]: I1010 18:19:47.418998 4799 scope.go:117] "RemoveContainer" containerID="1f0ceb697c256a28cbd9c8e1e3aa08e1dc732ac4382bc8944609e36db615c835" Oct 10 18:19:47 crc kubenswrapper[4799]: E1010 18:19:47.419585 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:19:47 crc kubenswrapper[4799]: I1010 18:19:47.426629 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="7d2cfa80-6751-4c4d-acd1-213dd64c059d" path="/var/lib/kubelet/pods/7d2cfa80-6751-4c4d-acd1-213dd64c059d/volumes" Oct 10 18:19:50 crc kubenswrapper[4799]: I1010 18:19:50.059220 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-c8c8-account-create-6q68n"] Oct 10 18:19:50 crc kubenswrapper[4799]: I1010 18:19:50.079657 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-c8c8-account-create-6q68n"] Oct 10 18:19:51 crc kubenswrapper[4799]: I1010 18:19:51.425914 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="553e946c-8009-464a-a913-289757441985" path="/var/lib/kubelet/pods/553e946c-8009-464a-a913-289757441985/volumes" Oct 10 18:19:54 crc kubenswrapper[4799]: I1010 18:19:54.425372 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-cwwb8"] Oct 10 18:19:54 crc kubenswrapper[4799]: E1010 18:19:54.426895 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d2cfa80-6751-4c4d-acd1-213dd64c059d" containerName="extract-utilities" Oct 10 18:19:54 crc kubenswrapper[4799]: I1010 18:19:54.426919 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d2cfa80-6751-4c4d-acd1-213dd64c059d" containerName="extract-utilities" Oct 10 18:19:54 crc kubenswrapper[4799]: E1010 18:19:54.426976 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d2cfa80-6751-4c4d-acd1-213dd64c059d" containerName="registry-server" Oct 10 18:19:54 crc kubenswrapper[4799]: I1010 18:19:54.426989 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d2cfa80-6751-4c4d-acd1-213dd64c059d" containerName="registry-server" Oct 10 18:19:54 crc kubenswrapper[4799]: E1010 18:19:54.427038 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d2cfa80-6751-4c4d-acd1-213dd64c059d" containerName="extract-content" Oct 10 18:19:54 crc kubenswrapper[4799]: I1010 18:19:54.427053 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d2cfa80-6751-4c4d-acd1-213dd64c059d" containerName="extract-content" Oct 10 18:19:54 crc kubenswrapper[4799]: I1010 18:19:54.427483 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d2cfa80-6751-4c4d-acd1-213dd64c059d" containerName="registry-server" Oct 10 18:19:54 crc kubenswrapper[4799]: I1010 18:19:54.436051 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cwwb8" Oct 10 18:19:54 crc kubenswrapper[4799]: I1010 18:19:54.446617 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cwwb8"] Oct 10 18:19:54 crc kubenswrapper[4799]: I1010 18:19:54.580664 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85bgj\" (UniqueName: \"kubernetes.io/projected/5ced4845-1cf2-473e-a725-8dd81279493d-kube-api-access-85bgj\") pod \"community-operators-cwwb8\" (UID: \"5ced4845-1cf2-473e-a725-8dd81279493d\") " pod="openshift-marketplace/community-operators-cwwb8" Oct 10 18:19:54 crc kubenswrapper[4799]: I1010 18:19:54.580867 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ced4845-1cf2-473e-a725-8dd81279493d-catalog-content\") pod \"community-operators-cwwb8\" (UID: \"5ced4845-1cf2-473e-a725-8dd81279493d\") " pod="openshift-marketplace/community-operators-cwwb8" Oct 10 18:19:54 crc kubenswrapper[4799]: I1010 18:19:54.580909 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ced4845-1cf2-473e-a725-8dd81279493d-utilities\") pod \"community-operators-cwwb8\" (UID: \"5ced4845-1cf2-473e-a725-8dd81279493d\") " pod="openshift-marketplace/community-operators-cwwb8" Oct 10 18:19:54 crc kubenswrapper[4799]: I1010 18:19:54.684068 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85bgj\" (UniqueName: \"kubernetes.io/projected/5ced4845-1cf2-473e-a725-8dd81279493d-kube-api-access-85bgj\") pod \"community-operators-cwwb8\" (UID: \"5ced4845-1cf2-473e-a725-8dd81279493d\") " pod="openshift-marketplace/community-operators-cwwb8" Oct 10 18:19:54 crc kubenswrapper[4799]: I1010 18:19:54.684228 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ced4845-1cf2-473e-a725-8dd81279493d-catalog-content\") pod \"community-operators-cwwb8\" (UID: \"5ced4845-1cf2-473e-a725-8dd81279493d\") " pod="openshift-marketplace/community-operators-cwwb8" Oct 10 18:19:54 crc kubenswrapper[4799]: I1010 18:19:54.684261 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ced4845-1cf2-473e-a725-8dd81279493d-utilities\") pod \"community-operators-cwwb8\" (UID: \"5ced4845-1cf2-473e-a725-8dd81279493d\") " pod="openshift-marketplace/community-operators-cwwb8" Oct 10 18:19:54 crc kubenswrapper[4799]: I1010 18:19:54.685031 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ced4845-1cf2-473e-a725-8dd81279493d-utilities\") pod \"community-operators-cwwb8\" (UID: \"5ced4845-1cf2-473e-a725-8dd81279493d\") " pod="openshift-marketplace/community-operators-cwwb8" Oct 10 18:19:54 crc kubenswrapper[4799]: I1010 18:19:54.685141 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ced4845-1cf2-473e-a725-8dd81279493d-catalog-content\") pod \"community-operators-cwwb8\" (UID: \"5ced4845-1cf2-473e-a725-8dd81279493d\") " pod="openshift-marketplace/community-operators-cwwb8" Oct 10 18:19:54 crc kubenswrapper[4799]: I1010 18:19:54.708167 4799 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-85bgj\" (UniqueName: \"kubernetes.io/projected/5ced4845-1cf2-473e-a725-8dd81279493d-kube-api-access-85bgj\") pod \"community-operators-cwwb8\" (UID: \"5ced4845-1cf2-473e-a725-8dd81279493d\") " pod="openshift-marketplace/community-operators-cwwb8" Oct 10 18:19:54 crc kubenswrapper[4799]: I1010 18:19:54.810129 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cwwb8" Oct 10 18:19:55 crc kubenswrapper[4799]: I1010 18:19:55.360296 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cwwb8"] Oct 10 18:19:55 crc kubenswrapper[4799]: I1010 18:19:55.925491 4799 generic.go:334] "Generic (PLEG): container finished" podID="5ced4845-1cf2-473e-a725-8dd81279493d" containerID="acebd8284d9480252f2d6cdb4a8bb2a6e67c66d700531ad40fd83e58f0ea2622" exitCode=0 Oct 10 18:19:55 crc kubenswrapper[4799]: I1010 18:19:55.925951 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cwwb8" event={"ID":"5ced4845-1cf2-473e-a725-8dd81279493d","Type":"ContainerDied","Data":"acebd8284d9480252f2d6cdb4a8bb2a6e67c66d700531ad40fd83e58f0ea2622"} Oct 10 18:19:55 crc kubenswrapper[4799]: I1010 18:19:55.928045 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cwwb8" event={"ID":"5ced4845-1cf2-473e-a725-8dd81279493d","Type":"ContainerStarted","Data":"cf7b72c123e15a5c16eb50805c081b02136a1df7b411709a7df75c77b8827274"} Oct 10 18:19:56 crc kubenswrapper[4799]: I1010 18:19:56.940471 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cwwb8" event={"ID":"5ced4845-1cf2-473e-a725-8dd81279493d","Type":"ContainerStarted","Data":"f0caf08bdf2a41bb08eadd12bec3d059debb3b75c1ff54308d9ebfeb831e1eb9"} Oct 10 18:19:58 crc kubenswrapper[4799]: I1010 18:19:58.976094 4799 generic.go:334] "Generic (PLEG): container finished" podID="5ced4845-1cf2-473e-a725-8dd81279493d" containerID="f0caf08bdf2a41bb08eadd12bec3d059debb3b75c1ff54308d9ebfeb831e1eb9" exitCode=0 Oct 10 18:19:58 crc kubenswrapper[4799]: I1010 18:19:58.976222 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cwwb8" event={"ID":"5ced4845-1cf2-473e-a725-8dd81279493d","Type":"ContainerDied","Data":"f0caf08bdf2a41bb08eadd12bec3d059debb3b75c1ff54308d9ebfeb831e1eb9"} Oct 10 18:19:59 crc kubenswrapper[4799]: I1010 18:19:59.990820 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cwwb8" event={"ID":"5ced4845-1cf2-473e-a725-8dd81279493d","Type":"ContainerStarted","Data":"4b0d98460ac82969c2cb0351e6ae2e2472b7197754be3991817ba778c3006810"} Oct 10 18:20:00 crc kubenswrapper[4799]: I1010 18:20:00.025426 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-cwwb8" podStartSLOduration=2.533677418 podStartE2EDuration="6.025402433s" podCreationTimestamp="2025-10-10 18:19:54 +0000 UTC" firstStartedPulling="2025-10-10 18:19:55.92924761 +0000 UTC m=+6489.437571765" lastFinishedPulling="2025-10-10 18:19:59.420972655 +0000 UTC m=+6492.929296780" observedRunningTime="2025-10-10 18:20:00.017354387 +0000 UTC m=+6493.525678522" watchObservedRunningTime="2025-10-10 18:20:00.025402433 +0000 UTC m=+6493.533726558" Oct 10 18:20:01 crc kubenswrapper[4799]: I1010 18:20:01.402334 4799 scope.go:117] "RemoveContainer" 
containerID="1f0ceb697c256a28cbd9c8e1e3aa08e1dc732ac4382bc8944609e36db615c835" Oct 10 18:20:01 crc kubenswrapper[4799]: E1010 18:20:01.403038 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:20:04 crc kubenswrapper[4799]: I1010 18:20:04.810280 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-cwwb8" Oct 10 18:20:04 crc kubenswrapper[4799]: I1010 18:20:04.810932 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-cwwb8" Oct 10 18:20:04 crc kubenswrapper[4799]: I1010 18:20:04.893525 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-cwwb8" Oct 10 18:20:05 crc kubenswrapper[4799]: I1010 18:20:05.115987 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-cwwb8" Oct 10 18:20:05 crc kubenswrapper[4799]: I1010 18:20:05.199892 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cwwb8"] Oct 10 18:20:07 crc kubenswrapper[4799]: I1010 18:20:07.080045 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-cwwb8" podUID="5ced4845-1cf2-473e-a725-8dd81279493d" containerName="registry-server" containerID="cri-o://4b0d98460ac82969c2cb0351e6ae2e2472b7197754be3991817ba778c3006810" gracePeriod=2 Oct 10 18:20:07 crc kubenswrapper[4799]: I1010 18:20:07.710603 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cwwb8" Oct 10 18:20:07 crc kubenswrapper[4799]: I1010 18:20:07.761210 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ced4845-1cf2-473e-a725-8dd81279493d-catalog-content\") pod \"5ced4845-1cf2-473e-a725-8dd81279493d\" (UID: \"5ced4845-1cf2-473e-a725-8dd81279493d\") " Oct 10 18:20:07 crc kubenswrapper[4799]: I1010 18:20:07.761450 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-85bgj\" (UniqueName: \"kubernetes.io/projected/5ced4845-1cf2-473e-a725-8dd81279493d-kube-api-access-85bgj\") pod \"5ced4845-1cf2-473e-a725-8dd81279493d\" (UID: \"5ced4845-1cf2-473e-a725-8dd81279493d\") " Oct 10 18:20:07 crc kubenswrapper[4799]: I1010 18:20:07.761808 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ced4845-1cf2-473e-a725-8dd81279493d-utilities\") pod \"5ced4845-1cf2-473e-a725-8dd81279493d\" (UID: \"5ced4845-1cf2-473e-a725-8dd81279493d\") " Oct 10 18:20:07 crc kubenswrapper[4799]: I1010 18:20:07.763237 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ced4845-1cf2-473e-a725-8dd81279493d-utilities" (OuterVolumeSpecName: "utilities") pod "5ced4845-1cf2-473e-a725-8dd81279493d" (UID: "5ced4845-1cf2-473e-a725-8dd81279493d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:20:07 crc kubenswrapper[4799]: I1010 18:20:07.767133 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ced4845-1cf2-473e-a725-8dd81279493d-kube-api-access-85bgj" (OuterVolumeSpecName: "kube-api-access-85bgj") pod "5ced4845-1cf2-473e-a725-8dd81279493d" (UID: "5ced4845-1cf2-473e-a725-8dd81279493d"). InnerVolumeSpecName "kube-api-access-85bgj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:20:07 crc kubenswrapper[4799]: I1010 18:20:07.804251 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ced4845-1cf2-473e-a725-8dd81279493d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5ced4845-1cf2-473e-a725-8dd81279493d" (UID: "5ced4845-1cf2-473e-a725-8dd81279493d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:20:07 crc kubenswrapper[4799]: I1010 18:20:07.863869 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ced4845-1cf2-473e-a725-8dd81279493d-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 18:20:07 crc kubenswrapper[4799]: I1010 18:20:07.863905 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ced4845-1cf2-473e-a725-8dd81279493d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 18:20:07 crc kubenswrapper[4799]: I1010 18:20:07.863918 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-85bgj\" (UniqueName: \"kubernetes.io/projected/5ced4845-1cf2-473e-a725-8dd81279493d-kube-api-access-85bgj\") on node \"crc\" DevicePath \"\"" Oct 10 18:20:08 crc kubenswrapper[4799]: I1010 18:20:08.093343 4799 generic.go:334] "Generic (PLEG): container finished" podID="5ced4845-1cf2-473e-a725-8dd81279493d" containerID="4b0d98460ac82969c2cb0351e6ae2e2472b7197754be3991817ba778c3006810" exitCode=0 Oct 10 18:20:08 crc kubenswrapper[4799]: I1010 18:20:08.093391 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cwwb8" event={"ID":"5ced4845-1cf2-473e-a725-8dd81279493d","Type":"ContainerDied","Data":"4b0d98460ac82969c2cb0351e6ae2e2472b7197754be3991817ba778c3006810"} Oct 10 18:20:08 crc kubenswrapper[4799]: I1010 18:20:08.093421 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cwwb8" event={"ID":"5ced4845-1cf2-473e-a725-8dd81279493d","Type":"ContainerDied","Data":"cf7b72c123e15a5c16eb50805c081b02136a1df7b411709a7df75c77b8827274"} Oct 10 18:20:08 crc kubenswrapper[4799]: I1010 18:20:08.093471 4799 scope.go:117] "RemoveContainer" containerID="4b0d98460ac82969c2cb0351e6ae2e2472b7197754be3991817ba778c3006810" Oct 10 18:20:08 crc kubenswrapper[4799]: I1010 18:20:08.093527 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cwwb8" Oct 10 18:20:08 crc kubenswrapper[4799]: I1010 18:20:08.121130 4799 scope.go:117] "RemoveContainer" containerID="f0caf08bdf2a41bb08eadd12bec3d059debb3b75c1ff54308d9ebfeb831e1eb9" Oct 10 18:20:08 crc kubenswrapper[4799]: I1010 18:20:08.154906 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cwwb8"] Oct 10 18:20:08 crc kubenswrapper[4799]: I1010 18:20:08.168102 4799 scope.go:117] "RemoveContainer" containerID="acebd8284d9480252f2d6cdb4a8bb2a6e67c66d700531ad40fd83e58f0ea2622" Oct 10 18:20:08 crc kubenswrapper[4799]: I1010 18:20:08.169205 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-cwwb8"] Oct 10 18:20:08 crc kubenswrapper[4799]: I1010 18:20:08.236559 4799 scope.go:117] "RemoveContainer" containerID="4b0d98460ac82969c2cb0351e6ae2e2472b7197754be3991817ba778c3006810" Oct 10 18:20:08 crc kubenswrapper[4799]: E1010 18:20:08.237228 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4b0d98460ac82969c2cb0351e6ae2e2472b7197754be3991817ba778c3006810\": container with ID starting with 4b0d98460ac82969c2cb0351e6ae2e2472b7197754be3991817ba778c3006810 not found: ID does not exist" containerID="4b0d98460ac82969c2cb0351e6ae2e2472b7197754be3991817ba778c3006810" Oct 10 18:20:08 crc kubenswrapper[4799]: I1010 18:20:08.237296 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b0d98460ac82969c2cb0351e6ae2e2472b7197754be3991817ba778c3006810"} err="failed to get container status \"4b0d98460ac82969c2cb0351e6ae2e2472b7197754be3991817ba778c3006810\": rpc error: code = NotFound desc = could not find container \"4b0d98460ac82969c2cb0351e6ae2e2472b7197754be3991817ba778c3006810\": container with ID starting with 4b0d98460ac82969c2cb0351e6ae2e2472b7197754be3991817ba778c3006810 not found: ID does not exist" Oct 10 18:20:08 crc kubenswrapper[4799]: I1010 18:20:08.237339 4799 scope.go:117] "RemoveContainer" containerID="f0caf08bdf2a41bb08eadd12bec3d059debb3b75c1ff54308d9ebfeb831e1eb9" Oct 10 18:20:08 crc kubenswrapper[4799]: E1010 18:20:08.238435 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f0caf08bdf2a41bb08eadd12bec3d059debb3b75c1ff54308d9ebfeb831e1eb9\": container with ID starting with f0caf08bdf2a41bb08eadd12bec3d059debb3b75c1ff54308d9ebfeb831e1eb9 not found: ID does not exist" containerID="f0caf08bdf2a41bb08eadd12bec3d059debb3b75c1ff54308d9ebfeb831e1eb9" Oct 10 18:20:08 crc kubenswrapper[4799]: I1010 18:20:08.238488 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0caf08bdf2a41bb08eadd12bec3d059debb3b75c1ff54308d9ebfeb831e1eb9"} err="failed to get container status \"f0caf08bdf2a41bb08eadd12bec3d059debb3b75c1ff54308d9ebfeb831e1eb9\": rpc error: code = NotFound desc = could not find container \"f0caf08bdf2a41bb08eadd12bec3d059debb3b75c1ff54308d9ebfeb831e1eb9\": container with ID starting with f0caf08bdf2a41bb08eadd12bec3d059debb3b75c1ff54308d9ebfeb831e1eb9 not found: ID does not exist" Oct 10 18:20:08 crc kubenswrapper[4799]: I1010 18:20:08.238526 4799 scope.go:117] "RemoveContainer" containerID="acebd8284d9480252f2d6cdb4a8bb2a6e67c66d700531ad40fd83e58f0ea2622" Oct 10 18:20:08 crc kubenswrapper[4799]: E1010 18:20:08.239031 4799 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"acebd8284d9480252f2d6cdb4a8bb2a6e67c66d700531ad40fd83e58f0ea2622\": container with ID starting with acebd8284d9480252f2d6cdb4a8bb2a6e67c66d700531ad40fd83e58f0ea2622 not found: ID does not exist" containerID="acebd8284d9480252f2d6cdb4a8bb2a6e67c66d700531ad40fd83e58f0ea2622" Oct 10 18:20:08 crc kubenswrapper[4799]: I1010 18:20:08.239084 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acebd8284d9480252f2d6cdb4a8bb2a6e67c66d700531ad40fd83e58f0ea2622"} err="failed to get container status \"acebd8284d9480252f2d6cdb4a8bb2a6e67c66d700531ad40fd83e58f0ea2622\": rpc error: code = NotFound desc = could not find container \"acebd8284d9480252f2d6cdb4a8bb2a6e67c66d700531ad40fd83e58f0ea2622\": container with ID starting with acebd8284d9480252f2d6cdb4a8bb2a6e67c66d700531ad40fd83e58f0ea2622 not found: ID does not exist" Oct 10 18:20:09 crc kubenswrapper[4799]: I1010 18:20:09.423576 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ced4845-1cf2-473e-a725-8dd81279493d" path="/var/lib/kubelet/pods/5ced4845-1cf2-473e-a725-8dd81279493d/volumes" Oct 10 18:20:14 crc kubenswrapper[4799]: I1010 18:20:14.342708 4799 scope.go:117] "RemoveContainer" containerID="dfe8143778a1b2423893bcad964b129586678c9d374d94729863059bc32c4be9" Oct 10 18:20:14 crc kubenswrapper[4799]: I1010 18:20:14.385752 4799 scope.go:117] "RemoveContainer" containerID="5fd86085811fd050d3e1c53b62c78027061a240943e4b5e18afa917b5f6889a7" Oct 10 18:20:14 crc kubenswrapper[4799]: I1010 18:20:14.457884 4799 scope.go:117] "RemoveContainer" containerID="0ad7ff0ef387bdaebc4ddcf0acab08d06eb3579386cfd15fc10ffd544f638ac0" Oct 10 18:20:14 crc kubenswrapper[4799]: I1010 18:20:14.537033 4799 scope.go:117] "RemoveContainer" containerID="d204f8469e95b71c319d65496b75d85cad86f66b7ba3c28d595793db641a3b41" Oct 10 18:20:15 crc kubenswrapper[4799]: I1010 18:20:15.403564 4799 scope.go:117] "RemoveContainer" containerID="1f0ceb697c256a28cbd9c8e1e3aa08e1dc732ac4382bc8944609e36db615c835" Oct 10 18:20:15 crc kubenswrapper[4799]: E1010 18:20:15.405581 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:20:26 crc kubenswrapper[4799]: I1010 18:20:26.080473 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-db-sync-hfpxv"] Oct 10 18:20:26 crc kubenswrapper[4799]: I1010 18:20:26.102152 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-db-sync-hfpxv"] Oct 10 18:20:26 crc kubenswrapper[4799]: I1010 18:20:26.402783 4799 scope.go:117] "RemoveContainer" containerID="1f0ceb697c256a28cbd9c8e1e3aa08e1dc732ac4382bc8944609e36db615c835" Oct 10 18:20:26 crc kubenswrapper[4799]: E1010 18:20:26.403277 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" 
podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:20:27 crc kubenswrapper[4799]: I1010 18:20:27.426457 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="188baf6a-09f1-4a8b-9454-b67f2cb0dada" path="/var/lib/kubelet/pods/188baf6a-09f1-4a8b-9454-b67f2cb0dada/volumes" Oct 10 18:20:41 crc kubenswrapper[4799]: I1010 18:20:41.407379 4799 scope.go:117] "RemoveContainer" containerID="1f0ceb697c256a28cbd9c8e1e3aa08e1dc732ac4382bc8944609e36db615c835" Oct 10 18:20:41 crc kubenswrapper[4799]: E1010 18:20:41.418195 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:20:52 crc kubenswrapper[4799]: I1010 18:20:52.403031 4799 scope.go:117] "RemoveContainer" containerID="1f0ceb697c256a28cbd9c8e1e3aa08e1dc732ac4382bc8944609e36db615c835" Oct 10 18:20:52 crc kubenswrapper[4799]: E1010 18:20:52.404407 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:21:06 crc kubenswrapper[4799]: I1010 18:21:06.402825 4799 scope.go:117] "RemoveContainer" containerID="1f0ceb697c256a28cbd9c8e1e3aa08e1dc732ac4382bc8944609e36db615c835" Oct 10 18:21:06 crc kubenswrapper[4799]: E1010 18:21:06.403661 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:21:14 crc kubenswrapper[4799]: I1010 18:21:14.754644 4799 scope.go:117] "RemoveContainer" containerID="e9e71403b7ccff1b358e2120b49b1ac55a6261b0b059c24e89b6e61dd6729a3f" Oct 10 18:21:14 crc kubenswrapper[4799]: I1010 18:21:14.805314 4799 scope.go:117] "RemoveContainer" containerID="21ea28bfc3e0f0e893de774f4a40d2b02f3cb699b7abe1c335903770672fb354" Oct 10 18:21:15 crc kubenswrapper[4799]: I1010 18:21:15.657546 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-cs7rx"] Oct 10 18:21:15 crc kubenswrapper[4799]: E1010 18:21:15.658488 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ced4845-1cf2-473e-a725-8dd81279493d" containerName="extract-utilities" Oct 10 18:21:15 crc kubenswrapper[4799]: I1010 18:21:15.658559 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ced4845-1cf2-473e-a725-8dd81279493d" containerName="extract-utilities" Oct 10 18:21:15 crc kubenswrapper[4799]: E1010 18:21:15.658634 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ced4845-1cf2-473e-a725-8dd81279493d" containerName="extract-content" Oct 10 18:21:15 crc kubenswrapper[4799]: I1010 18:21:15.658702 4799 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="5ced4845-1cf2-473e-a725-8dd81279493d" containerName="extract-content" Oct 10 18:21:15 crc kubenswrapper[4799]: E1010 18:21:15.658776 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ced4845-1cf2-473e-a725-8dd81279493d" containerName="registry-server" Oct 10 18:21:15 crc kubenswrapper[4799]: I1010 18:21:15.658839 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ced4845-1cf2-473e-a725-8dd81279493d" containerName="registry-server" Oct 10 18:21:15 crc kubenswrapper[4799]: I1010 18:21:15.659124 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ced4845-1cf2-473e-a725-8dd81279493d" containerName="registry-server" Oct 10 18:21:15 crc kubenswrapper[4799]: I1010 18:21:15.660708 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cs7rx" Oct 10 18:21:15 crc kubenswrapper[4799]: I1010 18:21:15.671346 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cs7rx"] Oct 10 18:21:15 crc kubenswrapper[4799]: I1010 18:21:15.800399 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cl79n\" (UniqueName: \"kubernetes.io/projected/ff216c68-4dcf-46e2-99f4-94d6ceba8173-kube-api-access-cl79n\") pod \"certified-operators-cs7rx\" (UID: \"ff216c68-4dcf-46e2-99f4-94d6ceba8173\") " pod="openshift-marketplace/certified-operators-cs7rx" Oct 10 18:21:15 crc kubenswrapper[4799]: I1010 18:21:15.800488 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff216c68-4dcf-46e2-99f4-94d6ceba8173-utilities\") pod \"certified-operators-cs7rx\" (UID: \"ff216c68-4dcf-46e2-99f4-94d6ceba8173\") " pod="openshift-marketplace/certified-operators-cs7rx" Oct 10 18:21:15 crc kubenswrapper[4799]: I1010 18:21:15.800677 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff216c68-4dcf-46e2-99f4-94d6ceba8173-catalog-content\") pod \"certified-operators-cs7rx\" (UID: \"ff216c68-4dcf-46e2-99f4-94d6ceba8173\") " pod="openshift-marketplace/certified-operators-cs7rx" Oct 10 18:21:15 crc kubenswrapper[4799]: I1010 18:21:15.902256 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff216c68-4dcf-46e2-99f4-94d6ceba8173-catalog-content\") pod \"certified-operators-cs7rx\" (UID: \"ff216c68-4dcf-46e2-99f4-94d6ceba8173\") " pod="openshift-marketplace/certified-operators-cs7rx" Oct 10 18:21:15 crc kubenswrapper[4799]: I1010 18:21:15.902400 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cl79n\" (UniqueName: \"kubernetes.io/projected/ff216c68-4dcf-46e2-99f4-94d6ceba8173-kube-api-access-cl79n\") pod \"certified-operators-cs7rx\" (UID: \"ff216c68-4dcf-46e2-99f4-94d6ceba8173\") " pod="openshift-marketplace/certified-operators-cs7rx" Oct 10 18:21:15 crc kubenswrapper[4799]: I1010 18:21:15.902452 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff216c68-4dcf-46e2-99f4-94d6ceba8173-utilities\") pod \"certified-operators-cs7rx\" (UID: \"ff216c68-4dcf-46e2-99f4-94d6ceba8173\") " pod="openshift-marketplace/certified-operators-cs7rx" Oct 10 18:21:15 crc kubenswrapper[4799]: I1010 
18:21:15.903114 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff216c68-4dcf-46e2-99f4-94d6ceba8173-utilities\") pod \"certified-operators-cs7rx\" (UID: \"ff216c68-4dcf-46e2-99f4-94d6ceba8173\") " pod="openshift-marketplace/certified-operators-cs7rx" Oct 10 18:21:15 crc kubenswrapper[4799]: I1010 18:21:15.903114 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff216c68-4dcf-46e2-99f4-94d6ceba8173-catalog-content\") pod \"certified-operators-cs7rx\" (UID: \"ff216c68-4dcf-46e2-99f4-94d6ceba8173\") " pod="openshift-marketplace/certified-operators-cs7rx" Oct 10 18:21:15 crc kubenswrapper[4799]: I1010 18:21:15.933275 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cl79n\" (UniqueName: \"kubernetes.io/projected/ff216c68-4dcf-46e2-99f4-94d6ceba8173-kube-api-access-cl79n\") pod \"certified-operators-cs7rx\" (UID: \"ff216c68-4dcf-46e2-99f4-94d6ceba8173\") " pod="openshift-marketplace/certified-operators-cs7rx" Oct 10 18:21:16 crc kubenswrapper[4799]: I1010 18:21:16.014150 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cs7rx" Oct 10 18:21:16 crc kubenswrapper[4799]: I1010 18:21:16.564933 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cs7rx"] Oct 10 18:21:16 crc kubenswrapper[4799]: I1010 18:21:16.956070 4799 generic.go:334] "Generic (PLEG): container finished" podID="ff216c68-4dcf-46e2-99f4-94d6ceba8173" containerID="035363d1d121f05dac59ac30e5b7aa2317474463fa8a95bab52d0c44a9563343" exitCode=0 Oct 10 18:21:16 crc kubenswrapper[4799]: I1010 18:21:16.956440 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cs7rx" event={"ID":"ff216c68-4dcf-46e2-99f4-94d6ceba8173","Type":"ContainerDied","Data":"035363d1d121f05dac59ac30e5b7aa2317474463fa8a95bab52d0c44a9563343"} Oct 10 18:21:16 crc kubenswrapper[4799]: I1010 18:21:16.956483 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cs7rx" event={"ID":"ff216c68-4dcf-46e2-99f4-94d6ceba8173","Type":"ContainerStarted","Data":"6c2e333c9adf851780abf10a25f0c7e7e5730bee778651ceadf5cdc54a346a03"} Oct 10 18:21:16 crc kubenswrapper[4799]: I1010 18:21:16.962851 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 10 18:21:17 crc kubenswrapper[4799]: I1010 18:21:17.970149 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cs7rx" event={"ID":"ff216c68-4dcf-46e2-99f4-94d6ceba8173","Type":"ContainerStarted","Data":"39e246ecb94d491be49c602c68d3b35054d0397d0936416a127a3df30d431405"} Oct 10 18:21:19 crc kubenswrapper[4799]: I1010 18:21:19.993547 4799 generic.go:334] "Generic (PLEG): container finished" podID="ff216c68-4dcf-46e2-99f4-94d6ceba8173" containerID="39e246ecb94d491be49c602c68d3b35054d0397d0936416a127a3df30d431405" exitCode=0 Oct 10 18:21:19 crc kubenswrapper[4799]: I1010 18:21:19.993643 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cs7rx" event={"ID":"ff216c68-4dcf-46e2-99f4-94d6ceba8173","Type":"ContainerDied","Data":"39e246ecb94d491be49c602c68d3b35054d0397d0936416a127a3df30d431405"} Oct 10 18:21:20 crc kubenswrapper[4799]: I1010 18:21:20.404277 4799 scope.go:117] "RemoveContainer" 
containerID="1f0ceb697c256a28cbd9c8e1e3aa08e1dc732ac4382bc8944609e36db615c835" Oct 10 18:21:20 crc kubenswrapper[4799]: E1010 18:21:20.404784 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:21:21 crc kubenswrapper[4799]: I1010 18:21:21.014370 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cs7rx" event={"ID":"ff216c68-4dcf-46e2-99f4-94d6ceba8173","Type":"ContainerStarted","Data":"661c605b7f818dc466b9f241f46a7f0d11143149b2c3e7f9bb8be7ed5de5a69f"} Oct 10 18:21:21 crc kubenswrapper[4799]: I1010 18:21:21.056232 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-cs7rx" podStartSLOduration=2.52908317 podStartE2EDuration="6.056205079s" podCreationTimestamp="2025-10-10 18:21:15 +0000 UTC" firstStartedPulling="2025-10-10 18:21:16.962554507 +0000 UTC m=+6570.470878632" lastFinishedPulling="2025-10-10 18:21:20.489676386 +0000 UTC m=+6573.998000541" observedRunningTime="2025-10-10 18:21:21.043893548 +0000 UTC m=+6574.552217703" watchObservedRunningTime="2025-10-10 18:21:21.056205079 +0000 UTC m=+6574.564529204" Oct 10 18:21:26 crc kubenswrapper[4799]: I1010 18:21:26.014922 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-cs7rx" Oct 10 18:21:26 crc kubenswrapper[4799]: I1010 18:21:26.015690 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-cs7rx" Oct 10 18:21:26 crc kubenswrapper[4799]: I1010 18:21:26.109951 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-cs7rx" Oct 10 18:21:26 crc kubenswrapper[4799]: I1010 18:21:26.185081 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-cs7rx" Oct 10 18:21:26 crc kubenswrapper[4799]: I1010 18:21:26.360375 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cs7rx"] Oct 10 18:21:28 crc kubenswrapper[4799]: I1010 18:21:28.128462 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-cs7rx" podUID="ff216c68-4dcf-46e2-99f4-94d6ceba8173" containerName="registry-server" containerID="cri-o://661c605b7f818dc466b9f241f46a7f0d11143149b2c3e7f9bb8be7ed5de5a69f" gracePeriod=2 Oct 10 18:21:28 crc kubenswrapper[4799]: I1010 18:21:28.641434 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cs7rx" Oct 10 18:21:28 crc kubenswrapper[4799]: I1010 18:21:28.804772 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff216c68-4dcf-46e2-99f4-94d6ceba8173-catalog-content\") pod \"ff216c68-4dcf-46e2-99f4-94d6ceba8173\" (UID: \"ff216c68-4dcf-46e2-99f4-94d6ceba8173\") " Oct 10 18:21:28 crc kubenswrapper[4799]: I1010 18:21:28.805094 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff216c68-4dcf-46e2-99f4-94d6ceba8173-utilities\") pod \"ff216c68-4dcf-46e2-99f4-94d6ceba8173\" (UID: \"ff216c68-4dcf-46e2-99f4-94d6ceba8173\") " Oct 10 18:21:28 crc kubenswrapper[4799]: I1010 18:21:28.805198 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cl79n\" (UniqueName: \"kubernetes.io/projected/ff216c68-4dcf-46e2-99f4-94d6ceba8173-kube-api-access-cl79n\") pod \"ff216c68-4dcf-46e2-99f4-94d6ceba8173\" (UID: \"ff216c68-4dcf-46e2-99f4-94d6ceba8173\") " Oct 10 18:21:28 crc kubenswrapper[4799]: I1010 18:21:28.814084 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff216c68-4dcf-46e2-99f4-94d6ceba8173-kube-api-access-cl79n" (OuterVolumeSpecName: "kube-api-access-cl79n") pod "ff216c68-4dcf-46e2-99f4-94d6ceba8173" (UID: "ff216c68-4dcf-46e2-99f4-94d6ceba8173"). InnerVolumeSpecName "kube-api-access-cl79n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:21:28 crc kubenswrapper[4799]: I1010 18:21:28.814320 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff216c68-4dcf-46e2-99f4-94d6ceba8173-utilities" (OuterVolumeSpecName: "utilities") pod "ff216c68-4dcf-46e2-99f4-94d6ceba8173" (UID: "ff216c68-4dcf-46e2-99f4-94d6ceba8173"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:21:28 crc kubenswrapper[4799]: I1010 18:21:28.887655 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff216c68-4dcf-46e2-99f4-94d6ceba8173-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ff216c68-4dcf-46e2-99f4-94d6ceba8173" (UID: "ff216c68-4dcf-46e2-99f4-94d6ceba8173"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:21:28 crc kubenswrapper[4799]: I1010 18:21:28.908992 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff216c68-4dcf-46e2-99f4-94d6ceba8173-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 18:21:28 crc kubenswrapper[4799]: I1010 18:21:28.909043 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cl79n\" (UniqueName: \"kubernetes.io/projected/ff216c68-4dcf-46e2-99f4-94d6ceba8173-kube-api-access-cl79n\") on node \"crc\" DevicePath \"\"" Oct 10 18:21:28 crc kubenswrapper[4799]: I1010 18:21:28.909065 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff216c68-4dcf-46e2-99f4-94d6ceba8173-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 18:21:29 crc kubenswrapper[4799]: I1010 18:21:29.143058 4799 generic.go:334] "Generic (PLEG): container finished" podID="ff216c68-4dcf-46e2-99f4-94d6ceba8173" containerID="661c605b7f818dc466b9f241f46a7f0d11143149b2c3e7f9bb8be7ed5de5a69f" exitCode=0 Oct 10 18:21:29 crc kubenswrapper[4799]: I1010 18:21:29.143126 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cs7rx" event={"ID":"ff216c68-4dcf-46e2-99f4-94d6ceba8173","Type":"ContainerDied","Data":"661c605b7f818dc466b9f241f46a7f0d11143149b2c3e7f9bb8be7ed5de5a69f"} Oct 10 18:21:29 crc kubenswrapper[4799]: I1010 18:21:29.143148 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cs7rx" Oct 10 18:21:29 crc kubenswrapper[4799]: I1010 18:21:29.143186 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cs7rx" event={"ID":"ff216c68-4dcf-46e2-99f4-94d6ceba8173","Type":"ContainerDied","Data":"6c2e333c9adf851780abf10a25f0c7e7e5730bee778651ceadf5cdc54a346a03"} Oct 10 18:21:29 crc kubenswrapper[4799]: I1010 18:21:29.143223 4799 scope.go:117] "RemoveContainer" containerID="661c605b7f818dc466b9f241f46a7f0d11143149b2c3e7f9bb8be7ed5de5a69f" Oct 10 18:21:29 crc kubenswrapper[4799]: I1010 18:21:29.187932 4799 scope.go:117] "RemoveContainer" containerID="39e246ecb94d491be49c602c68d3b35054d0397d0936416a127a3df30d431405" Oct 10 18:21:29 crc kubenswrapper[4799]: I1010 18:21:29.197019 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cs7rx"] Oct 10 18:21:29 crc kubenswrapper[4799]: I1010 18:21:29.207516 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-cs7rx"] Oct 10 18:21:29 crc kubenswrapper[4799]: I1010 18:21:29.235406 4799 scope.go:117] "RemoveContainer" containerID="035363d1d121f05dac59ac30e5b7aa2317474463fa8a95bab52d0c44a9563343" Oct 10 18:21:29 crc kubenswrapper[4799]: I1010 18:21:29.274058 4799 scope.go:117] "RemoveContainer" containerID="661c605b7f818dc466b9f241f46a7f0d11143149b2c3e7f9bb8be7ed5de5a69f" Oct 10 18:21:29 crc kubenswrapper[4799]: E1010 18:21:29.274682 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"661c605b7f818dc466b9f241f46a7f0d11143149b2c3e7f9bb8be7ed5de5a69f\": container with ID starting with 661c605b7f818dc466b9f241f46a7f0d11143149b2c3e7f9bb8be7ed5de5a69f not found: ID does not exist" containerID="661c605b7f818dc466b9f241f46a7f0d11143149b2c3e7f9bb8be7ed5de5a69f" Oct 10 18:21:29 crc kubenswrapper[4799]: I1010 18:21:29.274710 
4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"661c605b7f818dc466b9f241f46a7f0d11143149b2c3e7f9bb8be7ed5de5a69f"} err="failed to get container status \"661c605b7f818dc466b9f241f46a7f0d11143149b2c3e7f9bb8be7ed5de5a69f\": rpc error: code = NotFound desc = could not find container \"661c605b7f818dc466b9f241f46a7f0d11143149b2c3e7f9bb8be7ed5de5a69f\": container with ID starting with 661c605b7f818dc466b9f241f46a7f0d11143149b2c3e7f9bb8be7ed5de5a69f not found: ID does not exist" Oct 10 18:21:29 crc kubenswrapper[4799]: I1010 18:21:29.274733 4799 scope.go:117] "RemoveContainer" containerID="39e246ecb94d491be49c602c68d3b35054d0397d0936416a127a3df30d431405" Oct 10 18:21:29 crc kubenswrapper[4799]: E1010 18:21:29.275351 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"39e246ecb94d491be49c602c68d3b35054d0397d0936416a127a3df30d431405\": container with ID starting with 39e246ecb94d491be49c602c68d3b35054d0397d0936416a127a3df30d431405 not found: ID does not exist" containerID="39e246ecb94d491be49c602c68d3b35054d0397d0936416a127a3df30d431405" Oct 10 18:21:29 crc kubenswrapper[4799]: I1010 18:21:29.275373 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39e246ecb94d491be49c602c68d3b35054d0397d0936416a127a3df30d431405"} err="failed to get container status \"39e246ecb94d491be49c602c68d3b35054d0397d0936416a127a3df30d431405\": rpc error: code = NotFound desc = could not find container \"39e246ecb94d491be49c602c68d3b35054d0397d0936416a127a3df30d431405\": container with ID starting with 39e246ecb94d491be49c602c68d3b35054d0397d0936416a127a3df30d431405 not found: ID does not exist" Oct 10 18:21:29 crc kubenswrapper[4799]: I1010 18:21:29.275385 4799 scope.go:117] "RemoveContainer" containerID="035363d1d121f05dac59ac30e5b7aa2317474463fa8a95bab52d0c44a9563343" Oct 10 18:21:29 crc kubenswrapper[4799]: E1010 18:21:29.275946 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"035363d1d121f05dac59ac30e5b7aa2317474463fa8a95bab52d0c44a9563343\": container with ID starting with 035363d1d121f05dac59ac30e5b7aa2317474463fa8a95bab52d0c44a9563343 not found: ID does not exist" containerID="035363d1d121f05dac59ac30e5b7aa2317474463fa8a95bab52d0c44a9563343" Oct 10 18:21:29 crc kubenswrapper[4799]: I1010 18:21:29.276152 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"035363d1d121f05dac59ac30e5b7aa2317474463fa8a95bab52d0c44a9563343"} err="failed to get container status \"035363d1d121f05dac59ac30e5b7aa2317474463fa8a95bab52d0c44a9563343\": rpc error: code = NotFound desc = could not find container \"035363d1d121f05dac59ac30e5b7aa2317474463fa8a95bab52d0c44a9563343\": container with ID starting with 035363d1d121f05dac59ac30e5b7aa2317474463fa8a95bab52d0c44a9563343 not found: ID does not exist" Oct 10 18:21:29 crc kubenswrapper[4799]: I1010 18:21:29.439190 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff216c68-4dcf-46e2-99f4-94d6ceba8173" path="/var/lib/kubelet/pods/ff216c68-4dcf-46e2-99f4-94d6ceba8173/volumes" Oct 10 18:21:35 crc kubenswrapper[4799]: I1010 18:21:35.403371 4799 scope.go:117] "RemoveContainer" containerID="1f0ceb697c256a28cbd9c8e1e3aa08e1dc732ac4382bc8944609e36db615c835" Oct 10 18:21:35 crc kubenswrapper[4799]: E1010 18:21:35.404428 4799 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:21:48 crc kubenswrapper[4799]: I1010 18:21:48.402682 4799 scope.go:117] "RemoveContainer" containerID="1f0ceb697c256a28cbd9c8e1e3aa08e1dc732ac4382bc8944609e36db615c835" Oct 10 18:21:48 crc kubenswrapper[4799]: E1010 18:21:48.403873 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:22:01 crc kubenswrapper[4799]: I1010 18:22:01.402973 4799 scope.go:117] "RemoveContainer" containerID="1f0ceb697c256a28cbd9c8e1e3aa08e1dc732ac4382bc8944609e36db615c835" Oct 10 18:22:01 crc kubenswrapper[4799]: E1010 18:22:01.405439 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:22:15 crc kubenswrapper[4799]: I1010 18:22:15.402456 4799 scope.go:117] "RemoveContainer" containerID="1f0ceb697c256a28cbd9c8e1e3aa08e1dc732ac4382bc8944609e36db615c835" Oct 10 18:22:15 crc kubenswrapper[4799]: I1010 18:22:15.787873 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"eb0abc333441a2aff44658f972e9f97930ec1de7645b49203e1dddf872de470d"} Oct 10 18:23:14 crc kubenswrapper[4799]: I1010 18:23:14.993410 4799 scope.go:117] "RemoveContainer" containerID="49685a3108a4c6ee9a3d5187929d9de2bb85dd7e17303254457dcd2766440dd2" Oct 10 18:23:15 crc kubenswrapper[4799]: I1010 18:23:15.021819 4799 scope.go:117] "RemoveContainer" containerID="5a69839289cd398c246a3bb2febf28aecf046403bad2a3403961cd2db0a7b1c9" Oct 10 18:23:15 crc kubenswrapper[4799]: I1010 18:23:15.060039 4799 scope.go:117] "RemoveContainer" containerID="ca63f5d8695242c9c7e5f9d8eac01829a3d8ea11069c9dee7e4ed9dd4fdd3cff" Oct 10 18:23:15 crc kubenswrapper[4799]: I1010 18:23:15.093958 4799 scope.go:117] "RemoveContainer" containerID="2c89c14c0db1e7423dc94cf96d9fe6d4d8ce14b70bad05381a670607d4c644a2" Oct 10 18:23:20 crc kubenswrapper[4799]: I1010 18:23:20.058432 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-create-b8wvd"] Oct 10 18:23:20 crc kubenswrapper[4799]: I1010 18:23:20.066309 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-create-b8wvd"] Oct 10 18:23:21 crc kubenswrapper[4799]: I1010 18:23:21.420901 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49cea7a4-c56f-4c1f-bd9a-99c3e9945625" path="/var/lib/kubelet/pods/49cea7a4-c56f-4c1f-bd9a-99c3e9945625/volumes" Oct 10 18:23:31 crc kubenswrapper[4799]: 
Oct 10 18:23:31 crc kubenswrapper[4799]: I1010 18:23:31.062417 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-c9f2-account-create-7vv8z"]
Oct 10 18:23:31 crc kubenswrapper[4799]: I1010 18:23:31.418799 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0c7f4a5a-92e2-471c-a606-e71c722a026d" path="/var/lib/kubelet/pods/0c7f4a5a-92e2-471c-a606-e71c722a026d/volumes"
Oct 10 18:23:44 crc kubenswrapper[4799]: I1010 18:23:44.072354 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-sync-7bz79"]
Oct 10 18:23:44 crc kubenswrapper[4799]: I1010 18:23:44.084553 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-sync-7bz79"]
Oct 10 18:23:45 crc kubenswrapper[4799]: I1010 18:23:45.423721 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76688729-9f62-43ec-8061-bd200fc10fd0" path="/var/lib/kubelet/pods/76688729-9f62-43ec-8061-bd200fc10fd0/volumes"
Oct 10 18:24:15 crc kubenswrapper[4799]: I1010 18:24:15.175203 4799 scope.go:117] "RemoveContainer" containerID="fa2c5567ac3491e3b5b60d5ff7582729052655b3bb66662b9e445c5bdb38aa7a"
Oct 10 18:24:15 crc kubenswrapper[4799]: I1010 18:24:15.217574 4799 scope.go:117] "RemoveContainer" containerID="3df0bd3443b480f13da760730e3f1da57e42ca8cc8cf342f19793be3fc4a0605"
Oct 10 18:24:15 crc kubenswrapper[4799]: I1010 18:24:15.249495 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 10 18:24:15 crc kubenswrapper[4799]: I1010 18:24:15.249579 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 10 18:24:15 crc kubenswrapper[4799]: I1010 18:24:15.292420 4799 scope.go:117] "RemoveContainer" containerID="6441e6290c896e3e4381f62eeb031cd865a2075abd055f223c24cd8b070a254a"
Oct 10 18:24:45 crc kubenswrapper[4799]: I1010 18:24:45.249521 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 10 18:24:45 crc kubenswrapper[4799]: I1010 18:24:45.250248 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 10 18:25:15 crc kubenswrapper[4799]: I1010 18:25:15.248671 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 10 18:25:15 crc kubenswrapper[4799]: I1010 18:25:15.249383 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 10 18:25:15 crc kubenswrapper[4799]: I1010 18:25:15.249448 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc"
Oct 10 18:25:15 crc kubenswrapper[4799]: I1010 18:25:15.250578 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"eb0abc333441a2aff44658f972e9f97930ec1de7645b49203e1dddf872de470d"} pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 10 18:25:15 crc kubenswrapper[4799]: I1010 18:25:15.250684 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" containerID="cri-o://eb0abc333441a2aff44658f972e9f97930ec1de7645b49203e1dddf872de470d" gracePeriod=600
Oct 10 18:25:16 crc kubenswrapper[4799]: I1010 18:25:16.188043 4799 generic.go:334] "Generic (PLEG): container finished" podID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerID="eb0abc333441a2aff44658f972e9f97930ec1de7645b49203e1dddf872de470d" exitCode=0
Oct 10 18:25:16 crc kubenswrapper[4799]: I1010 18:25:16.188301 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerDied","Data":"eb0abc333441a2aff44658f972e9f97930ec1de7645b49203e1dddf872de470d"}
Oct 10 18:25:16 crc kubenswrapper[4799]: I1010 18:25:16.188741 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"e1753dd33d9c2573eba3d4245d76828dad7bb15c7538442d84b9b903c94df080"}
Oct 10 18:25:16 crc kubenswrapper[4799]: I1010 18:25:16.188783 4799 scope.go:117] "RemoveContainer" containerID="1f0ceb697c256a28cbd9c8e1e3aa08e1dc732ac4382bc8944609e36db615c835"
Oct 10 18:25:55 crc kubenswrapper[4799]: I1010 18:25:55.059491 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-create-87qtg"]
Oct 10 18:25:55 crc kubenswrapper[4799]: I1010 18:25:55.076191 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-create-87qtg"]
Oct 10 18:25:55 crc kubenswrapper[4799]: I1010 18:25:55.434131 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7fb0e2f5-1600-4bed-a4b4-8c9d6b1b4910" path="/var/lib/kubelet/pods/7fb0e2f5-1600-4bed-a4b4-8c9d6b1b4910/volumes"
Oct 10 18:26:06 crc kubenswrapper[4799]: I1010 18:26:06.034337 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-038d-account-create-nz4st"]
Oct 10 18:26:06 crc kubenswrapper[4799]: I1010 18:26:06.046979 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-038d-account-create-nz4st"]
Oct 10 18:26:07 crc kubenswrapper[4799]: I1010 18:26:07.423175 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87713929-7b88-4f21-ae38-68b78557c50b" path="/var/lib/kubelet/pods/87713929-7b88-4f21-ae38-68b78557c50b/volumes"
path="/var/lib/kubelet/pods/87713929-7b88-4f21-ae38-68b78557c50b/volumes" Oct 10 18:26:15 crc kubenswrapper[4799]: I1010 18:26:15.453052 4799 scope.go:117] "RemoveContainer" containerID="6155b3f4a3400207569b724c93b6baece6e4c6973d23496d8d42284f6467ee84" Oct 10 18:26:15 crc kubenswrapper[4799]: I1010 18:26:15.544921 4799 scope.go:117] "RemoveContainer" containerID="43a67b59bdbbbe1b81a179f4bc34c4a74dd5f27f8dc7ec84f142f567fa4c23cc" Oct 10 18:26:17 crc kubenswrapper[4799]: I1010 18:26:17.067457 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-sync-rtv46"] Oct 10 18:26:17 crc kubenswrapper[4799]: I1010 18:26:17.082351 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-sync-rtv46"] Oct 10 18:26:17 crc kubenswrapper[4799]: I1010 18:26:17.424465 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb" path="/var/lib/kubelet/pods/eb3620b8-4e83-4af0-ad67-16c6a8aaa0bb/volumes" Oct 10 18:26:39 crc kubenswrapper[4799]: I1010 18:26:39.046998 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-create-v7rtp"] Oct 10 18:26:39 crc kubenswrapper[4799]: I1010 18:26:39.078455 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-create-v7rtp"] Oct 10 18:26:39 crc kubenswrapper[4799]: I1010 18:26:39.414724 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09be6bcc-c080-4408-b59f-b745601e9939" path="/var/lib/kubelet/pods/09be6bcc-c080-4408-b59f-b745601e9939/volumes" Oct 10 18:26:49 crc kubenswrapper[4799]: I1010 18:26:49.048635 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-1b7d-account-create-bzw8s"] Oct 10 18:26:49 crc kubenswrapper[4799]: I1010 18:26:49.059944 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-1b7d-account-create-bzw8s"] Oct 10 18:26:49 crc kubenswrapper[4799]: I1010 18:26:49.425298 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5fd0979-3afa-415a-93b5-a6981524b6a8" path="/var/lib/kubelet/pods/c5fd0979-3afa-415a-93b5-a6981524b6a8/volumes" Oct 10 18:27:01 crc kubenswrapper[4799]: I1010 18:27:01.046656 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-sync-w57kf"] Oct 10 18:27:01 crc kubenswrapper[4799]: I1010 18:27:01.060618 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-sync-w57kf"] Oct 10 18:27:01 crc kubenswrapper[4799]: I1010 18:27:01.434078 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12fe564f-024c-412e-b497-b33c59ab57a6" path="/var/lib/kubelet/pods/12fe564f-024c-412e-b497-b33c59ab57a6/volumes" Oct 10 18:27:15 crc kubenswrapper[4799]: I1010 18:27:15.248956 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 18:27:15 crc kubenswrapper[4799]: I1010 18:27:15.249694 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 18:27:15 crc kubenswrapper[4799]: I1010 18:27:15.694359 4799 scope.go:117] "RemoveContainer" 
containerID="49cb8a7391883186389d41e61c313ad64ac184ce2a3b845fc4ab437ef9f38f7e" Oct 10 18:27:15 crc kubenswrapper[4799]: I1010 18:27:15.719955 4799 scope.go:117] "RemoveContainer" containerID="5e0ff81020c44541736e3f63b3b543d3a8530c212888ae5f8e23384a1ce119b6" Oct 10 18:27:15 crc kubenswrapper[4799]: I1010 18:27:15.784748 4799 scope.go:117] "RemoveContainer" containerID="ac061486347082cb7715350b7d7a5f48ba84d3c842e88bbfa380b73e2dfcbd6e" Oct 10 18:27:15 crc kubenswrapper[4799]: I1010 18:27:15.831995 4799 scope.go:117] "RemoveContainer" containerID="1f9d34e22e530992d9c33c241556f25f95bd93ccb61d3d670829f850cbcb3347" Oct 10 18:27:45 crc kubenswrapper[4799]: I1010 18:27:45.248675 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 18:27:45 crc kubenswrapper[4799]: I1010 18:27:45.249580 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 18:28:15 crc kubenswrapper[4799]: I1010 18:28:15.248817 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 18:28:15 crc kubenswrapper[4799]: I1010 18:28:15.249607 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 18:28:15 crc kubenswrapper[4799]: I1010 18:28:15.249673 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 18:28:15 crc kubenswrapper[4799]: I1010 18:28:15.250981 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e1753dd33d9c2573eba3d4245d76828dad7bb15c7538442d84b9b903c94df080"} pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 10 18:28:15 crc kubenswrapper[4799]: I1010 18:28:15.251096 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" containerID="cri-o://e1753dd33d9c2573eba3d4245d76828dad7bb15c7538442d84b9b903c94df080" gracePeriod=600 Oct 10 18:28:15 crc kubenswrapper[4799]: E1010 18:28:15.378288 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:28:15 crc kubenswrapper[4799]: I1010 18:28:15.488337 4799 generic.go:334] "Generic (PLEG): container finished" podID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerID="e1753dd33d9c2573eba3d4245d76828dad7bb15c7538442d84b9b903c94df080" exitCode=0 Oct 10 18:28:15 crc kubenswrapper[4799]: I1010 18:28:15.488531 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerDied","Data":"e1753dd33d9c2573eba3d4245d76828dad7bb15c7538442d84b9b903c94df080"} Oct 10 18:28:15 crc kubenswrapper[4799]: I1010 18:28:15.488849 4799 scope.go:117] "RemoveContainer" containerID="eb0abc333441a2aff44658f972e9f97930ec1de7645b49203e1dddf872de470d" Oct 10 18:28:15 crc kubenswrapper[4799]: I1010 18:28:15.489993 4799 scope.go:117] "RemoveContainer" containerID="e1753dd33d9c2573eba3d4245d76828dad7bb15c7538442d84b9b903c94df080" Oct 10 18:28:15 crc kubenswrapper[4799]: E1010 18:28:15.490377 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:28:30 crc kubenswrapper[4799]: I1010 18:28:30.403404 4799 scope.go:117] "RemoveContainer" containerID="e1753dd33d9c2573eba3d4245d76828dad7bb15c7538442d84b9b903c94df080" Oct 10 18:28:30 crc kubenswrapper[4799]: E1010 18:28:30.404776 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:28:43 crc kubenswrapper[4799]: I1010 18:28:43.403630 4799 scope.go:117] "RemoveContainer" containerID="e1753dd33d9c2573eba3d4245d76828dad7bb15c7538442d84b9b903c94df080" Oct 10 18:28:43 crc kubenswrapper[4799]: E1010 18:28:43.405022 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:28:58 crc kubenswrapper[4799]: I1010 18:28:58.402954 4799 scope.go:117] "RemoveContainer" containerID="e1753dd33d9c2573eba3d4245d76828dad7bb15c7538442d84b9b903c94df080" Oct 10 18:28:58 crc kubenswrapper[4799]: E1010 18:28:58.405645 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" 
podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:29:13 crc kubenswrapper[4799]: I1010 18:29:13.402967 4799 scope.go:117] "RemoveContainer" containerID="e1753dd33d9c2573eba3d4245d76828dad7bb15c7538442d84b9b903c94df080" Oct 10 18:29:13 crc kubenswrapper[4799]: E1010 18:29:13.406617 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:29:24 crc kubenswrapper[4799]: I1010 18:29:24.402395 4799 scope.go:117] "RemoveContainer" containerID="e1753dd33d9c2573eba3d4245d76828dad7bb15c7538442d84b9b903c94df080" Oct 10 18:29:24 crc kubenswrapper[4799]: E1010 18:29:24.403662 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:29:39 crc kubenswrapper[4799]: I1010 18:29:39.403327 4799 scope.go:117] "RemoveContainer" containerID="e1753dd33d9c2573eba3d4245d76828dad7bb15c7538442d84b9b903c94df080" Oct 10 18:29:39 crc kubenswrapper[4799]: E1010 18:29:39.404153 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:29:50 crc kubenswrapper[4799]: I1010 18:29:50.404107 4799 scope.go:117] "RemoveContainer" containerID="e1753dd33d9c2573eba3d4245d76828dad7bb15c7538442d84b9b903c94df080" Oct 10 18:29:50 crc kubenswrapper[4799]: E1010 18:29:50.405330 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:30:00 crc kubenswrapper[4799]: I1010 18:30:00.199182 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335350-hqw4q"] Oct 10 18:30:00 crc kubenswrapper[4799]: E1010 18:30:00.200932 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff216c68-4dcf-46e2-99f4-94d6ceba8173" containerName="registry-server" Oct 10 18:30:00 crc kubenswrapper[4799]: I1010 18:30:00.200960 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff216c68-4dcf-46e2-99f4-94d6ceba8173" containerName="registry-server" Oct 10 18:30:00 crc kubenswrapper[4799]: E1010 18:30:00.200995 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff216c68-4dcf-46e2-99f4-94d6ceba8173" containerName="extract-utilities" Oct 10 18:30:00 crc 
Oct 10 18:30:00 crc kubenswrapper[4799]: E1010 18:30:00.201053 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff216c68-4dcf-46e2-99f4-94d6ceba8173" containerName="extract-content"
Oct 10 18:30:00 crc kubenswrapper[4799]: I1010 18:30:00.201067 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff216c68-4dcf-46e2-99f4-94d6ceba8173" containerName="extract-content"
Oct 10 18:30:00 crc kubenswrapper[4799]: I1010 18:30:00.201440 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff216c68-4dcf-46e2-99f4-94d6ceba8173" containerName="registry-server"
Oct 10 18:30:00 crc kubenswrapper[4799]: I1010 18:30:00.202749 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335350-hqw4q"
Oct 10 18:30:00 crc kubenswrapper[4799]: I1010 18:30:00.207422 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Oct 10 18:30:00 crc kubenswrapper[4799]: I1010 18:30:00.207614 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Oct 10 18:30:00 crc kubenswrapper[4799]: I1010 18:30:00.223659 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335350-hqw4q"]
Oct 10 18:30:00 crc kubenswrapper[4799]: I1010 18:30:00.355147 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fe898006-01ae-4272-841a-ea4e097c5dad-config-volume\") pod \"collect-profiles-29335350-hqw4q\" (UID: \"fe898006-01ae-4272-841a-ea4e097c5dad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335350-hqw4q"
Oct 10 18:30:00 crc kubenswrapper[4799]: I1010 18:30:00.355541 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fe898006-01ae-4272-841a-ea4e097c5dad-secret-volume\") pod \"collect-profiles-29335350-hqw4q\" (UID: \"fe898006-01ae-4272-841a-ea4e097c5dad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335350-hqw4q"
Oct 10 18:30:00 crc kubenswrapper[4799]: I1010 18:30:00.355700 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gfzhn\" (UniqueName: \"kubernetes.io/projected/fe898006-01ae-4272-841a-ea4e097c5dad-kube-api-access-gfzhn\") pod \"collect-profiles-29335350-hqw4q\" (UID: \"fe898006-01ae-4272-841a-ea4e097c5dad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335350-hqw4q"
Oct 10 18:30:00 crc kubenswrapper[4799]: I1010 18:30:00.457279 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gfzhn\" (UniqueName: \"kubernetes.io/projected/fe898006-01ae-4272-841a-ea4e097c5dad-kube-api-access-gfzhn\") pod \"collect-profiles-29335350-hqw4q\" (UID: \"fe898006-01ae-4272-841a-ea4e097c5dad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335350-hqw4q"
Oct 10 18:30:00 crc kubenswrapper[4799]: I1010 18:30:00.457354 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fe898006-01ae-4272-841a-ea4e097c5dad-config-volume\") pod \"collect-profiles-29335350-hqw4q\" (UID: \"fe898006-01ae-4272-841a-ea4e097c5dad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335350-hqw4q"
Oct 10 18:30:00 crc kubenswrapper[4799]: I1010 18:30:00.457381 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fe898006-01ae-4272-841a-ea4e097c5dad-secret-volume\") pod \"collect-profiles-29335350-hqw4q\" (UID: \"fe898006-01ae-4272-841a-ea4e097c5dad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335350-hqw4q"
Oct 10 18:30:00 crc kubenswrapper[4799]: I1010 18:30:00.458159 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fe898006-01ae-4272-841a-ea4e097c5dad-config-volume\") pod \"collect-profiles-29335350-hqw4q\" (UID: \"fe898006-01ae-4272-841a-ea4e097c5dad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335350-hqw4q"
Oct 10 18:30:00 crc kubenswrapper[4799]: I1010 18:30:00.464412 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fe898006-01ae-4272-841a-ea4e097c5dad-secret-volume\") pod \"collect-profiles-29335350-hqw4q\" (UID: \"fe898006-01ae-4272-841a-ea4e097c5dad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335350-hqw4q"
Oct 10 18:30:00 crc kubenswrapper[4799]: I1010 18:30:00.487879 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gfzhn\" (UniqueName: \"kubernetes.io/projected/fe898006-01ae-4272-841a-ea4e097c5dad-kube-api-access-gfzhn\") pod \"collect-profiles-29335350-hqw4q\" (UID: \"fe898006-01ae-4272-841a-ea4e097c5dad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335350-hqw4q"
Oct 10 18:30:00 crc kubenswrapper[4799]: I1010 18:30:00.540753 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335350-hqw4q"
Oct 10 18:30:01 crc kubenswrapper[4799]: I1010 18:30:01.074215 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335350-hqw4q"]
Oct 10 18:30:01 crc kubenswrapper[4799]: I1010 18:30:01.959429 4799 generic.go:334] "Generic (PLEG): container finished" podID="fe898006-01ae-4272-841a-ea4e097c5dad" containerID="a49d3b420281e1ceeacbbd77f0e90084601e059b92a78a6f31ec8024c8b06a55" exitCode=0
Oct 10 18:30:01 crc kubenswrapper[4799]: I1010 18:30:01.959487 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335350-hqw4q" event={"ID":"fe898006-01ae-4272-841a-ea4e097c5dad","Type":"ContainerDied","Data":"a49d3b420281e1ceeacbbd77f0e90084601e059b92a78a6f31ec8024c8b06a55"}
Oct 10 18:30:01 crc kubenswrapper[4799]: I1010 18:30:01.959701 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335350-hqw4q" event={"ID":"fe898006-01ae-4272-841a-ea4e097c5dad","Type":"ContainerStarted","Data":"f95d1c5e884fc22b6fe2c8f0bd6cecc17c0269be83bc7ea635c0a8a923fa3021"}
Oct 10 18:30:03 crc kubenswrapper[4799]: I1010 18:30:03.438849 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335350-hqw4q"
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335350-hqw4q" Oct 10 18:30:03 crc kubenswrapper[4799]: I1010 18:30:03.536913 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fe898006-01ae-4272-841a-ea4e097c5dad-config-volume\") pod \"fe898006-01ae-4272-841a-ea4e097c5dad\" (UID: \"fe898006-01ae-4272-841a-ea4e097c5dad\") " Oct 10 18:30:03 crc kubenswrapper[4799]: I1010 18:30:03.537854 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe898006-01ae-4272-841a-ea4e097c5dad-config-volume" (OuterVolumeSpecName: "config-volume") pod "fe898006-01ae-4272-841a-ea4e097c5dad" (UID: "fe898006-01ae-4272-841a-ea4e097c5dad"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:30:03 crc kubenswrapper[4799]: I1010 18:30:03.538099 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gfzhn\" (UniqueName: \"kubernetes.io/projected/fe898006-01ae-4272-841a-ea4e097c5dad-kube-api-access-gfzhn\") pod \"fe898006-01ae-4272-841a-ea4e097c5dad\" (UID: \"fe898006-01ae-4272-841a-ea4e097c5dad\") " Oct 10 18:30:03 crc kubenswrapper[4799]: I1010 18:30:03.539528 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fe898006-01ae-4272-841a-ea4e097c5dad-secret-volume\") pod \"fe898006-01ae-4272-841a-ea4e097c5dad\" (UID: \"fe898006-01ae-4272-841a-ea4e097c5dad\") " Oct 10 18:30:03 crc kubenswrapper[4799]: I1010 18:30:03.541006 4799 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fe898006-01ae-4272-841a-ea4e097c5dad-config-volume\") on node \"crc\" DevicePath \"\"" Oct 10 18:30:03 crc kubenswrapper[4799]: I1010 18:30:03.544026 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe898006-01ae-4272-841a-ea4e097c5dad-kube-api-access-gfzhn" (OuterVolumeSpecName: "kube-api-access-gfzhn") pod "fe898006-01ae-4272-841a-ea4e097c5dad" (UID: "fe898006-01ae-4272-841a-ea4e097c5dad"). InnerVolumeSpecName "kube-api-access-gfzhn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:30:03 crc kubenswrapper[4799]: I1010 18:30:03.546988 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe898006-01ae-4272-841a-ea4e097c5dad-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "fe898006-01ae-4272-841a-ea4e097c5dad" (UID: "fe898006-01ae-4272-841a-ea4e097c5dad"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:30:03 crc kubenswrapper[4799]: I1010 18:30:03.643615 4799 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fe898006-01ae-4272-841a-ea4e097c5dad-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 10 18:30:03 crc kubenswrapper[4799]: I1010 18:30:03.643887 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gfzhn\" (UniqueName: \"kubernetes.io/projected/fe898006-01ae-4272-841a-ea4e097c5dad-kube-api-access-gfzhn\") on node \"crc\" DevicePath \"\"" Oct 10 18:30:03 crc kubenswrapper[4799]: I1010 18:30:03.984173 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335350-hqw4q" event={"ID":"fe898006-01ae-4272-841a-ea4e097c5dad","Type":"ContainerDied","Data":"f95d1c5e884fc22b6fe2c8f0bd6cecc17c0269be83bc7ea635c0a8a923fa3021"} Oct 10 18:30:03 crc kubenswrapper[4799]: I1010 18:30:03.984212 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f95d1c5e884fc22b6fe2c8f0bd6cecc17c0269be83bc7ea635c0a8a923fa3021" Oct 10 18:30:03 crc kubenswrapper[4799]: I1010 18:30:03.984280 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335350-hqw4q" Oct 10 18:30:04 crc kubenswrapper[4799]: I1010 18:30:04.403336 4799 scope.go:117] "RemoveContainer" containerID="e1753dd33d9c2573eba3d4245d76828dad7bb15c7538442d84b9b903c94df080" Oct 10 18:30:04 crc kubenswrapper[4799]: E1010 18:30:04.403857 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:30:04 crc kubenswrapper[4799]: I1010 18:30:04.541911 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335305-gm8nb"] Oct 10 18:30:04 crc kubenswrapper[4799]: I1010 18:30:04.555873 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335305-gm8nb"] Oct 10 18:30:05 crc kubenswrapper[4799]: I1010 18:30:05.442381 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36939816-2c24-423d-8361-9471625ae3f5" path="/var/lib/kubelet/pods/36939816-2c24-423d-8361-9471625ae3f5/volumes" Oct 10 18:30:11 crc kubenswrapper[4799]: I1010 18:30:11.229053 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zmmx6"] Oct 10 18:30:11 crc kubenswrapper[4799]: E1010 18:30:11.234733 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe898006-01ae-4272-841a-ea4e097c5dad" containerName="collect-profiles" Oct 10 18:30:11 crc kubenswrapper[4799]: I1010 18:30:11.234936 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe898006-01ae-4272-841a-ea4e097c5dad" containerName="collect-profiles" Oct 10 18:30:11 crc kubenswrapper[4799]: I1010 18:30:11.235436 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe898006-01ae-4272-841a-ea4e097c5dad" containerName="collect-profiles" Oct 10 18:30:11 crc kubenswrapper[4799]: I1010 18:30:11.238297 4799 util.go:30] "No sandbox 
Oct 10 18:30:11 crc kubenswrapper[4799]: I1010 18:30:11.252512 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zmmx6"]
Oct 10 18:30:11 crc kubenswrapper[4799]: I1010 18:30:11.347779 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vz99d\" (UniqueName: \"kubernetes.io/projected/a07faa7c-7af7-4751-82c5-9c31acda4ec0-kube-api-access-vz99d\") pod \"community-operators-zmmx6\" (UID: \"a07faa7c-7af7-4751-82c5-9c31acda4ec0\") " pod="openshift-marketplace/community-operators-zmmx6"
Oct 10 18:30:11 crc kubenswrapper[4799]: I1010 18:30:11.347877 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a07faa7c-7af7-4751-82c5-9c31acda4ec0-utilities\") pod \"community-operators-zmmx6\" (UID: \"a07faa7c-7af7-4751-82c5-9c31acda4ec0\") " pod="openshift-marketplace/community-operators-zmmx6"
Oct 10 18:30:11 crc kubenswrapper[4799]: I1010 18:30:11.348053 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a07faa7c-7af7-4751-82c5-9c31acda4ec0-catalog-content\") pod \"community-operators-zmmx6\" (UID: \"a07faa7c-7af7-4751-82c5-9c31acda4ec0\") " pod="openshift-marketplace/community-operators-zmmx6"
Oct 10 18:30:11 crc kubenswrapper[4799]: I1010 18:30:11.450366 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vz99d\" (UniqueName: \"kubernetes.io/projected/a07faa7c-7af7-4751-82c5-9c31acda4ec0-kube-api-access-vz99d\") pod \"community-operators-zmmx6\" (UID: \"a07faa7c-7af7-4751-82c5-9c31acda4ec0\") " pod="openshift-marketplace/community-operators-zmmx6"
Oct 10 18:30:11 crc kubenswrapper[4799]: I1010 18:30:11.450461 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a07faa7c-7af7-4751-82c5-9c31acda4ec0-utilities\") pod \"community-operators-zmmx6\" (UID: \"a07faa7c-7af7-4751-82c5-9c31acda4ec0\") " pod="openshift-marketplace/community-operators-zmmx6"
Oct 10 18:30:11 crc kubenswrapper[4799]: I1010 18:30:11.450496 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a07faa7c-7af7-4751-82c5-9c31acda4ec0-catalog-content\") pod \"community-operators-zmmx6\" (UID: \"a07faa7c-7af7-4751-82c5-9c31acda4ec0\") " pod="openshift-marketplace/community-operators-zmmx6"
Oct 10 18:30:11 crc kubenswrapper[4799]: I1010 18:30:11.451184 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a07faa7c-7af7-4751-82c5-9c31acda4ec0-catalog-content\") pod \"community-operators-zmmx6\" (UID: \"a07faa7c-7af7-4751-82c5-9c31acda4ec0\") " pod="openshift-marketplace/community-operators-zmmx6"
Oct 10 18:30:11 crc kubenswrapper[4799]: I1010 18:30:11.451356 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a07faa7c-7af7-4751-82c5-9c31acda4ec0-utilities\") pod \"community-operators-zmmx6\" (UID: \"a07faa7c-7af7-4751-82c5-9c31acda4ec0\") " pod="openshift-marketplace/community-operators-zmmx6"
Oct 10 18:30:11 crc kubenswrapper[4799]: I1010 18:30:11.477101 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vz99d\" (UniqueName: \"kubernetes.io/projected/a07faa7c-7af7-4751-82c5-9c31acda4ec0-kube-api-access-vz99d\") pod \"community-operators-zmmx6\" (UID: \"a07faa7c-7af7-4751-82c5-9c31acda4ec0\") " pod="openshift-marketplace/community-operators-zmmx6"
Oct 10 18:30:11 crc kubenswrapper[4799]: I1010 18:30:11.566160 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zmmx6"
Oct 10 18:30:12 crc kubenswrapper[4799]: I1010 18:30:12.137654 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zmmx6"]
Oct 10 18:30:13 crc kubenswrapper[4799]: I1010 18:30:13.161553 4799 generic.go:334] "Generic (PLEG): container finished" podID="a07faa7c-7af7-4751-82c5-9c31acda4ec0" containerID="285e78d14cbae9b90ccc895fc76be9821372c5a7e9b71f19373374a477301c89" exitCode=0
Oct 10 18:30:13 crc kubenswrapper[4799]: I1010 18:30:13.161643 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zmmx6" event={"ID":"a07faa7c-7af7-4751-82c5-9c31acda4ec0","Type":"ContainerDied","Data":"285e78d14cbae9b90ccc895fc76be9821372c5a7e9b71f19373374a477301c89"}
Oct 10 18:30:13 crc kubenswrapper[4799]: I1010 18:30:13.161719 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zmmx6" event={"ID":"a07faa7c-7af7-4751-82c5-9c31acda4ec0","Type":"ContainerStarted","Data":"c9f47c0e03c05e9f04a319533a8f0207619be4e549d0ca8ff2809e8ed888990c"}
Oct 10 18:30:13 crc kubenswrapper[4799]: I1010 18:30:13.168422 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 10 18:30:15 crc kubenswrapper[4799]: I1010 18:30:15.190952 4799 generic.go:334] "Generic (PLEG): container finished" podID="a07faa7c-7af7-4751-82c5-9c31acda4ec0" containerID="2259ca275a6430ead49a936e1592e05d5e1603125cc4a5c2fb28ccb6c787d6d5" exitCode=0
Oct 10 18:30:15 crc kubenswrapper[4799]: I1010 18:30:15.191118 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zmmx6" event={"ID":"a07faa7c-7af7-4751-82c5-9c31acda4ec0","Type":"ContainerDied","Data":"2259ca275a6430ead49a936e1592e05d5e1603125cc4a5c2fb28ccb6c787d6d5"}
Oct 10 18:30:15 crc kubenswrapper[4799]: I1010 18:30:15.194571 4799 generic.go:334] "Generic (PLEG): container finished" podID="c4e679ee-ac3b-4e3c-9869-b86de400033e" containerID="de61c9fb0e935077f74e48082343ff6b78f505584c56473ce956dedca0492bbe" exitCode=0
Oct 10 18:30:15 crc kubenswrapper[4799]: I1010 18:30:15.194614 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4" event={"ID":"c4e679ee-ac3b-4e3c-9869-b86de400033e","Type":"ContainerDied","Data":"de61c9fb0e935077f74e48082343ff6b78f505584c56473ce956dedca0492bbe"}
Oct 10 18:30:15 crc kubenswrapper[4799]: I1010 18:30:15.403606 4799 scope.go:117] "RemoveContainer" containerID="e1753dd33d9c2573eba3d4245d76828dad7bb15c7538442d84b9b903c94df080"
Oct 10 18:30:15 crc kubenswrapper[4799]: E1010 18:30:15.405134 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb"
Oct 10 18:30:16 crc kubenswrapper[4799]: I1010 18:30:16.076208 4799 scope.go:117] "RemoveContainer" containerID="9668bfa07bd433551238d3440b7abcfac0d8f45d2a70d7bf7d07b2d2ca20effe"
Oct 10 18:30:16 crc kubenswrapper[4799]: I1010 18:30:16.208438 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zmmx6" event={"ID":"a07faa7c-7af7-4751-82c5-9c31acda4ec0","Type":"ContainerStarted","Data":"0e65a5555a59c65de3c54257c3bdfc0087505a36a39b2ac4d73f660ada412492"}
Oct 10 18:30:16 crc kubenswrapper[4799]: I1010 18:30:16.228198 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-zmmx6" podStartSLOduration=2.676100022 podStartE2EDuration="5.228179096s" podCreationTimestamp="2025-10-10 18:30:11 +0000 UTC" firstStartedPulling="2025-10-10 18:30:13.167999361 +0000 UTC m=+7106.676323506" lastFinishedPulling="2025-10-10 18:30:15.720078435 +0000 UTC m=+7109.228402580" observedRunningTime="2025-10-10 18:30:16.224581988 +0000 UTC m=+7109.732906123" watchObservedRunningTime="2025-10-10 18:30:16.228179096 +0000 UTC m=+7109.736503211"
Oct 10 18:30:16 crc kubenswrapper[4799]: I1010 18:30:16.724497 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4"
Oct 10 18:30:16 crc kubenswrapper[4799]: I1010 18:30:16.785158 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c4e679ee-ac3b-4e3c-9869-b86de400033e-ssh-key\") pod \"c4e679ee-ac3b-4e3c-9869-b86de400033e\" (UID: \"c4e679ee-ac3b-4e3c-9869-b86de400033e\") "
Oct 10 18:30:16 crc kubenswrapper[4799]: I1010 18:30:16.785285 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-csp4n\" (UniqueName: \"kubernetes.io/projected/c4e679ee-ac3b-4e3c-9869-b86de400033e-kube-api-access-csp4n\") pod \"c4e679ee-ac3b-4e3c-9869-b86de400033e\" (UID: \"c4e679ee-ac3b-4e3c-9869-b86de400033e\") "
Oct 10 18:30:16 crc kubenswrapper[4799]: I1010 18:30:16.785460 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c4e679ee-ac3b-4e3c-9869-b86de400033e-inventory\") pod \"c4e679ee-ac3b-4e3c-9869-b86de400033e\" (UID: \"c4e679ee-ac3b-4e3c-9869-b86de400033e\") "
Oct 10 18:30:16 crc kubenswrapper[4799]: I1010 18:30:16.785500 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c4e679ee-ac3b-4e3c-9869-b86de400033e-ceph\") pod \"c4e679ee-ac3b-4e3c-9869-b86de400033e\" (UID: \"c4e679ee-ac3b-4e3c-9869-b86de400033e\") "
Oct 10 18:30:16 crc kubenswrapper[4799]: I1010 18:30:16.785713 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4e679ee-ac3b-4e3c-9869-b86de400033e-tripleo-cleanup-combined-ca-bundle\") pod \"c4e679ee-ac3b-4e3c-9869-b86de400033e\" (UID: \"c4e679ee-ac3b-4e3c-9869-b86de400033e\") "
Oct 10 18:30:16 crc kubenswrapper[4799]: I1010 18:30:16.793054 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4e679ee-ac3b-4e3c-9869-b86de400033e-kube-api-access-csp4n" (OuterVolumeSpecName: "kube-api-access-csp4n") pod "c4e679ee-ac3b-4e3c-9869-b86de400033e" (UID: "c4e679ee-ac3b-4e3c-9869-b86de400033e"). InnerVolumeSpecName "kube-api-access-csp4n". PluginName "kubernetes.io/projected", VolumeGidValue ""
(UID: "c4e679ee-ac3b-4e3c-9869-b86de400033e"). InnerVolumeSpecName "kube-api-access-csp4n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:30:16 crc kubenswrapper[4799]: I1010 18:30:16.795724 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4e679ee-ac3b-4e3c-9869-b86de400033e-tripleo-cleanup-combined-ca-bundle" (OuterVolumeSpecName: "tripleo-cleanup-combined-ca-bundle") pod "c4e679ee-ac3b-4e3c-9869-b86de400033e" (UID: "c4e679ee-ac3b-4e3c-9869-b86de400033e"). InnerVolumeSpecName "tripleo-cleanup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:30:16 crc kubenswrapper[4799]: I1010 18:30:16.796950 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4e679ee-ac3b-4e3c-9869-b86de400033e-ceph" (OuterVolumeSpecName: "ceph") pod "c4e679ee-ac3b-4e3c-9869-b86de400033e" (UID: "c4e679ee-ac3b-4e3c-9869-b86de400033e"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:30:16 crc kubenswrapper[4799]: I1010 18:30:16.817292 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4e679ee-ac3b-4e3c-9869-b86de400033e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c4e679ee-ac3b-4e3c-9869-b86de400033e" (UID: "c4e679ee-ac3b-4e3c-9869-b86de400033e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:30:16 crc kubenswrapper[4799]: I1010 18:30:16.837193 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4e679ee-ac3b-4e3c-9869-b86de400033e-inventory" (OuterVolumeSpecName: "inventory") pod "c4e679ee-ac3b-4e3c-9869-b86de400033e" (UID: "c4e679ee-ac3b-4e3c-9869-b86de400033e"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:30:16 crc kubenswrapper[4799]: I1010 18:30:16.888451 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-csp4n\" (UniqueName: \"kubernetes.io/projected/c4e679ee-ac3b-4e3c-9869-b86de400033e-kube-api-access-csp4n\") on node \"crc\" DevicePath \"\"" Oct 10 18:30:16 crc kubenswrapper[4799]: I1010 18:30:16.888585 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c4e679ee-ac3b-4e3c-9869-b86de400033e-inventory\") on node \"crc\" DevicePath \"\"" Oct 10 18:30:16 crc kubenswrapper[4799]: I1010 18:30:16.888659 4799 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c4e679ee-ac3b-4e3c-9869-b86de400033e-ceph\") on node \"crc\" DevicePath \"\"" Oct 10 18:30:16 crc kubenswrapper[4799]: I1010 18:30:16.888714 4799 reconciler_common.go:293] "Volume detached for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4e679ee-ac3b-4e3c-9869-b86de400033e-tripleo-cleanup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:30:16 crc kubenswrapper[4799]: I1010 18:30:16.888793 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c4e679ee-ac3b-4e3c-9869-b86de400033e-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 10 18:30:17 crc kubenswrapper[4799]: I1010 18:30:17.229295 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4" event={"ID":"c4e679ee-ac3b-4e3c-9869-b86de400033e","Type":"ContainerDied","Data":"65ff8f62cbeaa94c9ff76cbb40be2b84df278a698b1fec3129cf9bf099168076"} Oct 10 18:30:17 crc kubenswrapper[4799]: I1010 18:30:17.229379 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="65ff8f62cbeaa94c9ff76cbb40be2b84df278a698b1fec3129cf9bf099168076" Oct 10 18:30:17 crc kubenswrapper[4799]: I1010 18:30:17.229340 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4" Oct 10 18:30:21 crc kubenswrapper[4799]: I1010 18:30:21.491419 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9m54q"] Oct 10 18:30:21 crc kubenswrapper[4799]: E1010 18:30:21.494983 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4e679ee-ac3b-4e3c-9869-b86de400033e" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1" Oct 10 18:30:21 crc kubenswrapper[4799]: I1010 18:30:21.495391 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4e679ee-ac3b-4e3c-9869-b86de400033e" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1" Oct 10 18:30:21 crc kubenswrapper[4799]: I1010 18:30:21.495925 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4e679ee-ac3b-4e3c-9869-b86de400033e" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1" Oct 10 18:30:21 crc kubenswrapper[4799]: I1010 18:30:21.498743 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9m54q" Oct 10 18:30:21 crc kubenswrapper[4799]: I1010 18:30:21.504860 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9m54q"] Oct 10 18:30:21 crc kubenswrapper[4799]: I1010 18:30:21.567625 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zmmx6" Oct 10 18:30:21 crc kubenswrapper[4799]: I1010 18:30:21.569699 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zmmx6" Oct 10 18:30:21 crc kubenswrapper[4799]: I1010 18:30:21.637074 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zmmx6" Oct 10 18:30:21 crc kubenswrapper[4799]: I1010 18:30:21.640638 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkthc\" (UniqueName: \"kubernetes.io/projected/4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c-kube-api-access-qkthc\") pod \"redhat-operators-9m54q\" (UID: \"4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c\") " pod="openshift-marketplace/redhat-operators-9m54q" Oct 10 18:30:21 crc kubenswrapper[4799]: I1010 18:30:21.640864 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c-utilities\") pod \"redhat-operators-9m54q\" (UID: \"4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c\") " pod="openshift-marketplace/redhat-operators-9m54q" Oct 10 18:30:21 crc kubenswrapper[4799]: I1010 18:30:21.641007 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c-catalog-content\") pod \"redhat-operators-9m54q\" (UID: \"4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c\") " pod="openshift-marketplace/redhat-operators-9m54q" Oct 10 18:30:21 crc kubenswrapper[4799]: I1010 18:30:21.742936 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c-utilities\") pod \"redhat-operators-9m54q\" (UID: \"4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c\") " pod="openshift-marketplace/redhat-operators-9m54q" Oct 10 18:30:21 crc kubenswrapper[4799]: I1010 18:30:21.742989 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c-catalog-content\") pod \"redhat-operators-9m54q\" (UID: \"4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c\") " pod="openshift-marketplace/redhat-operators-9m54q" Oct 10 18:30:21 crc kubenswrapper[4799]: I1010 18:30:21.743131 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkthc\" (UniqueName: \"kubernetes.io/projected/4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c-kube-api-access-qkthc\") pod \"redhat-operators-9m54q\" (UID: \"4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c\") " pod="openshift-marketplace/redhat-operators-9m54q" Oct 10 18:30:21 crc kubenswrapper[4799]: I1010 18:30:21.743603 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c-utilities\") pod \"redhat-operators-9m54q\" (UID: \"4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c\") " 
pod="openshift-marketplace/redhat-operators-9m54q" Oct 10 18:30:21 crc kubenswrapper[4799]: I1010 18:30:21.743897 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c-catalog-content\") pod \"redhat-operators-9m54q\" (UID: \"4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c\") " pod="openshift-marketplace/redhat-operators-9m54q" Oct 10 18:30:21 crc kubenswrapper[4799]: I1010 18:30:21.765601 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkthc\" (UniqueName: \"kubernetes.io/projected/4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c-kube-api-access-qkthc\") pod \"redhat-operators-9m54q\" (UID: \"4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c\") " pod="openshift-marketplace/redhat-operators-9m54q" Oct 10 18:30:21 crc kubenswrapper[4799]: I1010 18:30:21.877547 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9m54q" Oct 10 18:30:22 crc kubenswrapper[4799]: I1010 18:30:22.347979 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-zmmx6" Oct 10 18:30:22 crc kubenswrapper[4799]: I1010 18:30:22.360347 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9m54q"] Oct 10 18:30:23 crc kubenswrapper[4799]: I1010 18:30:23.303984 4799 generic.go:334] "Generic (PLEG): container finished" podID="4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c" containerID="ff509e7195fd70a906ae5423101cf527658bbf4c1a908ef3ed7e56b6cfc24901" exitCode=0 Oct 10 18:30:23 crc kubenswrapper[4799]: I1010 18:30:23.304081 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9m54q" event={"ID":"4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c","Type":"ContainerDied","Data":"ff509e7195fd70a906ae5423101cf527658bbf4c1a908ef3ed7e56b6cfc24901"} Oct 10 18:30:23 crc kubenswrapper[4799]: I1010 18:30:23.304550 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9m54q" event={"ID":"4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c","Type":"ContainerStarted","Data":"431a9bdc7aeac9e2e9c1b47dd99a03298e184c63d3c18f6a5765c90819e28958"} Oct 10 18:30:24 crc kubenswrapper[4799]: I1010 18:30:24.053991 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zmmx6"] Oct 10 18:30:25 crc kubenswrapper[4799]: I1010 18:30:25.333063 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9m54q" event={"ID":"4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c","Type":"ContainerStarted","Data":"92e45468b7cdbbb88459c0a0ea7e5c8855a33feafd2f5f7e2890996f7aaedf52"} Oct 10 18:30:25 crc kubenswrapper[4799]: I1010 18:30:25.333175 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-zmmx6" podUID="a07faa7c-7af7-4751-82c5-9c31acda4ec0" containerName="registry-server" containerID="cri-o://0e65a5555a59c65de3c54257c3bdfc0087505a36a39b2ac4d73f660ada412492" gracePeriod=2 Oct 10 18:30:25 crc kubenswrapper[4799]: I1010 18:30:25.559253 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-pgztr"] Oct 10 18:30:25 crc kubenswrapper[4799]: I1010 18:30:25.576382 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-pgztr" Oct 10 18:30:25 crc kubenswrapper[4799]: I1010 18:30:25.579582 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-pgztr"] Oct 10 18:30:25 crc kubenswrapper[4799]: I1010 18:30:25.581075 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 10 18:30:25 crc kubenswrapper[4799]: I1010 18:30:25.584914 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 10 18:30:25 crc kubenswrapper[4799]: I1010 18:30:25.586167 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-rdlhr" Oct 10 18:30:25 crc kubenswrapper[4799]: I1010 18:30:25.593040 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 10 18:30:25 crc kubenswrapper[4799]: I1010 18:30:25.659415 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9kc8\" (UniqueName: \"kubernetes.io/projected/797acbff-1308-4140-9468-a7eaaa3e5e75-kube-api-access-w9kc8\") pod \"bootstrap-openstack-openstack-cell1-pgztr\" (UID: \"797acbff-1308-4140-9468-a7eaaa3e5e75\") " pod="openstack/bootstrap-openstack-openstack-cell1-pgztr" Oct 10 18:30:25 crc kubenswrapper[4799]: I1010 18:30:25.659800 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/797acbff-1308-4140-9468-a7eaaa3e5e75-ssh-key\") pod \"bootstrap-openstack-openstack-cell1-pgztr\" (UID: \"797acbff-1308-4140-9468-a7eaaa3e5e75\") " pod="openstack/bootstrap-openstack-openstack-cell1-pgztr" Oct 10 18:30:25 crc kubenswrapper[4799]: I1010 18:30:25.659903 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/797acbff-1308-4140-9468-a7eaaa3e5e75-inventory\") pod \"bootstrap-openstack-openstack-cell1-pgztr\" (UID: \"797acbff-1308-4140-9468-a7eaaa3e5e75\") " pod="openstack/bootstrap-openstack-openstack-cell1-pgztr" Oct 10 18:30:25 crc kubenswrapper[4799]: I1010 18:30:25.659975 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/797acbff-1308-4140-9468-a7eaaa3e5e75-ceph\") pod \"bootstrap-openstack-openstack-cell1-pgztr\" (UID: \"797acbff-1308-4140-9468-a7eaaa3e5e75\") " pod="openstack/bootstrap-openstack-openstack-cell1-pgztr" Oct 10 18:30:25 crc kubenswrapper[4799]: I1010 18:30:25.660147 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/797acbff-1308-4140-9468-a7eaaa3e5e75-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-pgztr\" (UID: \"797acbff-1308-4140-9468-a7eaaa3e5e75\") " pod="openstack/bootstrap-openstack-openstack-cell1-pgztr" Oct 10 18:30:25 crc kubenswrapper[4799]: I1010 18:30:25.761691 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9kc8\" (UniqueName: \"kubernetes.io/projected/797acbff-1308-4140-9468-a7eaaa3e5e75-kube-api-access-w9kc8\") pod \"bootstrap-openstack-openstack-cell1-pgztr\" (UID: \"797acbff-1308-4140-9468-a7eaaa3e5e75\") " pod="openstack/bootstrap-openstack-openstack-cell1-pgztr" Oct 10 
18:30:25 crc kubenswrapper[4799]: I1010 18:30:25.761745 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/797acbff-1308-4140-9468-a7eaaa3e5e75-ssh-key\") pod \"bootstrap-openstack-openstack-cell1-pgztr\" (UID: \"797acbff-1308-4140-9468-a7eaaa3e5e75\") " pod="openstack/bootstrap-openstack-openstack-cell1-pgztr" Oct 10 18:30:25 crc kubenswrapper[4799]: I1010 18:30:25.761868 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/797acbff-1308-4140-9468-a7eaaa3e5e75-inventory\") pod \"bootstrap-openstack-openstack-cell1-pgztr\" (UID: \"797acbff-1308-4140-9468-a7eaaa3e5e75\") " pod="openstack/bootstrap-openstack-openstack-cell1-pgztr" Oct 10 18:30:25 crc kubenswrapper[4799]: I1010 18:30:25.761907 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/797acbff-1308-4140-9468-a7eaaa3e5e75-ceph\") pod \"bootstrap-openstack-openstack-cell1-pgztr\" (UID: \"797acbff-1308-4140-9468-a7eaaa3e5e75\") " pod="openstack/bootstrap-openstack-openstack-cell1-pgztr" Oct 10 18:30:25 crc kubenswrapper[4799]: I1010 18:30:25.762020 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/797acbff-1308-4140-9468-a7eaaa3e5e75-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-pgztr\" (UID: \"797acbff-1308-4140-9468-a7eaaa3e5e75\") " pod="openstack/bootstrap-openstack-openstack-cell1-pgztr" Oct 10 18:30:25 crc kubenswrapper[4799]: I1010 18:30:25.768816 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/797acbff-1308-4140-9468-a7eaaa3e5e75-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-pgztr\" (UID: \"797acbff-1308-4140-9468-a7eaaa3e5e75\") " pod="openstack/bootstrap-openstack-openstack-cell1-pgztr" Oct 10 18:30:25 crc kubenswrapper[4799]: I1010 18:30:25.768852 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/797acbff-1308-4140-9468-a7eaaa3e5e75-inventory\") pod \"bootstrap-openstack-openstack-cell1-pgztr\" (UID: \"797acbff-1308-4140-9468-a7eaaa3e5e75\") " pod="openstack/bootstrap-openstack-openstack-cell1-pgztr" Oct 10 18:30:25 crc kubenswrapper[4799]: I1010 18:30:25.771380 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/797acbff-1308-4140-9468-a7eaaa3e5e75-ceph\") pod \"bootstrap-openstack-openstack-cell1-pgztr\" (UID: \"797acbff-1308-4140-9468-a7eaaa3e5e75\") " pod="openstack/bootstrap-openstack-openstack-cell1-pgztr" Oct 10 18:30:25 crc kubenswrapper[4799]: I1010 18:30:25.778170 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/797acbff-1308-4140-9468-a7eaaa3e5e75-ssh-key\") pod \"bootstrap-openstack-openstack-cell1-pgztr\" (UID: \"797acbff-1308-4140-9468-a7eaaa3e5e75\") " pod="openstack/bootstrap-openstack-openstack-cell1-pgztr" Oct 10 18:30:25 crc kubenswrapper[4799]: I1010 18:30:25.778638 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9kc8\" (UniqueName: \"kubernetes.io/projected/797acbff-1308-4140-9468-a7eaaa3e5e75-kube-api-access-w9kc8\") pod \"bootstrap-openstack-openstack-cell1-pgztr\" (UID: 
\"797acbff-1308-4140-9468-a7eaaa3e5e75\") " pod="openstack/bootstrap-openstack-openstack-cell1-pgztr" Oct 10 18:30:25 crc kubenswrapper[4799]: I1010 18:30:25.873206 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zmmx6" Oct 10 18:30:25 crc kubenswrapper[4799]: I1010 18:30:25.931467 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-pgztr" Oct 10 18:30:25 crc kubenswrapper[4799]: I1010 18:30:25.967931 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a07faa7c-7af7-4751-82c5-9c31acda4ec0-utilities\") pod \"a07faa7c-7af7-4751-82c5-9c31acda4ec0\" (UID: \"a07faa7c-7af7-4751-82c5-9c31acda4ec0\") " Oct 10 18:30:25 crc kubenswrapper[4799]: I1010 18:30:25.967993 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a07faa7c-7af7-4751-82c5-9c31acda4ec0-catalog-content\") pod \"a07faa7c-7af7-4751-82c5-9c31acda4ec0\" (UID: \"a07faa7c-7af7-4751-82c5-9c31acda4ec0\") " Oct 10 18:30:25 crc kubenswrapper[4799]: I1010 18:30:25.968231 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vz99d\" (UniqueName: \"kubernetes.io/projected/a07faa7c-7af7-4751-82c5-9c31acda4ec0-kube-api-access-vz99d\") pod \"a07faa7c-7af7-4751-82c5-9c31acda4ec0\" (UID: \"a07faa7c-7af7-4751-82c5-9c31acda4ec0\") " Oct 10 18:30:25 crc kubenswrapper[4799]: I1010 18:30:25.969934 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a07faa7c-7af7-4751-82c5-9c31acda4ec0-utilities" (OuterVolumeSpecName: "utilities") pod "a07faa7c-7af7-4751-82c5-9c31acda4ec0" (UID: "a07faa7c-7af7-4751-82c5-9c31acda4ec0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:30:25 crc kubenswrapper[4799]: I1010 18:30:25.974054 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a07faa7c-7af7-4751-82c5-9c31acda4ec0-kube-api-access-vz99d" (OuterVolumeSpecName: "kube-api-access-vz99d") pod "a07faa7c-7af7-4751-82c5-9c31acda4ec0" (UID: "a07faa7c-7af7-4751-82c5-9c31acda4ec0"). InnerVolumeSpecName "kube-api-access-vz99d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:30:26 crc kubenswrapper[4799]: I1010 18:30:26.027839 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a07faa7c-7af7-4751-82c5-9c31acda4ec0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a07faa7c-7af7-4751-82c5-9c31acda4ec0" (UID: "a07faa7c-7af7-4751-82c5-9c31acda4ec0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:30:26 crc kubenswrapper[4799]: I1010 18:30:26.072172 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vz99d\" (UniqueName: \"kubernetes.io/projected/a07faa7c-7af7-4751-82c5-9c31acda4ec0-kube-api-access-vz99d\") on node \"crc\" DevicePath \"\"" Oct 10 18:30:26 crc kubenswrapper[4799]: I1010 18:30:26.072214 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a07faa7c-7af7-4751-82c5-9c31acda4ec0-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 18:30:26 crc kubenswrapper[4799]: I1010 18:30:26.072226 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a07faa7c-7af7-4751-82c5-9c31acda4ec0-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 18:30:26 crc kubenswrapper[4799]: I1010 18:30:26.343541 4799 generic.go:334] "Generic (PLEG): container finished" podID="a07faa7c-7af7-4751-82c5-9c31acda4ec0" containerID="0e65a5555a59c65de3c54257c3bdfc0087505a36a39b2ac4d73f660ada412492" exitCode=0 Oct 10 18:30:26 crc kubenswrapper[4799]: I1010 18:30:26.343584 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zmmx6" event={"ID":"a07faa7c-7af7-4751-82c5-9c31acda4ec0","Type":"ContainerDied","Data":"0e65a5555a59c65de3c54257c3bdfc0087505a36a39b2ac4d73f660ada412492"} Oct 10 18:30:26 crc kubenswrapper[4799]: I1010 18:30:26.343657 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zmmx6" event={"ID":"a07faa7c-7af7-4751-82c5-9c31acda4ec0","Type":"ContainerDied","Data":"c9f47c0e03c05e9f04a319533a8f0207619be4e549d0ca8ff2809e8ed888990c"} Oct 10 18:30:26 crc kubenswrapper[4799]: I1010 18:30:26.343676 4799 scope.go:117] "RemoveContainer" containerID="0e65a5555a59c65de3c54257c3bdfc0087505a36a39b2ac4d73f660ada412492" Oct 10 18:30:26 crc kubenswrapper[4799]: I1010 18:30:26.343688 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zmmx6" Oct 10 18:30:26 crc kubenswrapper[4799]: I1010 18:30:26.373124 4799 scope.go:117] "RemoveContainer" containerID="2259ca275a6430ead49a936e1592e05d5e1603125cc4a5c2fb28ccb6c787d6d5" Oct 10 18:30:26 crc kubenswrapper[4799]: I1010 18:30:26.397352 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zmmx6"] Oct 10 18:30:26 crc kubenswrapper[4799]: I1010 18:30:26.404979 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-zmmx6"] Oct 10 18:30:26 crc kubenswrapper[4799]: I1010 18:30:26.422862 4799 scope.go:117] "RemoveContainer" containerID="285e78d14cbae9b90ccc895fc76be9821372c5a7e9b71f19373374a477301c89" Oct 10 18:30:26 crc kubenswrapper[4799]: I1010 18:30:26.451179 4799 scope.go:117] "RemoveContainer" containerID="0e65a5555a59c65de3c54257c3bdfc0087505a36a39b2ac4d73f660ada412492" Oct 10 18:30:26 crc kubenswrapper[4799]: E1010 18:30:26.451741 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e65a5555a59c65de3c54257c3bdfc0087505a36a39b2ac4d73f660ada412492\": container with ID starting with 0e65a5555a59c65de3c54257c3bdfc0087505a36a39b2ac4d73f660ada412492 not found: ID does not exist" containerID="0e65a5555a59c65de3c54257c3bdfc0087505a36a39b2ac4d73f660ada412492" Oct 10 18:30:26 crc kubenswrapper[4799]: I1010 18:30:26.451795 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e65a5555a59c65de3c54257c3bdfc0087505a36a39b2ac4d73f660ada412492"} err="failed to get container status \"0e65a5555a59c65de3c54257c3bdfc0087505a36a39b2ac4d73f660ada412492\": rpc error: code = NotFound desc = could not find container \"0e65a5555a59c65de3c54257c3bdfc0087505a36a39b2ac4d73f660ada412492\": container with ID starting with 0e65a5555a59c65de3c54257c3bdfc0087505a36a39b2ac4d73f660ada412492 not found: ID does not exist" Oct 10 18:30:26 crc kubenswrapper[4799]: I1010 18:30:26.451822 4799 scope.go:117] "RemoveContainer" containerID="2259ca275a6430ead49a936e1592e05d5e1603125cc4a5c2fb28ccb6c787d6d5" Oct 10 18:30:26 crc kubenswrapper[4799]: E1010 18:30:26.452269 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2259ca275a6430ead49a936e1592e05d5e1603125cc4a5c2fb28ccb6c787d6d5\": container with ID starting with 2259ca275a6430ead49a936e1592e05d5e1603125cc4a5c2fb28ccb6c787d6d5 not found: ID does not exist" containerID="2259ca275a6430ead49a936e1592e05d5e1603125cc4a5c2fb28ccb6c787d6d5" Oct 10 18:30:26 crc kubenswrapper[4799]: I1010 18:30:26.452344 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2259ca275a6430ead49a936e1592e05d5e1603125cc4a5c2fb28ccb6c787d6d5"} err="failed to get container status \"2259ca275a6430ead49a936e1592e05d5e1603125cc4a5c2fb28ccb6c787d6d5\": rpc error: code = NotFound desc = could not find container \"2259ca275a6430ead49a936e1592e05d5e1603125cc4a5c2fb28ccb6c787d6d5\": container with ID starting with 2259ca275a6430ead49a936e1592e05d5e1603125cc4a5c2fb28ccb6c787d6d5 not found: ID does not exist" Oct 10 18:30:26 crc kubenswrapper[4799]: I1010 18:30:26.452388 4799 scope.go:117] "RemoveContainer" containerID="285e78d14cbae9b90ccc895fc76be9821372c5a7e9b71f19373374a477301c89" Oct 10 18:30:26 crc kubenswrapper[4799]: E1010 18:30:26.452770 4799 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"285e78d14cbae9b90ccc895fc76be9821372c5a7e9b71f19373374a477301c89\": container with ID starting with 285e78d14cbae9b90ccc895fc76be9821372c5a7e9b71f19373374a477301c89 not found: ID does not exist" containerID="285e78d14cbae9b90ccc895fc76be9821372c5a7e9b71f19373374a477301c89" Oct 10 18:30:26 crc kubenswrapper[4799]: I1010 18:30:26.452799 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"285e78d14cbae9b90ccc895fc76be9821372c5a7e9b71f19373374a477301c89"} err="failed to get container status \"285e78d14cbae9b90ccc895fc76be9821372c5a7e9b71f19373374a477301c89\": rpc error: code = NotFound desc = could not find container \"285e78d14cbae9b90ccc895fc76be9821372c5a7e9b71f19373374a477301c89\": container with ID starting with 285e78d14cbae9b90ccc895fc76be9821372c5a7e9b71f19373374a477301c89 not found: ID does not exist" Oct 10 18:30:26 crc kubenswrapper[4799]: I1010 18:30:26.528555 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-pgztr"] Oct 10 18:30:27 crc kubenswrapper[4799]: I1010 18:30:27.359528 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-pgztr" event={"ID":"797acbff-1308-4140-9468-a7eaaa3e5e75","Type":"ContainerStarted","Data":"ed1ae20ac64d3565e176b702181adc59dc2f6170a13e5e00cd6dcb63162aec50"} Oct 10 18:30:27 crc kubenswrapper[4799]: I1010 18:30:27.418544 4799 scope.go:117] "RemoveContainer" containerID="e1753dd33d9c2573eba3d4245d76828dad7bb15c7538442d84b9b903c94df080" Oct 10 18:30:27 crc kubenswrapper[4799]: E1010 18:30:27.418895 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:30:27 crc kubenswrapper[4799]: I1010 18:30:27.431634 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a07faa7c-7af7-4751-82c5-9c31acda4ec0" path="/var/lib/kubelet/pods/a07faa7c-7af7-4751-82c5-9c31acda4ec0/volumes" Oct 10 18:30:28 crc kubenswrapper[4799]: I1010 18:30:28.379416 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-pgztr" event={"ID":"797acbff-1308-4140-9468-a7eaaa3e5e75","Type":"ContainerStarted","Data":"1063add3619ab6d30c0e703e232d076098d6a91ce66fc3c39be2c679d0177d28"} Oct 10 18:30:28 crc kubenswrapper[4799]: I1010 18:30:28.409434 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-openstack-openstack-cell1-pgztr" podStartSLOduration=2.737756067 podStartE2EDuration="3.40939635s" podCreationTimestamp="2025-10-10 18:30:25 +0000 UTC" firstStartedPulling="2025-10-10 18:30:26.526557242 +0000 UTC m=+7120.034881387" lastFinishedPulling="2025-10-10 18:30:27.198197545 +0000 UTC m=+7120.706521670" observedRunningTime="2025-10-10 18:30:28.402340547 +0000 UTC m=+7121.910664702" watchObservedRunningTime="2025-10-10 18:30:28.40939635 +0000 UTC m=+7121.917720495" Oct 10 18:30:29 crc kubenswrapper[4799]: I1010 18:30:29.396313 4799 generic.go:334] "Generic (PLEG): container finished" podID="4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c" 
containerID="92e45468b7cdbbb88459c0a0ea7e5c8855a33feafd2f5f7e2890996f7aaedf52" exitCode=0 Oct 10 18:30:29 crc kubenswrapper[4799]: I1010 18:30:29.396432 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9m54q" event={"ID":"4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c","Type":"ContainerDied","Data":"92e45468b7cdbbb88459c0a0ea7e5c8855a33feafd2f5f7e2890996f7aaedf52"} Oct 10 18:30:30 crc kubenswrapper[4799]: I1010 18:30:30.414477 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9m54q" event={"ID":"4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c","Type":"ContainerStarted","Data":"b7757a696388c12230c552a6b18dae224ee707e25098cdefca460a5f42adcb48"} Oct 10 18:30:30 crc kubenswrapper[4799]: I1010 18:30:30.449750 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-9m54q" podStartSLOduration=2.852470315 podStartE2EDuration="9.449724242s" podCreationTimestamp="2025-10-10 18:30:21 +0000 UTC" firstStartedPulling="2025-10-10 18:30:23.306210389 +0000 UTC m=+7116.814534544" lastFinishedPulling="2025-10-10 18:30:29.903464316 +0000 UTC m=+7123.411788471" observedRunningTime="2025-10-10 18:30:30.434732465 +0000 UTC m=+7123.943056610" watchObservedRunningTime="2025-10-10 18:30:30.449724242 +0000 UTC m=+7123.958048387" Oct 10 18:30:31 crc kubenswrapper[4799]: I1010 18:30:31.878044 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9m54q" Oct 10 18:30:31 crc kubenswrapper[4799]: I1010 18:30:31.878483 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9m54q" Oct 10 18:30:32 crc kubenswrapper[4799]: I1010 18:30:32.947116 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-9m54q" podUID="4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c" containerName="registry-server" probeResult="failure" output=< Oct 10 18:30:32 crc kubenswrapper[4799]: timeout: failed to connect service ":50051" within 1s Oct 10 18:30:32 crc kubenswrapper[4799]: > Oct 10 18:30:38 crc kubenswrapper[4799]: I1010 18:30:38.402784 4799 scope.go:117] "RemoveContainer" containerID="e1753dd33d9c2573eba3d4245d76828dad7bb15c7538442d84b9b903c94df080" Oct 10 18:30:38 crc kubenswrapper[4799]: E1010 18:30:38.403953 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:30:41 crc kubenswrapper[4799]: I1010 18:30:41.930044 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-9m54q" Oct 10 18:30:42 crc kubenswrapper[4799]: I1010 18:30:42.000905 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9m54q" Oct 10 18:30:42 crc kubenswrapper[4799]: I1010 18:30:42.430751 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9m54q"] Oct 10 18:30:43 crc kubenswrapper[4799]: I1010 18:30:43.597648 4799 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/redhat-operators-9m54q" podUID="4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c" containerName="registry-server" containerID="cri-o://b7757a696388c12230c552a6b18dae224ee707e25098cdefca460a5f42adcb48" gracePeriod=2 Oct 10 18:30:44 crc kubenswrapper[4799]: I1010 18:30:44.315162 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9m54q" Oct 10 18:30:44 crc kubenswrapper[4799]: I1010 18:30:44.346977 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c-utilities\") pod \"4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c\" (UID: \"4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c\") " Oct 10 18:30:44 crc kubenswrapper[4799]: I1010 18:30:44.348477 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c-utilities" (OuterVolumeSpecName: "utilities") pod "4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c" (UID: "4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:30:44 crc kubenswrapper[4799]: I1010 18:30:44.448624 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c-catalog-content\") pod \"4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c\" (UID: \"4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c\") " Oct 10 18:30:44 crc kubenswrapper[4799]: I1010 18:30:44.448934 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qkthc\" (UniqueName: \"kubernetes.io/projected/4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c-kube-api-access-qkthc\") pod \"4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c\" (UID: \"4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c\") " Oct 10 18:30:44 crc kubenswrapper[4799]: I1010 18:30:44.449772 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 18:30:44 crc kubenswrapper[4799]: I1010 18:30:44.457119 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c-kube-api-access-qkthc" (OuterVolumeSpecName: "kube-api-access-qkthc") pod "4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c" (UID: "4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c"). InnerVolumeSpecName "kube-api-access-qkthc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:30:44 crc kubenswrapper[4799]: I1010 18:30:44.528365 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c" (UID: "4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:30:44 crc kubenswrapper[4799]: I1010 18:30:44.551066 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 18:30:44 crc kubenswrapper[4799]: I1010 18:30:44.551094 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qkthc\" (UniqueName: \"kubernetes.io/projected/4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c-kube-api-access-qkthc\") on node \"crc\" DevicePath \"\"" Oct 10 18:30:44 crc kubenswrapper[4799]: I1010 18:30:44.614363 4799 generic.go:334] "Generic (PLEG): container finished" podID="4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c" containerID="b7757a696388c12230c552a6b18dae224ee707e25098cdefca460a5f42adcb48" exitCode=0 Oct 10 18:30:44 crc kubenswrapper[4799]: I1010 18:30:44.614423 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9m54q" event={"ID":"4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c","Type":"ContainerDied","Data":"b7757a696388c12230c552a6b18dae224ee707e25098cdefca460a5f42adcb48"} Oct 10 18:30:44 crc kubenswrapper[4799]: I1010 18:30:44.614474 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9m54q" event={"ID":"4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c","Type":"ContainerDied","Data":"431a9bdc7aeac9e2e9c1b47dd99a03298e184c63d3c18f6a5765c90819e28958"} Oct 10 18:30:44 crc kubenswrapper[4799]: I1010 18:30:44.614483 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9m54q" Oct 10 18:30:44 crc kubenswrapper[4799]: I1010 18:30:44.614493 4799 scope.go:117] "RemoveContainer" containerID="b7757a696388c12230c552a6b18dae224ee707e25098cdefca460a5f42adcb48" Oct 10 18:30:44 crc kubenswrapper[4799]: I1010 18:30:44.661828 4799 scope.go:117] "RemoveContainer" containerID="92e45468b7cdbbb88459c0a0ea7e5c8855a33feafd2f5f7e2890996f7aaedf52" Oct 10 18:30:44 crc kubenswrapper[4799]: I1010 18:30:44.671806 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9m54q"] Oct 10 18:30:44 crc kubenswrapper[4799]: I1010 18:30:44.681911 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-9m54q"] Oct 10 18:30:44 crc kubenswrapper[4799]: I1010 18:30:44.705452 4799 scope.go:117] "RemoveContainer" containerID="ff509e7195fd70a906ae5423101cf527658bbf4c1a908ef3ed7e56b6cfc24901" Oct 10 18:30:44 crc kubenswrapper[4799]: I1010 18:30:44.756288 4799 scope.go:117] "RemoveContainer" containerID="b7757a696388c12230c552a6b18dae224ee707e25098cdefca460a5f42adcb48" Oct 10 18:30:44 crc kubenswrapper[4799]: E1010 18:30:44.756975 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7757a696388c12230c552a6b18dae224ee707e25098cdefca460a5f42adcb48\": container with ID starting with b7757a696388c12230c552a6b18dae224ee707e25098cdefca460a5f42adcb48 not found: ID does not exist" containerID="b7757a696388c12230c552a6b18dae224ee707e25098cdefca460a5f42adcb48" Oct 10 18:30:44 crc kubenswrapper[4799]: I1010 18:30:44.757059 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7757a696388c12230c552a6b18dae224ee707e25098cdefca460a5f42adcb48"} err="failed to get container status \"b7757a696388c12230c552a6b18dae224ee707e25098cdefca460a5f42adcb48\": 
rpc error: code = NotFound desc = could not find container \"b7757a696388c12230c552a6b18dae224ee707e25098cdefca460a5f42adcb48\": container with ID starting with b7757a696388c12230c552a6b18dae224ee707e25098cdefca460a5f42adcb48 not found: ID does not exist" Oct 10 18:30:44 crc kubenswrapper[4799]: I1010 18:30:44.757094 4799 scope.go:117] "RemoveContainer" containerID="92e45468b7cdbbb88459c0a0ea7e5c8855a33feafd2f5f7e2890996f7aaedf52" Oct 10 18:30:44 crc kubenswrapper[4799]: E1010 18:30:44.758881 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"92e45468b7cdbbb88459c0a0ea7e5c8855a33feafd2f5f7e2890996f7aaedf52\": container with ID starting with 92e45468b7cdbbb88459c0a0ea7e5c8855a33feafd2f5f7e2890996f7aaedf52 not found: ID does not exist" containerID="92e45468b7cdbbb88459c0a0ea7e5c8855a33feafd2f5f7e2890996f7aaedf52" Oct 10 18:30:44 crc kubenswrapper[4799]: I1010 18:30:44.758929 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92e45468b7cdbbb88459c0a0ea7e5c8855a33feafd2f5f7e2890996f7aaedf52"} err="failed to get container status \"92e45468b7cdbbb88459c0a0ea7e5c8855a33feafd2f5f7e2890996f7aaedf52\": rpc error: code = NotFound desc = could not find container \"92e45468b7cdbbb88459c0a0ea7e5c8855a33feafd2f5f7e2890996f7aaedf52\": container with ID starting with 92e45468b7cdbbb88459c0a0ea7e5c8855a33feafd2f5f7e2890996f7aaedf52 not found: ID does not exist" Oct 10 18:30:44 crc kubenswrapper[4799]: I1010 18:30:44.758962 4799 scope.go:117] "RemoveContainer" containerID="ff509e7195fd70a906ae5423101cf527658bbf4c1a908ef3ed7e56b6cfc24901" Oct 10 18:30:44 crc kubenswrapper[4799]: E1010 18:30:44.759299 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff509e7195fd70a906ae5423101cf527658bbf4c1a908ef3ed7e56b6cfc24901\": container with ID starting with ff509e7195fd70a906ae5423101cf527658bbf4c1a908ef3ed7e56b6cfc24901 not found: ID does not exist" containerID="ff509e7195fd70a906ae5423101cf527658bbf4c1a908ef3ed7e56b6cfc24901" Oct 10 18:30:44 crc kubenswrapper[4799]: I1010 18:30:44.759344 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff509e7195fd70a906ae5423101cf527658bbf4c1a908ef3ed7e56b6cfc24901"} err="failed to get container status \"ff509e7195fd70a906ae5423101cf527658bbf4c1a908ef3ed7e56b6cfc24901\": rpc error: code = NotFound desc = could not find container \"ff509e7195fd70a906ae5423101cf527658bbf4c1a908ef3ed7e56b6cfc24901\": container with ID starting with ff509e7195fd70a906ae5423101cf527658bbf4c1a908ef3ed7e56b6cfc24901 not found: ID does not exist" Oct 10 18:30:45 crc kubenswrapper[4799]: I1010 18:30:45.426047 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c" path="/var/lib/kubelet/pods/4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c/volumes" Oct 10 18:30:51 crc kubenswrapper[4799]: I1010 18:30:51.404210 4799 scope.go:117] "RemoveContainer" containerID="e1753dd33d9c2573eba3d4245d76828dad7bb15c7538442d84b9b903c94df080" Oct 10 18:30:51 crc kubenswrapper[4799]: E1010 18:30:51.405255 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:31:06 crc kubenswrapper[4799]: I1010 18:31:06.409743 4799 scope.go:117] "RemoveContainer" containerID="e1753dd33d9c2573eba3d4245d76828dad7bb15c7538442d84b9b903c94df080" Oct 10 18:31:06 crc kubenswrapper[4799]: E1010 18:31:06.413046 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:31:17 crc kubenswrapper[4799]: I1010 18:31:17.411682 4799 scope.go:117] "RemoveContainer" containerID="e1753dd33d9c2573eba3d4245d76828dad7bb15c7538442d84b9b903c94df080" Oct 10 18:31:17 crc kubenswrapper[4799]: E1010 18:31:17.412798 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:31:29 crc kubenswrapper[4799]: I1010 18:31:29.403703 4799 scope.go:117] "RemoveContainer" containerID="e1753dd33d9c2573eba3d4245d76828dad7bb15c7538442d84b9b903c94df080" Oct 10 18:31:29 crc kubenswrapper[4799]: E1010 18:31:29.405025 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:31:32 crc kubenswrapper[4799]: I1010 18:31:32.029437 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-ckrqh"] Oct 10 18:31:32 crc kubenswrapper[4799]: E1010 18:31:32.030716 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c" containerName="extract-content" Oct 10 18:31:32 crc kubenswrapper[4799]: I1010 18:31:32.030738 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c" containerName="extract-content" Oct 10 18:31:32 crc kubenswrapper[4799]: E1010 18:31:32.030788 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a07faa7c-7af7-4751-82c5-9c31acda4ec0" containerName="registry-server" Oct 10 18:31:32 crc kubenswrapper[4799]: I1010 18:31:32.030803 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a07faa7c-7af7-4751-82c5-9c31acda4ec0" containerName="registry-server" Oct 10 18:31:32 crc kubenswrapper[4799]: E1010 18:31:32.030826 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a07faa7c-7af7-4751-82c5-9c31acda4ec0" containerName="extract-utilities" Oct 10 18:31:32 crc kubenswrapper[4799]: I1010 18:31:32.030840 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a07faa7c-7af7-4751-82c5-9c31acda4ec0" containerName="extract-utilities" Oct 10 18:31:32 crc kubenswrapper[4799]: E1010 
18:31:32.030869 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c" containerName="registry-server" Oct 10 18:31:32 crc kubenswrapper[4799]: I1010 18:31:32.030881 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c" containerName="registry-server" Oct 10 18:31:32 crc kubenswrapper[4799]: E1010 18:31:32.030911 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a07faa7c-7af7-4751-82c5-9c31acda4ec0" containerName="extract-content" Oct 10 18:31:32 crc kubenswrapper[4799]: I1010 18:31:32.030923 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a07faa7c-7af7-4751-82c5-9c31acda4ec0" containerName="extract-content" Oct 10 18:31:32 crc kubenswrapper[4799]: E1010 18:31:32.030971 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c" containerName="extract-utilities" Oct 10 18:31:32 crc kubenswrapper[4799]: I1010 18:31:32.030983 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c" containerName="extract-utilities" Oct 10 18:31:32 crc kubenswrapper[4799]: I1010 18:31:32.031545 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="a07faa7c-7af7-4751-82c5-9c31acda4ec0" containerName="registry-server" Oct 10 18:31:32 crc kubenswrapper[4799]: I1010 18:31:32.031586 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a2edbc5-e998-4b26-a3f7-4f361b3b6e5c" containerName="registry-server" Oct 10 18:31:32 crc kubenswrapper[4799]: I1010 18:31:32.034358 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ckrqh" Oct 10 18:31:32 crc kubenswrapper[4799]: I1010 18:31:32.058016 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ckrqh"] Oct 10 18:31:32 crc kubenswrapper[4799]: I1010 18:31:32.145310 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3849df2-bc4e-401f-8645-268aca3327e7-utilities\") pod \"certified-operators-ckrqh\" (UID: \"e3849df2-bc4e-401f-8645-268aca3327e7\") " pod="openshift-marketplace/certified-operators-ckrqh" Oct 10 18:31:32 crc kubenswrapper[4799]: I1010 18:31:32.145615 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3849df2-bc4e-401f-8645-268aca3327e7-catalog-content\") pod \"certified-operators-ckrqh\" (UID: \"e3849df2-bc4e-401f-8645-268aca3327e7\") " pod="openshift-marketplace/certified-operators-ckrqh" Oct 10 18:31:32 crc kubenswrapper[4799]: I1010 18:31:32.145822 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k47cl\" (UniqueName: \"kubernetes.io/projected/e3849df2-bc4e-401f-8645-268aca3327e7-kube-api-access-k47cl\") pod \"certified-operators-ckrqh\" (UID: \"e3849df2-bc4e-401f-8645-268aca3327e7\") " pod="openshift-marketplace/certified-operators-ckrqh" Oct 10 18:31:32 crc kubenswrapper[4799]: I1010 18:31:32.247996 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3849df2-bc4e-401f-8645-268aca3327e7-utilities\") pod \"certified-operators-ckrqh\" (UID: \"e3849df2-bc4e-401f-8645-268aca3327e7\") " pod="openshift-marketplace/certified-operators-ckrqh" 
Oct 10 18:31:32 crc kubenswrapper[4799]: I1010 18:31:32.248159 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3849df2-bc4e-401f-8645-268aca3327e7-catalog-content\") pod \"certified-operators-ckrqh\" (UID: \"e3849df2-bc4e-401f-8645-268aca3327e7\") " pod="openshift-marketplace/certified-operators-ckrqh"
Oct 10 18:31:32 crc kubenswrapper[4799]: I1010 18:31:32.248270 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k47cl\" (UniqueName: \"kubernetes.io/projected/e3849df2-bc4e-401f-8645-268aca3327e7-kube-api-access-k47cl\") pod \"certified-operators-ckrqh\" (UID: \"e3849df2-bc4e-401f-8645-268aca3327e7\") " pod="openshift-marketplace/certified-operators-ckrqh"
Oct 10 18:31:32 crc kubenswrapper[4799]: I1010 18:31:32.248538 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3849df2-bc4e-401f-8645-268aca3327e7-utilities\") pod \"certified-operators-ckrqh\" (UID: \"e3849df2-bc4e-401f-8645-268aca3327e7\") " pod="openshift-marketplace/certified-operators-ckrqh"
Oct 10 18:31:32 crc kubenswrapper[4799]: I1010 18:31:32.248684 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3849df2-bc4e-401f-8645-268aca3327e7-catalog-content\") pod \"certified-operators-ckrqh\" (UID: \"e3849df2-bc4e-401f-8645-268aca3327e7\") " pod="openshift-marketplace/certified-operators-ckrqh"
Oct 10 18:31:32 crc kubenswrapper[4799]: I1010 18:31:32.270562 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k47cl\" (UniqueName: \"kubernetes.io/projected/e3849df2-bc4e-401f-8645-268aca3327e7-kube-api-access-k47cl\") pod \"certified-operators-ckrqh\" (UID: \"e3849df2-bc4e-401f-8645-268aca3327e7\") " pod="openshift-marketplace/certified-operators-ckrqh"
Oct 10 18:31:32 crc kubenswrapper[4799]: I1010 18:31:32.377739 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ckrqh"
Oct 10 18:31:32 crc kubenswrapper[4799]: I1010 18:31:32.918568 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ckrqh"]
Oct 10 18:31:33 crc kubenswrapper[4799]: I1010 18:31:33.291297 4799 generic.go:334] "Generic (PLEG): container finished" podID="e3849df2-bc4e-401f-8645-268aca3327e7" containerID="58bd64f583e644a183508735a6483c17dc43c1b6ee9788c7bb20ab1cc70e9d55" exitCode=0
Oct 10 18:31:33 crc kubenswrapper[4799]: I1010 18:31:33.291419 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ckrqh" event={"ID":"e3849df2-bc4e-401f-8645-268aca3327e7","Type":"ContainerDied","Data":"58bd64f583e644a183508735a6483c17dc43c1b6ee9788c7bb20ab1cc70e9d55"}
Oct 10 18:31:33 crc kubenswrapper[4799]: I1010 18:31:33.291901 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ckrqh" event={"ID":"e3849df2-bc4e-401f-8645-268aca3327e7","Type":"ContainerStarted","Data":"2eb43951195c6627c0a394bfc470df267f86197c6596dcd9762c8b9f71b23eba"}
Oct 10 18:31:35 crc kubenswrapper[4799]: I1010 18:31:35.318196 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ckrqh" event={"ID":"e3849df2-bc4e-401f-8645-268aca3327e7","Type":"ContainerStarted","Data":"dc9e909e3b3690980743121f5c21a91651724118229f997b70bbb7a38dcc9b96"}
Oct 10 18:31:36 crc kubenswrapper[4799]: I1010 18:31:36.334170 4799 generic.go:334] "Generic (PLEG): container finished" podID="e3849df2-bc4e-401f-8645-268aca3327e7" containerID="dc9e909e3b3690980743121f5c21a91651724118229f997b70bbb7a38dcc9b96" exitCode=0
Oct 10 18:31:36 crc kubenswrapper[4799]: I1010 18:31:36.334291 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ckrqh" event={"ID":"e3849df2-bc4e-401f-8645-268aca3327e7","Type":"ContainerDied","Data":"dc9e909e3b3690980743121f5c21a91651724118229f997b70bbb7a38dcc9b96"}
Oct 10 18:31:37 crc kubenswrapper[4799]: I1010 18:31:37.360932 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ckrqh" event={"ID":"e3849df2-bc4e-401f-8645-268aca3327e7","Type":"ContainerStarted","Data":"dc5117f49bcc82bec863b1941ddc617c57046ea2cb446b9920d4ef3c3c5070b1"}
Oct 10 18:31:37 crc kubenswrapper[4799]: I1010 18:31:37.380175 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-ckrqh" podStartSLOduration=2.840551122 podStartE2EDuration="6.380155767s" podCreationTimestamp="2025-10-10 18:31:31 +0000 UTC" firstStartedPulling="2025-10-10 18:31:33.293910247 +0000 UTC m=+7186.802234402" lastFinishedPulling="2025-10-10 18:31:36.833514902 +0000 UTC m=+7190.341839047" observedRunningTime="2025-10-10 18:31:37.377496062 +0000 UTC m=+7190.885820187" watchObservedRunningTime="2025-10-10 18:31:37.380155767 +0000 UTC m=+7190.888479892"
Oct 10 18:31:42 crc kubenswrapper[4799]: I1010 18:31:42.378640 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-ckrqh"
Oct 10 18:31:42 crc kubenswrapper[4799]: I1010 18:31:42.379546 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-ckrqh"
Oct 10 18:31:42 crc kubenswrapper[4799]: I1010 18:31:42.467431 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-ckrqh"
Oct 10 18:31:42 crc kubenswrapper[4799]: I1010 18:31:42.543523 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-ckrqh"
Oct 10 18:31:42 crc kubenswrapper[4799]: I1010 18:31:42.710095 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ckrqh"]
Oct 10 18:31:43 crc kubenswrapper[4799]: I1010 18:31:43.402541 4799 scope.go:117] "RemoveContainer" containerID="e1753dd33d9c2573eba3d4245d76828dad7bb15c7538442d84b9b903c94df080"
Oct 10 18:31:43 crc kubenswrapper[4799]: E1010 18:31:43.402865 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb"
Oct 10 18:31:44 crc kubenswrapper[4799]: I1010 18:31:44.441037 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-ckrqh" podUID="e3849df2-bc4e-401f-8645-268aca3327e7" containerName="registry-server" containerID="cri-o://dc5117f49bcc82bec863b1941ddc617c57046ea2cb446b9920d4ef3c3c5070b1" gracePeriod=2
Oct 10 18:31:44 crc kubenswrapper[4799]: I1010 18:31:44.930610 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ckrqh"
Oct 10 18:31:45 crc kubenswrapper[4799]: I1010 18:31:45.064673 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k47cl\" (UniqueName: \"kubernetes.io/projected/e3849df2-bc4e-401f-8645-268aca3327e7-kube-api-access-k47cl\") pod \"e3849df2-bc4e-401f-8645-268aca3327e7\" (UID: \"e3849df2-bc4e-401f-8645-268aca3327e7\") "
Oct 10 18:31:45 crc kubenswrapper[4799]: I1010 18:31:45.064774 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3849df2-bc4e-401f-8645-268aca3327e7-catalog-content\") pod \"e3849df2-bc4e-401f-8645-268aca3327e7\" (UID: \"e3849df2-bc4e-401f-8645-268aca3327e7\") "
Oct 10 18:31:45 crc kubenswrapper[4799]: I1010 18:31:45.065183 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3849df2-bc4e-401f-8645-268aca3327e7-utilities\") pod \"e3849df2-bc4e-401f-8645-268aca3327e7\" (UID: \"e3849df2-bc4e-401f-8645-268aca3327e7\") "
Oct 10 18:31:45 crc kubenswrapper[4799]: I1010 18:31:45.066547 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e3849df2-bc4e-401f-8645-268aca3327e7-utilities" (OuterVolumeSpecName: "utilities") pod "e3849df2-bc4e-401f-8645-268aca3327e7" (UID: "e3849df2-bc4e-401f-8645-268aca3327e7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 18:31:45 crc kubenswrapper[4799]: I1010 18:31:45.069454 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3849df2-bc4e-401f-8645-268aca3327e7-utilities\") on node \"crc\" DevicePath \"\""
Oct 10 18:31:45 crc kubenswrapper[4799]: I1010 18:31:45.073106 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3849df2-bc4e-401f-8645-268aca3327e7-kube-api-access-k47cl" (OuterVolumeSpecName: "kube-api-access-k47cl") pod "e3849df2-bc4e-401f-8645-268aca3327e7" (UID: "e3849df2-bc4e-401f-8645-268aca3327e7"). InnerVolumeSpecName "kube-api-access-k47cl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 18:31:45 crc kubenswrapper[4799]: I1010 18:31:45.173370 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k47cl\" (UniqueName: \"kubernetes.io/projected/e3849df2-bc4e-401f-8645-268aca3327e7-kube-api-access-k47cl\") on node \"crc\" DevicePath \"\""
Oct 10 18:31:45 crc kubenswrapper[4799]: I1010 18:31:45.460626 4799 generic.go:334] "Generic (PLEG): container finished" podID="e3849df2-bc4e-401f-8645-268aca3327e7" containerID="dc5117f49bcc82bec863b1941ddc617c57046ea2cb446b9920d4ef3c3c5070b1" exitCode=0
Oct 10 18:31:45 crc kubenswrapper[4799]: I1010 18:31:45.460686 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ckrqh" event={"ID":"e3849df2-bc4e-401f-8645-268aca3327e7","Type":"ContainerDied","Data":"dc5117f49bcc82bec863b1941ddc617c57046ea2cb446b9920d4ef3c3c5070b1"}
Oct 10 18:31:45 crc kubenswrapper[4799]: I1010 18:31:45.460729 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ckrqh" event={"ID":"e3849df2-bc4e-401f-8645-268aca3327e7","Type":"ContainerDied","Data":"2eb43951195c6627c0a394bfc470df267f86197c6596dcd9762c8b9f71b23eba"}
Oct 10 18:31:45 crc kubenswrapper[4799]: I1010 18:31:45.460734 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ckrqh"
Oct 10 18:31:45 crc kubenswrapper[4799]: I1010 18:31:45.460751 4799 scope.go:117] "RemoveContainer" containerID="dc5117f49bcc82bec863b1941ddc617c57046ea2cb446b9920d4ef3c3c5070b1"
Oct 10 18:31:45 crc kubenswrapper[4799]: I1010 18:31:45.503150 4799 scope.go:117] "RemoveContainer" containerID="dc9e909e3b3690980743121f5c21a91651724118229f997b70bbb7a38dcc9b96"
Oct 10 18:31:45 crc kubenswrapper[4799]: I1010 18:31:45.545397 4799 scope.go:117] "RemoveContainer" containerID="58bd64f583e644a183508735a6483c17dc43c1b6ee9788c7bb20ab1cc70e9d55"
Oct 10 18:31:45 crc kubenswrapper[4799]: I1010 18:31:45.624016 4799 scope.go:117] "RemoveContainer" containerID="dc5117f49bcc82bec863b1941ddc617c57046ea2cb446b9920d4ef3c3c5070b1"
Oct 10 18:31:45 crc kubenswrapper[4799]: E1010 18:31:45.625035 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc5117f49bcc82bec863b1941ddc617c57046ea2cb446b9920d4ef3c3c5070b1\": container with ID starting with dc5117f49bcc82bec863b1941ddc617c57046ea2cb446b9920d4ef3c3c5070b1 not found: ID does not exist" containerID="dc5117f49bcc82bec863b1941ddc617c57046ea2cb446b9920d4ef3c3c5070b1"
Oct 10 18:31:45 crc kubenswrapper[4799]: I1010 18:31:45.625082 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc5117f49bcc82bec863b1941ddc617c57046ea2cb446b9920d4ef3c3c5070b1"} err="failed to get container status \"dc5117f49bcc82bec863b1941ddc617c57046ea2cb446b9920d4ef3c3c5070b1\": rpc error: code = NotFound desc = could not find container \"dc5117f49bcc82bec863b1941ddc617c57046ea2cb446b9920d4ef3c3c5070b1\": container with ID starting with dc5117f49bcc82bec863b1941ddc617c57046ea2cb446b9920d4ef3c3c5070b1 not found: ID does not exist"
Oct 10 18:31:45 crc kubenswrapper[4799]: I1010 18:31:45.625108 4799 scope.go:117] "RemoveContainer" containerID="dc9e909e3b3690980743121f5c21a91651724118229f997b70bbb7a38dcc9b96"
Oct 10 18:31:45 crc kubenswrapper[4799]: E1010 18:31:45.625879 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc9e909e3b3690980743121f5c21a91651724118229f997b70bbb7a38dcc9b96\": container with ID starting with dc9e909e3b3690980743121f5c21a91651724118229f997b70bbb7a38dcc9b96 not found: ID does not exist" containerID="dc9e909e3b3690980743121f5c21a91651724118229f997b70bbb7a38dcc9b96"
Oct 10 18:31:45 crc kubenswrapper[4799]: I1010 18:31:45.625913 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc9e909e3b3690980743121f5c21a91651724118229f997b70bbb7a38dcc9b96"} err="failed to get container status \"dc9e909e3b3690980743121f5c21a91651724118229f997b70bbb7a38dcc9b96\": rpc error: code = NotFound desc = could not find container \"dc9e909e3b3690980743121f5c21a91651724118229f997b70bbb7a38dcc9b96\": container with ID starting with dc9e909e3b3690980743121f5c21a91651724118229f997b70bbb7a38dcc9b96 not found: ID does not exist"
Oct 10 18:31:45 crc kubenswrapper[4799]: I1010 18:31:45.625932 4799 scope.go:117] "RemoveContainer" containerID="58bd64f583e644a183508735a6483c17dc43c1b6ee9788c7bb20ab1cc70e9d55"
Oct 10 18:31:45 crc kubenswrapper[4799]: E1010 18:31:45.626414 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58bd64f583e644a183508735a6483c17dc43c1b6ee9788c7bb20ab1cc70e9d55\": container with ID starting with 58bd64f583e644a183508735a6483c17dc43c1b6ee9788c7bb20ab1cc70e9d55 not found: ID does not exist" containerID="58bd64f583e644a183508735a6483c17dc43c1b6ee9788c7bb20ab1cc70e9d55"
Oct 10 18:31:45 crc kubenswrapper[4799]: I1010 18:31:45.626435 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58bd64f583e644a183508735a6483c17dc43c1b6ee9788c7bb20ab1cc70e9d55"} err="failed to get container status \"58bd64f583e644a183508735a6483c17dc43c1b6ee9788c7bb20ab1cc70e9d55\": rpc error: code = NotFound desc = could not find container \"58bd64f583e644a183508735a6483c17dc43c1b6ee9788c7bb20ab1cc70e9d55\": container with ID starting with 58bd64f583e644a183508735a6483c17dc43c1b6ee9788c7bb20ab1cc70e9d55 not found: ID does not exist"
Oct 10 18:31:45 crc kubenswrapper[4799]: I1010 18:31:45.730361 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e3849df2-bc4e-401f-8645-268aca3327e7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e3849df2-bc4e-401f-8645-268aca3327e7" (UID: "e3849df2-bc4e-401f-8645-268aca3327e7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 18:31:45 crc kubenswrapper[4799]: I1010 18:31:45.788846 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3849df2-bc4e-401f-8645-268aca3327e7-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 10 18:31:45 crc kubenswrapper[4799]: I1010 18:31:45.807666 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ckrqh"]
Oct 10 18:31:45 crc kubenswrapper[4799]: I1010 18:31:45.818709 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-ckrqh"]
Oct 10 18:31:47 crc kubenswrapper[4799]: I1010 18:31:47.423240 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3849df2-bc4e-401f-8645-268aca3327e7" path="/var/lib/kubelet/pods/e3849df2-bc4e-401f-8645-268aca3327e7/volumes"
Oct 10 18:31:56 crc kubenswrapper[4799]: I1010 18:31:56.404903 4799 scope.go:117] "RemoveContainer" containerID="e1753dd33d9c2573eba3d4245d76828dad7bb15c7538442d84b9b903c94df080"
Oct 10 18:31:56 crc kubenswrapper[4799]: E1010 18:31:56.406397 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb"
Oct 10 18:32:03 crc kubenswrapper[4799]: I1010 18:32:03.855383 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-sh87m"]
Oct 10 18:32:03 crc kubenswrapper[4799]: E1010 18:32:03.856852 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3849df2-bc4e-401f-8645-268aca3327e7" containerName="registry-server"
Oct 10 18:32:03 crc kubenswrapper[4799]: I1010 18:32:03.856876 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3849df2-bc4e-401f-8645-268aca3327e7" containerName="registry-server"
Oct 10 18:32:03 crc kubenswrapper[4799]: E1010 18:32:03.856927 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3849df2-bc4e-401f-8645-268aca3327e7" containerName="extract-content"
Oct 10 18:32:03 crc kubenswrapper[4799]: I1010 18:32:03.856940 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3849df2-bc4e-401f-8645-268aca3327e7" containerName="extract-content"
Oct 10 18:32:03 crc kubenswrapper[4799]: E1010 18:32:03.856963 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3849df2-bc4e-401f-8645-268aca3327e7" containerName="extract-utilities"
Oct 10 18:32:03 crc kubenswrapper[4799]: I1010 18:32:03.856977 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3849df2-bc4e-401f-8645-268aca3327e7" containerName="extract-utilities"
Oct 10 18:32:03 crc kubenswrapper[4799]: I1010 18:32:03.857380 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3849df2-bc4e-401f-8645-268aca3327e7" containerName="registry-server"
Oct 10 18:32:03 crc kubenswrapper[4799]: I1010 18:32:03.860120 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sh87m"
Oct 10 18:32:03 crc kubenswrapper[4799]: I1010 18:32:03.868791 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sh87m"]
Oct 10 18:32:03 crc kubenswrapper[4799]: I1010 18:32:03.934884 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82986\" (UniqueName: \"kubernetes.io/projected/5d0371cb-d0e9-452e-90a5-b4f07f20c59d-kube-api-access-82986\") pod \"redhat-marketplace-sh87m\" (UID: \"5d0371cb-d0e9-452e-90a5-b4f07f20c59d\") " pod="openshift-marketplace/redhat-marketplace-sh87m"
Oct 10 18:32:03 crc kubenswrapper[4799]: I1010 18:32:03.935265 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d0371cb-d0e9-452e-90a5-b4f07f20c59d-catalog-content\") pod \"redhat-marketplace-sh87m\" (UID: \"5d0371cb-d0e9-452e-90a5-b4f07f20c59d\") " pod="openshift-marketplace/redhat-marketplace-sh87m"
Oct 10 18:32:03 crc kubenswrapper[4799]: I1010 18:32:03.935455 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d0371cb-d0e9-452e-90a5-b4f07f20c59d-utilities\") pod \"redhat-marketplace-sh87m\" (UID: \"5d0371cb-d0e9-452e-90a5-b4f07f20c59d\") " pod="openshift-marketplace/redhat-marketplace-sh87m"
Oct 10 18:32:04 crc kubenswrapper[4799]: I1010 18:32:04.037950 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d0371cb-d0e9-452e-90a5-b4f07f20c59d-utilities\") pod \"redhat-marketplace-sh87m\" (UID: \"5d0371cb-d0e9-452e-90a5-b4f07f20c59d\") " pod="openshift-marketplace/redhat-marketplace-sh87m"
Oct 10 18:32:04 crc kubenswrapper[4799]: I1010 18:32:04.038867 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82986\" (UniqueName: \"kubernetes.io/projected/5d0371cb-d0e9-452e-90a5-b4f07f20c59d-kube-api-access-82986\") pod \"redhat-marketplace-sh87m\" (UID: \"5d0371cb-d0e9-452e-90a5-b4f07f20c59d\") " pod="openshift-marketplace/redhat-marketplace-sh87m"
Oct 10 18:32:04 crc kubenswrapper[4799]: I1010 18:32:04.039009 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d0371cb-d0e9-452e-90a5-b4f07f20c59d-catalog-content\") pod \"redhat-marketplace-sh87m\" (UID: \"5d0371cb-d0e9-452e-90a5-b4f07f20c59d\") "
pod="openshift-marketplace/redhat-marketplace-sh87m" Oct 10 18:32:04 crc kubenswrapper[4799]: I1010 18:32:04.038548 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d0371cb-d0e9-452e-90a5-b4f07f20c59d-utilities\") pod \"redhat-marketplace-sh87m\" (UID: \"5d0371cb-d0e9-452e-90a5-b4f07f20c59d\") " pod="openshift-marketplace/redhat-marketplace-sh87m" Oct 10 18:32:04 crc kubenswrapper[4799]: I1010 18:32:04.039396 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d0371cb-d0e9-452e-90a5-b4f07f20c59d-catalog-content\") pod \"redhat-marketplace-sh87m\" (UID: \"5d0371cb-d0e9-452e-90a5-b4f07f20c59d\") " pod="openshift-marketplace/redhat-marketplace-sh87m" Oct 10 18:32:04 crc kubenswrapper[4799]: I1010 18:32:04.055918 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82986\" (UniqueName: \"kubernetes.io/projected/5d0371cb-d0e9-452e-90a5-b4f07f20c59d-kube-api-access-82986\") pod \"redhat-marketplace-sh87m\" (UID: \"5d0371cb-d0e9-452e-90a5-b4f07f20c59d\") " pod="openshift-marketplace/redhat-marketplace-sh87m" Oct 10 18:32:04 crc kubenswrapper[4799]: I1010 18:32:04.224346 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sh87m" Oct 10 18:32:04 crc kubenswrapper[4799]: I1010 18:32:04.723349 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sh87m"] Oct 10 18:32:05 crc kubenswrapper[4799]: I1010 18:32:05.733895 4799 generic.go:334] "Generic (PLEG): container finished" podID="5d0371cb-d0e9-452e-90a5-b4f07f20c59d" containerID="1aa88f0dcc68b8adf14f0c50e6f673936f81168114f91db6d939478f2105cc9e" exitCode=0 Oct 10 18:32:05 crc kubenswrapper[4799]: I1010 18:32:05.734079 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sh87m" event={"ID":"5d0371cb-d0e9-452e-90a5-b4f07f20c59d","Type":"ContainerDied","Data":"1aa88f0dcc68b8adf14f0c50e6f673936f81168114f91db6d939478f2105cc9e"} Oct 10 18:32:05 crc kubenswrapper[4799]: I1010 18:32:05.734581 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sh87m" event={"ID":"5d0371cb-d0e9-452e-90a5-b4f07f20c59d","Type":"ContainerStarted","Data":"1c87346b8ae85227abb4609692242810e4bbd59b241348695518cc5dd53b538a"} Oct 10 18:32:07 crc kubenswrapper[4799]: I1010 18:32:07.760226 4799 generic.go:334] "Generic (PLEG): container finished" podID="5d0371cb-d0e9-452e-90a5-b4f07f20c59d" containerID="d8b3bb182c1f8428115436d87f32567c3ed79e39170f9733cc59edc17b67192f" exitCode=0 Oct 10 18:32:07 crc kubenswrapper[4799]: I1010 18:32:07.760340 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sh87m" event={"ID":"5d0371cb-d0e9-452e-90a5-b4f07f20c59d","Type":"ContainerDied","Data":"d8b3bb182c1f8428115436d87f32567c3ed79e39170f9733cc59edc17b67192f"} Oct 10 18:32:08 crc kubenswrapper[4799]: I1010 18:32:08.402213 4799 scope.go:117] "RemoveContainer" containerID="e1753dd33d9c2573eba3d4245d76828dad7bb15c7538442d84b9b903c94df080" Oct 10 18:32:08 crc kubenswrapper[4799]: E1010 18:32:08.402808 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:32:08 crc kubenswrapper[4799]: I1010 18:32:08.773163 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sh87m" event={"ID":"5d0371cb-d0e9-452e-90a5-b4f07f20c59d","Type":"ContainerStarted","Data":"6c8b27bab89e35bcfc4f1c80e2a0e6362966dc57fc770dbd0543d121e4f4e6f2"} Oct 10 18:32:08 crc kubenswrapper[4799]: I1010 18:32:08.812106 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-sh87m" podStartSLOduration=3.337718703 podStartE2EDuration="5.812084454s" podCreationTimestamp="2025-10-10 18:32:03 +0000 UTC" firstStartedPulling="2025-10-10 18:32:05.736641166 +0000 UTC m=+7219.244965331" lastFinishedPulling="2025-10-10 18:32:08.211006957 +0000 UTC m=+7221.719331082" observedRunningTime="2025-10-10 18:32:08.804513349 +0000 UTC m=+7222.312837464" watchObservedRunningTime="2025-10-10 18:32:08.812084454 +0000 UTC m=+7222.320408579" Oct 10 18:32:14 crc kubenswrapper[4799]: I1010 18:32:14.225162 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-sh87m" Oct 10 18:32:14 crc kubenswrapper[4799]: I1010 18:32:14.225703 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-sh87m" Oct 10 18:32:14 crc kubenswrapper[4799]: I1010 18:32:14.304103 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-sh87m" Oct 10 18:32:14 crc kubenswrapper[4799]: I1010 18:32:14.951467 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-sh87m" Oct 10 18:32:15 crc kubenswrapper[4799]: I1010 18:32:15.051149 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sh87m"] Oct 10 18:32:16 crc kubenswrapper[4799]: I1010 18:32:16.871341 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-sh87m" podUID="5d0371cb-d0e9-452e-90a5-b4f07f20c59d" containerName="registry-server" containerID="cri-o://6c8b27bab89e35bcfc4f1c80e2a0e6362966dc57fc770dbd0543d121e4f4e6f2" gracePeriod=2 Oct 10 18:32:17 crc kubenswrapper[4799]: I1010 18:32:17.478057 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sh87m" Oct 10 18:32:17 crc kubenswrapper[4799]: I1010 18:32:17.576934 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d0371cb-d0e9-452e-90a5-b4f07f20c59d-utilities\") pod \"5d0371cb-d0e9-452e-90a5-b4f07f20c59d\" (UID: \"5d0371cb-d0e9-452e-90a5-b4f07f20c59d\") " Oct 10 18:32:17 crc kubenswrapper[4799]: I1010 18:32:17.577068 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d0371cb-d0e9-452e-90a5-b4f07f20c59d-catalog-content\") pod \"5d0371cb-d0e9-452e-90a5-b4f07f20c59d\" (UID: \"5d0371cb-d0e9-452e-90a5-b4f07f20c59d\") " Oct 10 18:32:17 crc kubenswrapper[4799]: I1010 18:32:17.577165 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-82986\" (UniqueName: \"kubernetes.io/projected/5d0371cb-d0e9-452e-90a5-b4f07f20c59d-kube-api-access-82986\") pod \"5d0371cb-d0e9-452e-90a5-b4f07f20c59d\" (UID: \"5d0371cb-d0e9-452e-90a5-b4f07f20c59d\") " Oct 10 18:32:17 crc kubenswrapper[4799]: I1010 18:32:17.577744 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d0371cb-d0e9-452e-90a5-b4f07f20c59d-utilities" (OuterVolumeSpecName: "utilities") pod "5d0371cb-d0e9-452e-90a5-b4f07f20c59d" (UID: "5d0371cb-d0e9-452e-90a5-b4f07f20c59d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:32:17 crc kubenswrapper[4799]: I1010 18:32:17.585364 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d0371cb-d0e9-452e-90a5-b4f07f20c59d-kube-api-access-82986" (OuterVolumeSpecName: "kube-api-access-82986") pod "5d0371cb-d0e9-452e-90a5-b4f07f20c59d" (UID: "5d0371cb-d0e9-452e-90a5-b4f07f20c59d"). InnerVolumeSpecName "kube-api-access-82986". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:32:17 crc kubenswrapper[4799]: I1010 18:32:17.588824 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d0371cb-d0e9-452e-90a5-b4f07f20c59d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5d0371cb-d0e9-452e-90a5-b4f07f20c59d" (UID: "5d0371cb-d0e9-452e-90a5-b4f07f20c59d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:32:17 crc kubenswrapper[4799]: I1010 18:32:17.679093 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d0371cb-d0e9-452e-90a5-b4f07f20c59d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 18:32:17 crc kubenswrapper[4799]: I1010 18:32:17.679122 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-82986\" (UniqueName: \"kubernetes.io/projected/5d0371cb-d0e9-452e-90a5-b4f07f20c59d-kube-api-access-82986\") on node \"crc\" DevicePath \"\"" Oct 10 18:32:17 crc kubenswrapper[4799]: I1010 18:32:17.679133 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d0371cb-d0e9-452e-90a5-b4f07f20c59d-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 18:32:17 crc kubenswrapper[4799]: I1010 18:32:17.888263 4799 generic.go:334] "Generic (PLEG): container finished" podID="5d0371cb-d0e9-452e-90a5-b4f07f20c59d" containerID="6c8b27bab89e35bcfc4f1c80e2a0e6362966dc57fc770dbd0543d121e4f4e6f2" exitCode=0 Oct 10 18:32:17 crc kubenswrapper[4799]: I1010 18:32:17.888367 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sh87m" Oct 10 18:32:17 crc kubenswrapper[4799]: I1010 18:32:17.888372 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sh87m" event={"ID":"5d0371cb-d0e9-452e-90a5-b4f07f20c59d","Type":"ContainerDied","Data":"6c8b27bab89e35bcfc4f1c80e2a0e6362966dc57fc770dbd0543d121e4f4e6f2"} Oct 10 18:32:17 crc kubenswrapper[4799]: I1010 18:32:17.889538 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sh87m" event={"ID":"5d0371cb-d0e9-452e-90a5-b4f07f20c59d","Type":"ContainerDied","Data":"1c87346b8ae85227abb4609692242810e4bbd59b241348695518cc5dd53b538a"} Oct 10 18:32:17 crc kubenswrapper[4799]: I1010 18:32:17.889579 4799 scope.go:117] "RemoveContainer" containerID="6c8b27bab89e35bcfc4f1c80e2a0e6362966dc57fc770dbd0543d121e4f4e6f2" Oct 10 18:32:17 crc kubenswrapper[4799]: I1010 18:32:17.939735 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sh87m"] Oct 10 18:32:17 crc kubenswrapper[4799]: I1010 18:32:17.940207 4799 scope.go:117] "RemoveContainer" containerID="d8b3bb182c1f8428115436d87f32567c3ed79e39170f9733cc59edc17b67192f" Oct 10 18:32:17 crc kubenswrapper[4799]: I1010 18:32:17.963219 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-sh87m"] Oct 10 18:32:17 crc kubenswrapper[4799]: I1010 18:32:17.980494 4799 scope.go:117] "RemoveContainer" containerID="1aa88f0dcc68b8adf14f0c50e6f673936f81168114f91db6d939478f2105cc9e" Oct 10 18:32:18 crc kubenswrapper[4799]: I1010 18:32:18.065487 4799 scope.go:117] "RemoveContainer" containerID="6c8b27bab89e35bcfc4f1c80e2a0e6362966dc57fc770dbd0543d121e4f4e6f2" Oct 10 18:32:18 crc kubenswrapper[4799]: E1010 18:32:18.067675 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c8b27bab89e35bcfc4f1c80e2a0e6362966dc57fc770dbd0543d121e4f4e6f2\": container with ID starting with 6c8b27bab89e35bcfc4f1c80e2a0e6362966dc57fc770dbd0543d121e4f4e6f2 not found: ID does not exist" containerID="6c8b27bab89e35bcfc4f1c80e2a0e6362966dc57fc770dbd0543d121e4f4e6f2" Oct 10 18:32:18 crc kubenswrapper[4799]: I1010 18:32:18.067900 4799 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c8b27bab89e35bcfc4f1c80e2a0e6362966dc57fc770dbd0543d121e4f4e6f2"} err="failed to get container status \"6c8b27bab89e35bcfc4f1c80e2a0e6362966dc57fc770dbd0543d121e4f4e6f2\": rpc error: code = NotFound desc = could not find container \"6c8b27bab89e35bcfc4f1c80e2a0e6362966dc57fc770dbd0543d121e4f4e6f2\": container with ID starting with 6c8b27bab89e35bcfc4f1c80e2a0e6362966dc57fc770dbd0543d121e4f4e6f2 not found: ID does not exist" Oct 10 18:32:18 crc kubenswrapper[4799]: I1010 18:32:18.067984 4799 scope.go:117] "RemoveContainer" containerID="d8b3bb182c1f8428115436d87f32567c3ed79e39170f9733cc59edc17b67192f" Oct 10 18:32:18 crc kubenswrapper[4799]: E1010 18:32:18.069428 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8b3bb182c1f8428115436d87f32567c3ed79e39170f9733cc59edc17b67192f\": container with ID starting with d8b3bb182c1f8428115436d87f32567c3ed79e39170f9733cc59edc17b67192f not found: ID does not exist" containerID="d8b3bb182c1f8428115436d87f32567c3ed79e39170f9733cc59edc17b67192f" Oct 10 18:32:18 crc kubenswrapper[4799]: I1010 18:32:18.069501 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8b3bb182c1f8428115436d87f32567c3ed79e39170f9733cc59edc17b67192f"} err="failed to get container status \"d8b3bb182c1f8428115436d87f32567c3ed79e39170f9733cc59edc17b67192f\": rpc error: code = NotFound desc = could not find container \"d8b3bb182c1f8428115436d87f32567c3ed79e39170f9733cc59edc17b67192f\": container with ID starting with d8b3bb182c1f8428115436d87f32567c3ed79e39170f9733cc59edc17b67192f not found: ID does not exist" Oct 10 18:32:18 crc kubenswrapper[4799]: I1010 18:32:18.069540 4799 scope.go:117] "RemoveContainer" containerID="1aa88f0dcc68b8adf14f0c50e6f673936f81168114f91db6d939478f2105cc9e" Oct 10 18:32:18 crc kubenswrapper[4799]: E1010 18:32:18.070098 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1aa88f0dcc68b8adf14f0c50e6f673936f81168114f91db6d939478f2105cc9e\": container with ID starting with 1aa88f0dcc68b8adf14f0c50e6f673936f81168114f91db6d939478f2105cc9e not found: ID does not exist" containerID="1aa88f0dcc68b8adf14f0c50e6f673936f81168114f91db6d939478f2105cc9e" Oct 10 18:32:18 crc kubenswrapper[4799]: I1010 18:32:18.070150 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1aa88f0dcc68b8adf14f0c50e6f673936f81168114f91db6d939478f2105cc9e"} err="failed to get container status \"1aa88f0dcc68b8adf14f0c50e6f673936f81168114f91db6d939478f2105cc9e\": rpc error: code = NotFound desc = could not find container \"1aa88f0dcc68b8adf14f0c50e6f673936f81168114f91db6d939478f2105cc9e\": container with ID starting with 1aa88f0dcc68b8adf14f0c50e6f673936f81168114f91db6d939478f2105cc9e not found: ID does not exist" Oct 10 18:32:19 crc kubenswrapper[4799]: I1010 18:32:19.426523 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d0371cb-d0e9-452e-90a5-b4f07f20c59d" path="/var/lib/kubelet/pods/5d0371cb-d0e9-452e-90a5-b4f07f20c59d/volumes" Oct 10 18:32:22 crc kubenswrapper[4799]: I1010 18:32:22.402982 4799 scope.go:117] "RemoveContainer" containerID="e1753dd33d9c2573eba3d4245d76828dad7bb15c7538442d84b9b903c94df080" Oct 10 18:32:22 crc kubenswrapper[4799]: E1010 18:32:22.404308 4799 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:32:36 crc kubenswrapper[4799]: I1010 18:32:36.403222 4799 scope.go:117] "RemoveContainer" containerID="e1753dd33d9c2573eba3d4245d76828dad7bb15c7538442d84b9b903c94df080" Oct 10 18:32:36 crc kubenswrapper[4799]: E1010 18:32:36.404423 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:32:48 crc kubenswrapper[4799]: I1010 18:32:48.403208 4799 scope.go:117] "RemoveContainer" containerID="e1753dd33d9c2573eba3d4245d76828dad7bb15c7538442d84b9b903c94df080" Oct 10 18:32:48 crc kubenswrapper[4799]: E1010 18:32:48.404287 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:33:02 crc kubenswrapper[4799]: I1010 18:33:02.402558 4799 scope.go:117] "RemoveContainer" containerID="e1753dd33d9c2573eba3d4245d76828dad7bb15c7538442d84b9b903c94df080" Oct 10 18:33:02 crc kubenswrapper[4799]: E1010 18:33:02.403240 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:33:15 crc kubenswrapper[4799]: I1010 18:33:15.404367 4799 scope.go:117] "RemoveContainer" containerID="e1753dd33d9c2573eba3d4245d76828dad7bb15c7538442d84b9b903c94df080" Oct 10 18:33:16 crc kubenswrapper[4799]: I1010 18:33:16.653019 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"d11b6649908656854cce5d87d576a743fe05f8e582601b1ccaf94446dbe0c646"} Oct 10 18:33:35 crc kubenswrapper[4799]: I1010 18:33:35.900595 4799 generic.go:334] "Generic (PLEG): container finished" podID="797acbff-1308-4140-9468-a7eaaa3e5e75" containerID="1063add3619ab6d30c0e703e232d076098d6a91ce66fc3c39be2c679d0177d28" exitCode=0 Oct 10 18:33:35 crc kubenswrapper[4799]: I1010 18:33:35.900714 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-pgztr" event={"ID":"797acbff-1308-4140-9468-a7eaaa3e5e75","Type":"ContainerDied","Data":"1063add3619ab6d30c0e703e232d076098d6a91ce66fc3c39be2c679d0177d28"} Oct 10 18:33:37 crc kubenswrapper[4799]: I1010 18:33:37.447814 
4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-pgztr" Oct 10 18:33:37 crc kubenswrapper[4799]: I1010 18:33:37.550901 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/797acbff-1308-4140-9468-a7eaaa3e5e75-ceph\") pod \"797acbff-1308-4140-9468-a7eaaa3e5e75\" (UID: \"797acbff-1308-4140-9468-a7eaaa3e5e75\") " Oct 10 18:33:37 crc kubenswrapper[4799]: I1010 18:33:37.550988 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/797acbff-1308-4140-9468-a7eaaa3e5e75-inventory\") pod \"797acbff-1308-4140-9468-a7eaaa3e5e75\" (UID: \"797acbff-1308-4140-9468-a7eaaa3e5e75\") " Oct 10 18:33:37 crc kubenswrapper[4799]: I1010 18:33:37.551041 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/797acbff-1308-4140-9468-a7eaaa3e5e75-bootstrap-combined-ca-bundle\") pod \"797acbff-1308-4140-9468-a7eaaa3e5e75\" (UID: \"797acbff-1308-4140-9468-a7eaaa3e5e75\") " Oct 10 18:33:37 crc kubenswrapper[4799]: I1010 18:33:37.551178 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9kc8\" (UniqueName: \"kubernetes.io/projected/797acbff-1308-4140-9468-a7eaaa3e5e75-kube-api-access-w9kc8\") pod \"797acbff-1308-4140-9468-a7eaaa3e5e75\" (UID: \"797acbff-1308-4140-9468-a7eaaa3e5e75\") " Oct 10 18:33:37 crc kubenswrapper[4799]: I1010 18:33:37.551278 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/797acbff-1308-4140-9468-a7eaaa3e5e75-ssh-key\") pod \"797acbff-1308-4140-9468-a7eaaa3e5e75\" (UID: \"797acbff-1308-4140-9468-a7eaaa3e5e75\") " Oct 10 18:33:37 crc kubenswrapper[4799]: I1010 18:33:37.559950 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/797acbff-1308-4140-9468-a7eaaa3e5e75-ceph" (OuterVolumeSpecName: "ceph") pod "797acbff-1308-4140-9468-a7eaaa3e5e75" (UID: "797acbff-1308-4140-9468-a7eaaa3e5e75"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:33:37 crc kubenswrapper[4799]: I1010 18:33:37.559993 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/797acbff-1308-4140-9468-a7eaaa3e5e75-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "797acbff-1308-4140-9468-a7eaaa3e5e75" (UID: "797acbff-1308-4140-9468-a7eaaa3e5e75"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:33:37 crc kubenswrapper[4799]: I1010 18:33:37.560039 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/797acbff-1308-4140-9468-a7eaaa3e5e75-kube-api-access-w9kc8" (OuterVolumeSpecName: "kube-api-access-w9kc8") pod "797acbff-1308-4140-9468-a7eaaa3e5e75" (UID: "797acbff-1308-4140-9468-a7eaaa3e5e75"). InnerVolumeSpecName "kube-api-access-w9kc8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:33:37 crc kubenswrapper[4799]: I1010 18:33:37.584235 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/797acbff-1308-4140-9468-a7eaaa3e5e75-inventory" (OuterVolumeSpecName: "inventory") pod "797acbff-1308-4140-9468-a7eaaa3e5e75" (UID: "797acbff-1308-4140-9468-a7eaaa3e5e75"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:33:37 crc kubenswrapper[4799]: I1010 18:33:37.594367 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/797acbff-1308-4140-9468-a7eaaa3e5e75-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "797acbff-1308-4140-9468-a7eaaa3e5e75" (UID: "797acbff-1308-4140-9468-a7eaaa3e5e75"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:33:37 crc kubenswrapper[4799]: I1010 18:33:37.653957 4799 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/797acbff-1308-4140-9468-a7eaaa3e5e75-ceph\") on node \"crc\" DevicePath \"\"" Oct 10 18:33:37 crc kubenswrapper[4799]: I1010 18:33:37.654032 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/797acbff-1308-4140-9468-a7eaaa3e5e75-inventory\") on node \"crc\" DevicePath \"\"" Oct 10 18:33:37 crc kubenswrapper[4799]: I1010 18:33:37.654050 4799 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/797acbff-1308-4140-9468-a7eaaa3e5e75-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:33:37 crc kubenswrapper[4799]: I1010 18:33:37.654065 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9kc8\" (UniqueName: \"kubernetes.io/projected/797acbff-1308-4140-9468-a7eaaa3e5e75-kube-api-access-w9kc8\") on node \"crc\" DevicePath \"\"" Oct 10 18:33:37 crc kubenswrapper[4799]: I1010 18:33:37.654076 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/797acbff-1308-4140-9468-a7eaaa3e5e75-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 10 18:33:37 crc kubenswrapper[4799]: I1010 18:33:37.925015 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-pgztr" event={"ID":"797acbff-1308-4140-9468-a7eaaa3e5e75","Type":"ContainerDied","Data":"ed1ae20ac64d3565e176b702181adc59dc2f6170a13e5e00cd6dcb63162aec50"} Oct 10 18:33:37 crc kubenswrapper[4799]: I1010 18:33:37.925057 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ed1ae20ac64d3565e176b702181adc59dc2f6170a13e5e00cd6dcb63162aec50" Oct 10 18:33:37 crc kubenswrapper[4799]: I1010 18:33:37.925081 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-pgztr" Oct 10 18:33:38 crc kubenswrapper[4799]: I1010 18:33:38.041693 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-jrjjm"] Oct 10 18:33:38 crc kubenswrapper[4799]: E1010 18:33:38.042653 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d0371cb-d0e9-452e-90a5-b4f07f20c59d" containerName="extract-utilities" Oct 10 18:33:38 crc kubenswrapper[4799]: I1010 18:33:38.042703 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d0371cb-d0e9-452e-90a5-b4f07f20c59d" containerName="extract-utilities" Oct 10 18:33:38 crc kubenswrapper[4799]: E1010 18:33:38.042749 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d0371cb-d0e9-452e-90a5-b4f07f20c59d" containerName="extract-content" Oct 10 18:33:38 crc kubenswrapper[4799]: I1010 18:33:38.042807 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d0371cb-d0e9-452e-90a5-b4f07f20c59d" containerName="extract-content" Oct 10 18:33:38 crc kubenswrapper[4799]: E1010 18:33:38.042843 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="797acbff-1308-4140-9468-a7eaaa3e5e75" containerName="bootstrap-openstack-openstack-cell1" Oct 10 18:33:38 crc kubenswrapper[4799]: I1010 18:33:38.042862 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="797acbff-1308-4140-9468-a7eaaa3e5e75" containerName="bootstrap-openstack-openstack-cell1" Oct 10 18:33:38 crc kubenswrapper[4799]: E1010 18:33:38.042894 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d0371cb-d0e9-452e-90a5-b4f07f20c59d" containerName="registry-server" Oct 10 18:33:38 crc kubenswrapper[4799]: I1010 18:33:38.042912 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d0371cb-d0e9-452e-90a5-b4f07f20c59d" containerName="registry-server" Oct 10 18:33:38 crc kubenswrapper[4799]: I1010 18:33:38.043457 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="797acbff-1308-4140-9468-a7eaaa3e5e75" containerName="bootstrap-openstack-openstack-cell1" Oct 10 18:33:38 crc kubenswrapper[4799]: I1010 18:33:38.043510 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d0371cb-d0e9-452e-90a5-b4f07f20c59d" containerName="registry-server" Oct 10 18:33:38 crc kubenswrapper[4799]: I1010 18:33:38.045300 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-jrjjm" Oct 10 18:33:38 crc kubenswrapper[4799]: I1010 18:33:38.051158 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 10 18:33:38 crc kubenswrapper[4799]: I1010 18:33:38.051266 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 10 18:33:38 crc kubenswrapper[4799]: I1010 18:33:38.051503 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 10 18:33:38 crc kubenswrapper[4799]: I1010 18:33:38.051944 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-rdlhr" Oct 10 18:33:38 crc kubenswrapper[4799]: I1010 18:33:38.070344 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-jrjjm"] Oct 10 18:33:38 crc kubenswrapper[4799]: I1010 18:33:38.170724 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5d50c12a-292f-4bcc-9915-0c2f99f7eb28-inventory\") pod \"download-cache-openstack-openstack-cell1-jrjjm\" (UID: \"5d50c12a-292f-4bcc-9915-0c2f99f7eb28\") " pod="openstack/download-cache-openstack-openstack-cell1-jrjjm" Oct 10 18:33:38 crc kubenswrapper[4799]: I1010 18:33:38.170956 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-znztp\" (UniqueName: \"kubernetes.io/projected/5d50c12a-292f-4bcc-9915-0c2f99f7eb28-kube-api-access-znztp\") pod \"download-cache-openstack-openstack-cell1-jrjjm\" (UID: \"5d50c12a-292f-4bcc-9915-0c2f99f7eb28\") " pod="openstack/download-cache-openstack-openstack-cell1-jrjjm" Oct 10 18:33:38 crc kubenswrapper[4799]: I1010 18:33:38.171190 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5d50c12a-292f-4bcc-9915-0c2f99f7eb28-ceph\") pod \"download-cache-openstack-openstack-cell1-jrjjm\" (UID: \"5d50c12a-292f-4bcc-9915-0c2f99f7eb28\") " pod="openstack/download-cache-openstack-openstack-cell1-jrjjm" Oct 10 18:33:38 crc kubenswrapper[4799]: I1010 18:33:38.171294 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5d50c12a-292f-4bcc-9915-0c2f99f7eb28-ssh-key\") pod \"download-cache-openstack-openstack-cell1-jrjjm\" (UID: \"5d50c12a-292f-4bcc-9915-0c2f99f7eb28\") " pod="openstack/download-cache-openstack-openstack-cell1-jrjjm" Oct 10 18:33:38 crc kubenswrapper[4799]: I1010 18:33:38.272451 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-znztp\" (UniqueName: \"kubernetes.io/projected/5d50c12a-292f-4bcc-9915-0c2f99f7eb28-kube-api-access-znztp\") pod \"download-cache-openstack-openstack-cell1-jrjjm\" (UID: \"5d50c12a-292f-4bcc-9915-0c2f99f7eb28\") " pod="openstack/download-cache-openstack-openstack-cell1-jrjjm" Oct 10 18:33:38 crc kubenswrapper[4799]: I1010 18:33:38.272570 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5d50c12a-292f-4bcc-9915-0c2f99f7eb28-ceph\") pod \"download-cache-openstack-openstack-cell1-jrjjm\" (UID: \"5d50c12a-292f-4bcc-9915-0c2f99f7eb28\") " pod="openstack/download-cache-openstack-openstack-cell1-jrjjm" Oct 10 
18:33:38 crc kubenswrapper[4799]: I1010 18:33:38.272594 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5d50c12a-292f-4bcc-9915-0c2f99f7eb28-ssh-key\") pod \"download-cache-openstack-openstack-cell1-jrjjm\" (UID: \"5d50c12a-292f-4bcc-9915-0c2f99f7eb28\") " pod="openstack/download-cache-openstack-openstack-cell1-jrjjm" Oct 10 18:33:38 crc kubenswrapper[4799]: I1010 18:33:38.272687 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5d50c12a-292f-4bcc-9915-0c2f99f7eb28-inventory\") pod \"download-cache-openstack-openstack-cell1-jrjjm\" (UID: \"5d50c12a-292f-4bcc-9915-0c2f99f7eb28\") " pod="openstack/download-cache-openstack-openstack-cell1-jrjjm" Oct 10 18:33:38 crc kubenswrapper[4799]: I1010 18:33:38.277001 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5d50c12a-292f-4bcc-9915-0c2f99f7eb28-ssh-key\") pod \"download-cache-openstack-openstack-cell1-jrjjm\" (UID: \"5d50c12a-292f-4bcc-9915-0c2f99f7eb28\") " pod="openstack/download-cache-openstack-openstack-cell1-jrjjm" Oct 10 18:33:38 crc kubenswrapper[4799]: I1010 18:33:38.277310 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5d50c12a-292f-4bcc-9915-0c2f99f7eb28-inventory\") pod \"download-cache-openstack-openstack-cell1-jrjjm\" (UID: \"5d50c12a-292f-4bcc-9915-0c2f99f7eb28\") " pod="openstack/download-cache-openstack-openstack-cell1-jrjjm" Oct 10 18:33:38 crc kubenswrapper[4799]: I1010 18:33:38.277364 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5d50c12a-292f-4bcc-9915-0c2f99f7eb28-ceph\") pod \"download-cache-openstack-openstack-cell1-jrjjm\" (UID: \"5d50c12a-292f-4bcc-9915-0c2f99f7eb28\") " pod="openstack/download-cache-openstack-openstack-cell1-jrjjm" Oct 10 18:33:38 crc kubenswrapper[4799]: I1010 18:33:38.305037 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-znztp\" (UniqueName: \"kubernetes.io/projected/5d50c12a-292f-4bcc-9915-0c2f99f7eb28-kube-api-access-znztp\") pod \"download-cache-openstack-openstack-cell1-jrjjm\" (UID: \"5d50c12a-292f-4bcc-9915-0c2f99f7eb28\") " pod="openstack/download-cache-openstack-openstack-cell1-jrjjm" Oct 10 18:33:38 crc kubenswrapper[4799]: I1010 18:33:38.368908 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-jrjjm" Oct 10 18:33:39 crc kubenswrapper[4799]: I1010 18:33:39.014435 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-jrjjm"] Oct 10 18:33:39 crc kubenswrapper[4799]: I1010 18:33:39.946018 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-jrjjm" event={"ID":"5d50c12a-292f-4bcc-9915-0c2f99f7eb28","Type":"ContainerStarted","Data":"49e294ff36e1ddbdcb08f25581ba5343b6b7bcf22f11d75141c1e12feedeca9a"} Oct 10 18:33:39 crc kubenswrapper[4799]: I1010 18:33:39.946335 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-jrjjm" event={"ID":"5d50c12a-292f-4bcc-9915-0c2f99f7eb28","Type":"ContainerStarted","Data":"00c004a0a94cd6f65d7983191dc4c273b5fbbd36dd97e6e242e767c3e0f5ab66"} Oct 10 18:33:39 crc kubenswrapper[4799]: I1010 18:33:39.970313 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-openstack-openstack-cell1-jrjjm" podStartSLOduration=1.420094758 podStartE2EDuration="1.9702974s" podCreationTimestamp="2025-10-10 18:33:38 +0000 UTC" firstStartedPulling="2025-10-10 18:33:39.043595316 +0000 UTC m=+7312.551919431" lastFinishedPulling="2025-10-10 18:33:39.593797788 +0000 UTC m=+7313.102122073" observedRunningTime="2025-10-10 18:33:39.963236818 +0000 UTC m=+7313.471560973" watchObservedRunningTime="2025-10-10 18:33:39.9702974 +0000 UTC m=+7313.478621515" Oct 10 18:35:12 crc kubenswrapper[4799]: I1010 18:35:12.083153 4799 generic.go:334] "Generic (PLEG): container finished" podID="5d50c12a-292f-4bcc-9915-0c2f99f7eb28" containerID="49e294ff36e1ddbdcb08f25581ba5343b6b7bcf22f11d75141c1e12feedeca9a" exitCode=0 Oct 10 18:35:12 crc kubenswrapper[4799]: I1010 18:35:12.083256 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-jrjjm" event={"ID":"5d50c12a-292f-4bcc-9915-0c2f99f7eb28","Type":"ContainerDied","Data":"49e294ff36e1ddbdcb08f25581ba5343b6b7bcf22f11d75141c1e12feedeca9a"} Oct 10 18:35:13 crc kubenswrapper[4799]: I1010 18:35:13.790495 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-jrjjm" Oct 10 18:35:13 crc kubenswrapper[4799]: I1010 18:35:13.971129 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-znztp\" (UniqueName: \"kubernetes.io/projected/5d50c12a-292f-4bcc-9915-0c2f99f7eb28-kube-api-access-znztp\") pod \"5d50c12a-292f-4bcc-9915-0c2f99f7eb28\" (UID: \"5d50c12a-292f-4bcc-9915-0c2f99f7eb28\") " Oct 10 18:35:13 crc kubenswrapper[4799]: I1010 18:35:13.971201 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5d50c12a-292f-4bcc-9915-0c2f99f7eb28-inventory\") pod \"5d50c12a-292f-4bcc-9915-0c2f99f7eb28\" (UID: \"5d50c12a-292f-4bcc-9915-0c2f99f7eb28\") " Oct 10 18:35:13 crc kubenswrapper[4799]: I1010 18:35:13.971345 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5d50c12a-292f-4bcc-9915-0c2f99f7eb28-ssh-key\") pod \"5d50c12a-292f-4bcc-9915-0c2f99f7eb28\" (UID: \"5d50c12a-292f-4bcc-9915-0c2f99f7eb28\") " Oct 10 18:35:13 crc kubenswrapper[4799]: I1010 18:35:13.971690 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5d50c12a-292f-4bcc-9915-0c2f99f7eb28-ceph\") pod \"5d50c12a-292f-4bcc-9915-0c2f99f7eb28\" (UID: \"5d50c12a-292f-4bcc-9915-0c2f99f7eb28\") " Oct 10 18:35:13 crc kubenswrapper[4799]: I1010 18:35:13.981168 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d50c12a-292f-4bcc-9915-0c2f99f7eb28-kube-api-access-znztp" (OuterVolumeSpecName: "kube-api-access-znztp") pod "5d50c12a-292f-4bcc-9915-0c2f99f7eb28" (UID: "5d50c12a-292f-4bcc-9915-0c2f99f7eb28"). InnerVolumeSpecName "kube-api-access-znztp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:35:14 crc kubenswrapper[4799]: I1010 18:35:14.002958 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d50c12a-292f-4bcc-9915-0c2f99f7eb28-ceph" (OuterVolumeSpecName: "ceph") pod "5d50c12a-292f-4bcc-9915-0c2f99f7eb28" (UID: "5d50c12a-292f-4bcc-9915-0c2f99f7eb28"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:35:14 crc kubenswrapper[4799]: I1010 18:35:14.013098 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d50c12a-292f-4bcc-9915-0c2f99f7eb28-inventory" (OuterVolumeSpecName: "inventory") pod "5d50c12a-292f-4bcc-9915-0c2f99f7eb28" (UID: "5d50c12a-292f-4bcc-9915-0c2f99f7eb28"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:35:14 crc kubenswrapper[4799]: I1010 18:35:14.027172 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d50c12a-292f-4bcc-9915-0c2f99f7eb28-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5d50c12a-292f-4bcc-9915-0c2f99f7eb28" (UID: "5d50c12a-292f-4bcc-9915-0c2f99f7eb28"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:35:14 crc kubenswrapper[4799]: I1010 18:35:14.074037 4799 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5d50c12a-292f-4bcc-9915-0c2f99f7eb28-ceph\") on node \"crc\" DevicePath \"\"" Oct 10 18:35:14 crc kubenswrapper[4799]: I1010 18:35:14.074072 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-znztp\" (UniqueName: \"kubernetes.io/projected/5d50c12a-292f-4bcc-9915-0c2f99f7eb28-kube-api-access-znztp\") on node \"crc\" DevicePath \"\"" Oct 10 18:35:14 crc kubenswrapper[4799]: I1010 18:35:14.074087 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5d50c12a-292f-4bcc-9915-0c2f99f7eb28-inventory\") on node \"crc\" DevicePath \"\"" Oct 10 18:35:14 crc kubenswrapper[4799]: I1010 18:35:14.074099 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5d50c12a-292f-4bcc-9915-0c2f99f7eb28-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 10 18:35:14 crc kubenswrapper[4799]: I1010 18:35:14.109971 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-jrjjm" event={"ID":"5d50c12a-292f-4bcc-9915-0c2f99f7eb28","Type":"ContainerDied","Data":"00c004a0a94cd6f65d7983191dc4c273b5fbbd36dd97e6e242e767c3e0f5ab66"} Oct 10 18:35:14 crc kubenswrapper[4799]: I1010 18:35:14.110024 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="00c004a0a94cd6f65d7983191dc4c273b5fbbd36dd97e6e242e767c3e0f5ab66" Oct 10 18:35:14 crc kubenswrapper[4799]: I1010 18:35:14.110085 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-jrjjm" Oct 10 18:35:14 crc kubenswrapper[4799]: I1010 18:35:14.199040 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-78wrs"] Oct 10 18:35:14 crc kubenswrapper[4799]: E1010 18:35:14.199507 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d50c12a-292f-4bcc-9915-0c2f99f7eb28" containerName="download-cache-openstack-openstack-cell1" Oct 10 18:35:14 crc kubenswrapper[4799]: I1010 18:35:14.199527 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d50c12a-292f-4bcc-9915-0c2f99f7eb28" containerName="download-cache-openstack-openstack-cell1" Oct 10 18:35:14 crc kubenswrapper[4799]: I1010 18:35:14.199844 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d50c12a-292f-4bcc-9915-0c2f99f7eb28" containerName="download-cache-openstack-openstack-cell1" Oct 10 18:35:14 crc kubenswrapper[4799]: I1010 18:35:14.200723 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-78wrs" Oct 10 18:35:14 crc kubenswrapper[4799]: I1010 18:35:14.210196 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 10 18:35:14 crc kubenswrapper[4799]: I1010 18:35:14.210350 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 10 18:35:14 crc kubenswrapper[4799]: I1010 18:35:14.211059 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-rdlhr" Oct 10 18:35:14 crc kubenswrapper[4799]: I1010 18:35:14.211186 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 10 18:35:14 crc kubenswrapper[4799]: I1010 18:35:14.222211 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-78wrs"] Oct 10 18:35:14 crc kubenswrapper[4799]: I1010 18:35:14.379731 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f452ed12-b58b-47b2-86f9-eb1910405e02-ssh-key\") pod \"configure-network-openstack-openstack-cell1-78wrs\" (UID: \"f452ed12-b58b-47b2-86f9-eb1910405e02\") " pod="openstack/configure-network-openstack-openstack-cell1-78wrs" Oct 10 18:35:14 crc kubenswrapper[4799]: I1010 18:35:14.380195 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f452ed12-b58b-47b2-86f9-eb1910405e02-inventory\") pod \"configure-network-openstack-openstack-cell1-78wrs\" (UID: \"f452ed12-b58b-47b2-86f9-eb1910405e02\") " pod="openstack/configure-network-openstack-openstack-cell1-78wrs" Oct 10 18:35:14 crc kubenswrapper[4799]: I1010 18:35:14.380805 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n22pf\" (UniqueName: \"kubernetes.io/projected/f452ed12-b58b-47b2-86f9-eb1910405e02-kube-api-access-n22pf\") pod \"configure-network-openstack-openstack-cell1-78wrs\" (UID: \"f452ed12-b58b-47b2-86f9-eb1910405e02\") " pod="openstack/configure-network-openstack-openstack-cell1-78wrs" Oct 10 18:35:14 crc kubenswrapper[4799]: I1010 18:35:14.381110 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f452ed12-b58b-47b2-86f9-eb1910405e02-ceph\") pod \"configure-network-openstack-openstack-cell1-78wrs\" (UID: \"f452ed12-b58b-47b2-86f9-eb1910405e02\") " pod="openstack/configure-network-openstack-openstack-cell1-78wrs" Oct 10 18:35:14 crc kubenswrapper[4799]: I1010 18:35:14.483449 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n22pf\" (UniqueName: \"kubernetes.io/projected/f452ed12-b58b-47b2-86f9-eb1910405e02-kube-api-access-n22pf\") pod \"configure-network-openstack-openstack-cell1-78wrs\" (UID: \"f452ed12-b58b-47b2-86f9-eb1910405e02\") " pod="openstack/configure-network-openstack-openstack-cell1-78wrs" Oct 10 18:35:14 crc kubenswrapper[4799]: I1010 18:35:14.483776 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f452ed12-b58b-47b2-86f9-eb1910405e02-ceph\") pod \"configure-network-openstack-openstack-cell1-78wrs\" (UID: \"f452ed12-b58b-47b2-86f9-eb1910405e02\") " 
pod="openstack/configure-network-openstack-openstack-cell1-78wrs" Oct 10 18:35:14 crc kubenswrapper[4799]: I1010 18:35:14.487298 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f452ed12-b58b-47b2-86f9-eb1910405e02-ssh-key\") pod \"configure-network-openstack-openstack-cell1-78wrs\" (UID: \"f452ed12-b58b-47b2-86f9-eb1910405e02\") " pod="openstack/configure-network-openstack-openstack-cell1-78wrs" Oct 10 18:35:14 crc kubenswrapper[4799]: I1010 18:35:14.487717 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f452ed12-b58b-47b2-86f9-eb1910405e02-inventory\") pod \"configure-network-openstack-openstack-cell1-78wrs\" (UID: \"f452ed12-b58b-47b2-86f9-eb1910405e02\") " pod="openstack/configure-network-openstack-openstack-cell1-78wrs" Oct 10 18:35:14 crc kubenswrapper[4799]: I1010 18:35:14.492618 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f452ed12-b58b-47b2-86f9-eb1910405e02-inventory\") pod \"configure-network-openstack-openstack-cell1-78wrs\" (UID: \"f452ed12-b58b-47b2-86f9-eb1910405e02\") " pod="openstack/configure-network-openstack-openstack-cell1-78wrs" Oct 10 18:35:14 crc kubenswrapper[4799]: I1010 18:35:14.492657 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f452ed12-b58b-47b2-86f9-eb1910405e02-ssh-key\") pod \"configure-network-openstack-openstack-cell1-78wrs\" (UID: \"f452ed12-b58b-47b2-86f9-eb1910405e02\") " pod="openstack/configure-network-openstack-openstack-cell1-78wrs" Oct 10 18:35:14 crc kubenswrapper[4799]: I1010 18:35:14.496519 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f452ed12-b58b-47b2-86f9-eb1910405e02-ceph\") pod \"configure-network-openstack-openstack-cell1-78wrs\" (UID: \"f452ed12-b58b-47b2-86f9-eb1910405e02\") " pod="openstack/configure-network-openstack-openstack-cell1-78wrs" Oct 10 18:35:14 crc kubenswrapper[4799]: I1010 18:35:14.501859 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n22pf\" (UniqueName: \"kubernetes.io/projected/f452ed12-b58b-47b2-86f9-eb1910405e02-kube-api-access-n22pf\") pod \"configure-network-openstack-openstack-cell1-78wrs\" (UID: \"f452ed12-b58b-47b2-86f9-eb1910405e02\") " pod="openstack/configure-network-openstack-openstack-cell1-78wrs" Oct 10 18:35:14 crc kubenswrapper[4799]: I1010 18:35:14.522607 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-78wrs" Oct 10 18:35:15 crc kubenswrapper[4799]: I1010 18:35:15.219440 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-78wrs"] Oct 10 18:35:15 crc kubenswrapper[4799]: I1010 18:35:15.239227 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 10 18:35:15 crc kubenswrapper[4799]: I1010 18:35:15.248786 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 18:35:15 crc kubenswrapper[4799]: I1010 18:35:15.248867 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 18:35:16 crc kubenswrapper[4799]: I1010 18:35:16.139003 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-78wrs" event={"ID":"f452ed12-b58b-47b2-86f9-eb1910405e02","Type":"ContainerStarted","Data":"0febcf7eab7a2495cb423547259bc6a95c5ee6d8dafee317ef13f753e720325c"} Oct 10 18:35:16 crc kubenswrapper[4799]: I1010 18:35:16.139650 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-78wrs" event={"ID":"f452ed12-b58b-47b2-86f9-eb1910405e02","Type":"ContainerStarted","Data":"6d33127ddc26773711e87b3d8035d8181b608ac957e57e654bccb28f315635e8"} Oct 10 18:35:16 crc kubenswrapper[4799]: I1010 18:35:16.169597 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-openstack-openstack-cell1-78wrs" podStartSLOduration=1.6276210610000001 podStartE2EDuration="2.169571472s" podCreationTimestamp="2025-10-10 18:35:14 +0000 UTC" firstStartedPulling="2025-10-10 18:35:15.238936341 +0000 UTC m=+7408.747260466" lastFinishedPulling="2025-10-10 18:35:15.780886722 +0000 UTC m=+7409.289210877" observedRunningTime="2025-10-10 18:35:16.164661672 +0000 UTC m=+7409.672985827" watchObservedRunningTime="2025-10-10 18:35:16.169571472 +0000 UTC m=+7409.677895617" Oct 10 18:35:45 crc kubenswrapper[4799]: I1010 18:35:45.248498 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 18:35:45 crc kubenswrapper[4799]: I1010 18:35:45.249178 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 18:36:15 crc kubenswrapper[4799]: I1010 18:36:15.249532 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 18:36:15 crc kubenswrapper[4799]: I1010 18:36:15.252288 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 18:36:15 crc kubenswrapper[4799]: I1010 18:36:15.252482 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 18:36:15 crc kubenswrapper[4799]: I1010 18:36:15.254026 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d11b6649908656854cce5d87d576a743fe05f8e582601b1ccaf94446dbe0c646"} pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 10 18:36:15 crc kubenswrapper[4799]: I1010 18:36:15.254296 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" containerID="cri-o://d11b6649908656854cce5d87d576a743fe05f8e582601b1ccaf94446dbe0c646" gracePeriod=600 Oct 10 18:36:15 crc kubenswrapper[4799]: I1010 18:36:15.917633 4799 generic.go:334] "Generic (PLEG): container finished" podID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerID="d11b6649908656854cce5d87d576a743fe05f8e582601b1ccaf94446dbe0c646" exitCode=0 Oct 10 18:36:15 crc kubenswrapper[4799]: I1010 18:36:15.917717 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerDied","Data":"d11b6649908656854cce5d87d576a743fe05f8e582601b1ccaf94446dbe0c646"} Oct 10 18:36:15 crc kubenswrapper[4799]: I1010 18:36:15.918130 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"5d726047dc458172d088ab3478c0245a0f320ffb1060865307a391a8a23b1065"} Oct 10 18:36:15 crc kubenswrapper[4799]: I1010 18:36:15.918163 4799 scope.go:117] "RemoveContainer" containerID="e1753dd33d9c2573eba3d4245d76828dad7bb15c7538442d84b9b903c94df080" Oct 10 18:36:38 crc kubenswrapper[4799]: I1010 18:36:38.215246 4799 generic.go:334] "Generic (PLEG): container finished" podID="f452ed12-b58b-47b2-86f9-eb1910405e02" containerID="0febcf7eab7a2495cb423547259bc6a95c5ee6d8dafee317ef13f753e720325c" exitCode=0 Oct 10 18:36:38 crc kubenswrapper[4799]: I1010 18:36:38.215372 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-78wrs" event={"ID":"f452ed12-b58b-47b2-86f9-eb1910405e02","Type":"ContainerDied","Data":"0febcf7eab7a2495cb423547259bc6a95c5ee6d8dafee317ef13f753e720325c"} Oct 10 18:36:39 crc kubenswrapper[4799]: I1010 18:36:39.832149 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-78wrs" Oct 10 18:36:39 crc kubenswrapper[4799]: I1010 18:36:39.992055 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f452ed12-b58b-47b2-86f9-eb1910405e02-ssh-key\") pod \"f452ed12-b58b-47b2-86f9-eb1910405e02\" (UID: \"f452ed12-b58b-47b2-86f9-eb1910405e02\") " Oct 10 18:36:39 crc kubenswrapper[4799]: I1010 18:36:39.992107 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n22pf\" (UniqueName: \"kubernetes.io/projected/f452ed12-b58b-47b2-86f9-eb1910405e02-kube-api-access-n22pf\") pod \"f452ed12-b58b-47b2-86f9-eb1910405e02\" (UID: \"f452ed12-b58b-47b2-86f9-eb1910405e02\") " Oct 10 18:36:39 crc kubenswrapper[4799]: I1010 18:36:39.992180 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f452ed12-b58b-47b2-86f9-eb1910405e02-ceph\") pod \"f452ed12-b58b-47b2-86f9-eb1910405e02\" (UID: \"f452ed12-b58b-47b2-86f9-eb1910405e02\") " Oct 10 18:36:39 crc kubenswrapper[4799]: I1010 18:36:39.992288 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f452ed12-b58b-47b2-86f9-eb1910405e02-inventory\") pod \"f452ed12-b58b-47b2-86f9-eb1910405e02\" (UID: \"f452ed12-b58b-47b2-86f9-eb1910405e02\") " Oct 10 18:36:40 crc kubenswrapper[4799]: I1010 18:36:40.000958 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f452ed12-b58b-47b2-86f9-eb1910405e02-ceph" (OuterVolumeSpecName: "ceph") pod "f452ed12-b58b-47b2-86f9-eb1910405e02" (UID: "f452ed12-b58b-47b2-86f9-eb1910405e02"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:36:40 crc kubenswrapper[4799]: I1010 18:36:40.001079 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f452ed12-b58b-47b2-86f9-eb1910405e02-kube-api-access-n22pf" (OuterVolumeSpecName: "kube-api-access-n22pf") pod "f452ed12-b58b-47b2-86f9-eb1910405e02" (UID: "f452ed12-b58b-47b2-86f9-eb1910405e02"). InnerVolumeSpecName "kube-api-access-n22pf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:36:40 crc kubenswrapper[4799]: I1010 18:36:40.029101 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f452ed12-b58b-47b2-86f9-eb1910405e02-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f452ed12-b58b-47b2-86f9-eb1910405e02" (UID: "f452ed12-b58b-47b2-86f9-eb1910405e02"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:36:40 crc kubenswrapper[4799]: I1010 18:36:40.044906 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f452ed12-b58b-47b2-86f9-eb1910405e02-inventory" (OuterVolumeSpecName: "inventory") pod "f452ed12-b58b-47b2-86f9-eb1910405e02" (UID: "f452ed12-b58b-47b2-86f9-eb1910405e02"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:36:40 crc kubenswrapper[4799]: I1010 18:36:40.094826 4799 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f452ed12-b58b-47b2-86f9-eb1910405e02-ceph\") on node \"crc\" DevicePath \"\"" Oct 10 18:36:40 crc kubenswrapper[4799]: I1010 18:36:40.094867 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f452ed12-b58b-47b2-86f9-eb1910405e02-inventory\") on node \"crc\" DevicePath \"\"" Oct 10 18:36:40 crc kubenswrapper[4799]: I1010 18:36:40.094880 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f452ed12-b58b-47b2-86f9-eb1910405e02-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 10 18:36:40 crc kubenswrapper[4799]: I1010 18:36:40.094892 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n22pf\" (UniqueName: \"kubernetes.io/projected/f452ed12-b58b-47b2-86f9-eb1910405e02-kube-api-access-n22pf\") on node \"crc\" DevicePath \"\"" Oct 10 18:36:40 crc kubenswrapper[4799]: I1010 18:36:40.255064 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-78wrs" event={"ID":"f452ed12-b58b-47b2-86f9-eb1910405e02","Type":"ContainerDied","Data":"6d33127ddc26773711e87b3d8035d8181b608ac957e57e654bccb28f315635e8"} Oct 10 18:36:40 crc kubenswrapper[4799]: I1010 18:36:40.255133 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6d33127ddc26773711e87b3d8035d8181b608ac957e57e654bccb28f315635e8" Oct 10 18:36:40 crc kubenswrapper[4799]: I1010 18:36:40.255143 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-78wrs" Oct 10 18:36:40 crc kubenswrapper[4799]: I1010 18:36:40.357454 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-cqxqm"] Oct 10 18:36:40 crc kubenswrapper[4799]: E1010 18:36:40.358551 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f452ed12-b58b-47b2-86f9-eb1910405e02" containerName="configure-network-openstack-openstack-cell1" Oct 10 18:36:40 crc kubenswrapper[4799]: I1010 18:36:40.358581 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f452ed12-b58b-47b2-86f9-eb1910405e02" containerName="configure-network-openstack-openstack-cell1" Oct 10 18:36:40 crc kubenswrapper[4799]: I1010 18:36:40.359022 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f452ed12-b58b-47b2-86f9-eb1910405e02" containerName="configure-network-openstack-openstack-cell1" Oct 10 18:36:40 crc kubenswrapper[4799]: I1010 18:36:40.360306 4799 util.go:30] "No sandbox for pod can be found. 
Oct 10 18:36:40 crc kubenswrapper[4799]: I1010 18:36:40.365712 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 10 18:36:40 crc kubenswrapper[4799]: I1010 18:36:40.365997 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret"
Oct 10 18:36:40 crc kubenswrapper[4799]: I1010 18:36:40.366172 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Oct 10 18:36:40 crc kubenswrapper[4799]: I1010 18:36:40.366338 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-rdlhr"
Oct 10 18:36:40 crc kubenswrapper[4799]: I1010 18:36:40.370297 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-cqxqm"]
Oct 10 18:36:40 crc kubenswrapper[4799]: I1010 18:36:40.503786 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cv9xp\" (UniqueName: \"kubernetes.io/projected/22f1eaab-b5ce-4c1f-82f2-b92e28875983-kube-api-access-cv9xp\") pod \"validate-network-openstack-openstack-cell1-cqxqm\" (UID: \"22f1eaab-b5ce-4c1f-82f2-b92e28875983\") " pod="openstack/validate-network-openstack-openstack-cell1-cqxqm"
Oct 10 18:36:40 crc kubenswrapper[4799]: I1010 18:36:40.504081 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/22f1eaab-b5ce-4c1f-82f2-b92e28875983-ssh-key\") pod \"validate-network-openstack-openstack-cell1-cqxqm\" (UID: \"22f1eaab-b5ce-4c1f-82f2-b92e28875983\") " pod="openstack/validate-network-openstack-openstack-cell1-cqxqm"
Oct 10 18:36:40 crc kubenswrapper[4799]: I1010 18:36:40.504643 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/22f1eaab-b5ce-4c1f-82f2-b92e28875983-inventory\") pod \"validate-network-openstack-openstack-cell1-cqxqm\" (UID: \"22f1eaab-b5ce-4c1f-82f2-b92e28875983\") " pod="openstack/validate-network-openstack-openstack-cell1-cqxqm"
Oct 10 18:36:40 crc kubenswrapper[4799]: I1010 18:36:40.504865 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/22f1eaab-b5ce-4c1f-82f2-b92e28875983-ceph\") pod \"validate-network-openstack-openstack-cell1-cqxqm\" (UID: \"22f1eaab-b5ce-4c1f-82f2-b92e28875983\") " pod="openstack/validate-network-openstack-openstack-cell1-cqxqm"
Oct 10 18:36:40 crc kubenswrapper[4799]: I1010 18:36:40.608337 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/22f1eaab-b5ce-4c1f-82f2-b92e28875983-ceph\") pod \"validate-network-openstack-openstack-cell1-cqxqm\" (UID: \"22f1eaab-b5ce-4c1f-82f2-b92e28875983\") " pod="openstack/validate-network-openstack-openstack-cell1-cqxqm"
Oct 10 18:36:40 crc kubenswrapper[4799]: I1010 18:36:40.608464 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cv9xp\" (UniqueName: \"kubernetes.io/projected/22f1eaab-b5ce-4c1f-82f2-b92e28875983-kube-api-access-cv9xp\") pod \"validate-network-openstack-openstack-cell1-cqxqm\" (UID: \"22f1eaab-b5ce-4c1f-82f2-b92e28875983\") " pod="openstack/validate-network-openstack-openstack-cell1-cqxqm"
Oct 10 18:36:40 crc kubenswrapper[4799]: I1010 18:36:40.608584 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/22f1eaab-b5ce-4c1f-82f2-b92e28875983-ssh-key\") pod \"validate-network-openstack-openstack-cell1-cqxqm\" (UID: \"22f1eaab-b5ce-4c1f-82f2-b92e28875983\") " pod="openstack/validate-network-openstack-openstack-cell1-cqxqm"
Oct 10 18:36:40 crc kubenswrapper[4799]: I1010 18:36:40.608809 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/22f1eaab-b5ce-4c1f-82f2-b92e28875983-inventory\") pod \"validate-network-openstack-openstack-cell1-cqxqm\" (UID: \"22f1eaab-b5ce-4c1f-82f2-b92e28875983\") " pod="openstack/validate-network-openstack-openstack-cell1-cqxqm"
Oct 10 18:36:40 crc kubenswrapper[4799]: I1010 18:36:40.618113 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/22f1eaab-b5ce-4c1f-82f2-b92e28875983-ceph\") pod \"validate-network-openstack-openstack-cell1-cqxqm\" (UID: \"22f1eaab-b5ce-4c1f-82f2-b92e28875983\") " pod="openstack/validate-network-openstack-openstack-cell1-cqxqm"
Oct 10 18:36:40 crc kubenswrapper[4799]: I1010 18:36:40.618172 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/22f1eaab-b5ce-4c1f-82f2-b92e28875983-ssh-key\") pod \"validate-network-openstack-openstack-cell1-cqxqm\" (UID: \"22f1eaab-b5ce-4c1f-82f2-b92e28875983\") " pod="openstack/validate-network-openstack-openstack-cell1-cqxqm"
Oct 10 18:36:40 crc kubenswrapper[4799]: I1010 18:36:40.620992 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/22f1eaab-b5ce-4c1f-82f2-b92e28875983-inventory\") pod \"validate-network-openstack-openstack-cell1-cqxqm\" (UID: \"22f1eaab-b5ce-4c1f-82f2-b92e28875983\") " pod="openstack/validate-network-openstack-openstack-cell1-cqxqm"
Oct 10 18:36:40 crc kubenswrapper[4799]: I1010 18:36:40.627057 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cv9xp\" (UniqueName: \"kubernetes.io/projected/22f1eaab-b5ce-4c1f-82f2-b92e28875983-kube-api-access-cv9xp\") pod \"validate-network-openstack-openstack-cell1-cqxqm\" (UID: \"22f1eaab-b5ce-4c1f-82f2-b92e28875983\") " pod="openstack/validate-network-openstack-openstack-cell1-cqxqm"
Oct 10 18:36:40 crc kubenswrapper[4799]: I1010 18:36:40.758264 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-cqxqm"
Oct 10 18:36:41 crc kubenswrapper[4799]: W1010 18:36:41.158383 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod22f1eaab_b5ce_4c1f_82f2_b92e28875983.slice/crio-5d10e3b9775a6d8dddafe6c049c286c6b92eef224c5e1924262f5cb13ee746f3 WatchSource:0}: Error finding container 5d10e3b9775a6d8dddafe6c049c286c6b92eef224c5e1924262f5cb13ee746f3: Status 404 returned error can't find the container with id 5d10e3b9775a6d8dddafe6c049c286c6b92eef224c5e1924262f5cb13ee746f3
Oct 10 18:36:41 crc kubenswrapper[4799]: I1010 18:36:41.161950 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-cqxqm"]
Oct 10 18:36:41 crc kubenswrapper[4799]: I1010 18:36:41.265607 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-cqxqm" event={"ID":"22f1eaab-b5ce-4c1f-82f2-b92e28875983","Type":"ContainerStarted","Data":"5d10e3b9775a6d8dddafe6c049c286c6b92eef224c5e1924262f5cb13ee746f3"}
Oct 10 18:36:42 crc kubenswrapper[4799]: I1010 18:36:42.276246 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-cqxqm" event={"ID":"22f1eaab-b5ce-4c1f-82f2-b92e28875983","Type":"ContainerStarted","Data":"2ec0bc418acdb319541b7ab574a3cb163934990efda791cf2638fc4136a039d0"}
Oct 10 18:36:42 crc kubenswrapper[4799]: I1010 18:36:42.292625 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-openstack-openstack-cell1-cqxqm" podStartSLOduration=1.5542820179999999 podStartE2EDuration="2.292604393s" podCreationTimestamp="2025-10-10 18:36:40 +0000 UTC" firstStartedPulling="2025-10-10 18:36:41.161333813 +0000 UTC m=+7494.669657938" lastFinishedPulling="2025-10-10 18:36:41.899656168 +0000 UTC m=+7495.407980313" observedRunningTime="2025-10-10 18:36:42.289731652 +0000 UTC m=+7495.798055787" watchObservedRunningTime="2025-10-10 18:36:42.292604393 +0000 UTC m=+7495.800928528"
Oct 10 18:36:47 crc kubenswrapper[4799]: I1010 18:36:47.341820 4799 generic.go:334] "Generic (PLEG): container finished" podID="22f1eaab-b5ce-4c1f-82f2-b92e28875983" containerID="2ec0bc418acdb319541b7ab574a3cb163934990efda791cf2638fc4136a039d0" exitCode=0
Oct 10 18:36:47 crc kubenswrapper[4799]: I1010 18:36:47.341953 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-cqxqm" event={"ID":"22f1eaab-b5ce-4c1f-82f2-b92e28875983","Type":"ContainerDied","Data":"2ec0bc418acdb319541b7ab574a3cb163934990efda791cf2638fc4136a039d0"}
Oct 10 18:36:48 crc kubenswrapper[4799]: I1010 18:36:48.908906 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-cqxqm"
Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.022418 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/22f1eaab-b5ce-4c1f-82f2-b92e28875983-inventory\") pod \"22f1eaab-b5ce-4c1f-82f2-b92e28875983\" (UID: \"22f1eaab-b5ce-4c1f-82f2-b92e28875983\") "
Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.022767 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/22f1eaab-b5ce-4c1f-82f2-b92e28875983-ssh-key\") pod \"22f1eaab-b5ce-4c1f-82f2-b92e28875983\" (UID: \"22f1eaab-b5ce-4c1f-82f2-b92e28875983\") "
Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.022999 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/22f1eaab-b5ce-4c1f-82f2-b92e28875983-ceph\") pod \"22f1eaab-b5ce-4c1f-82f2-b92e28875983\" (UID: \"22f1eaab-b5ce-4c1f-82f2-b92e28875983\") "
Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.023040 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cv9xp\" (UniqueName: \"kubernetes.io/projected/22f1eaab-b5ce-4c1f-82f2-b92e28875983-kube-api-access-cv9xp\") pod \"22f1eaab-b5ce-4c1f-82f2-b92e28875983\" (UID: \"22f1eaab-b5ce-4c1f-82f2-b92e28875983\") "
Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.054588 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22f1eaab-b5ce-4c1f-82f2-b92e28875983-kube-api-access-cv9xp" (OuterVolumeSpecName: "kube-api-access-cv9xp") pod "22f1eaab-b5ce-4c1f-82f2-b92e28875983" (UID: "22f1eaab-b5ce-4c1f-82f2-b92e28875983"). InnerVolumeSpecName "kube-api-access-cv9xp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.063894 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22f1eaab-b5ce-4c1f-82f2-b92e28875983-ceph" (OuterVolumeSpecName: "ceph") pod "22f1eaab-b5ce-4c1f-82f2-b92e28875983" (UID: "22f1eaab-b5ce-4c1f-82f2-b92e28875983"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.072840 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22f1eaab-b5ce-4c1f-82f2-b92e28875983-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "22f1eaab-b5ce-4c1f-82f2-b92e28875983" (UID: "22f1eaab-b5ce-4c1f-82f2-b92e28875983"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.084996 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22f1eaab-b5ce-4c1f-82f2-b92e28875983-inventory" (OuterVolumeSpecName: "inventory") pod "22f1eaab-b5ce-4c1f-82f2-b92e28875983" (UID: "22f1eaab-b5ce-4c1f-82f2-b92e28875983"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.125615 4799 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/22f1eaab-b5ce-4c1f-82f2-b92e28875983-ceph\") on node \"crc\" DevicePath \"\"" Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.125649 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cv9xp\" (UniqueName: \"kubernetes.io/projected/22f1eaab-b5ce-4c1f-82f2-b92e28875983-kube-api-access-cv9xp\") on node \"crc\" DevicePath \"\"" Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.125664 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/22f1eaab-b5ce-4c1f-82f2-b92e28875983-inventory\") on node \"crc\" DevicePath \"\"" Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.125676 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/22f1eaab-b5ce-4c1f-82f2-b92e28875983-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.368741 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-cqxqm" event={"ID":"22f1eaab-b5ce-4c1f-82f2-b92e28875983","Type":"ContainerDied","Data":"5d10e3b9775a6d8dddafe6c049c286c6b92eef224c5e1924262f5cb13ee746f3"} Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.368830 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5d10e3b9775a6d8dddafe6c049c286c6b92eef224c5e1924262f5cb13ee746f3" Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.368829 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-cqxqm" Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.449257 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-openstack-openstack-cell1-2pnf6"] Oct 10 18:36:49 crc kubenswrapper[4799]: E1010 18:36:49.449648 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22f1eaab-b5ce-4c1f-82f2-b92e28875983" containerName="validate-network-openstack-openstack-cell1" Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.449664 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="22f1eaab-b5ce-4c1f-82f2-b92e28875983" containerName="validate-network-openstack-openstack-cell1" Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.449936 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="22f1eaab-b5ce-4c1f-82f2-b92e28875983" containerName="validate-network-openstack-openstack-cell1" Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.450672 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-2pnf6" Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.454156 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-rdlhr" Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.454452 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.456500 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.456644 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.477887 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-openstack-openstack-cell1-2pnf6"] Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.537866 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/94d4ca2f-8a56-4d20-8f4e-2adb8a134bed-inventory\") pod \"install-os-openstack-openstack-cell1-2pnf6\" (UID: \"94d4ca2f-8a56-4d20-8f4e-2adb8a134bed\") " pod="openstack/install-os-openstack-openstack-cell1-2pnf6" Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.538341 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9597f\" (UniqueName: \"kubernetes.io/projected/94d4ca2f-8a56-4d20-8f4e-2adb8a134bed-kube-api-access-9597f\") pod \"install-os-openstack-openstack-cell1-2pnf6\" (UID: \"94d4ca2f-8a56-4d20-8f4e-2adb8a134bed\") " pod="openstack/install-os-openstack-openstack-cell1-2pnf6" Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.538634 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/94d4ca2f-8a56-4d20-8f4e-2adb8a134bed-ssh-key\") pod \"install-os-openstack-openstack-cell1-2pnf6\" (UID: \"94d4ca2f-8a56-4d20-8f4e-2adb8a134bed\") " pod="openstack/install-os-openstack-openstack-cell1-2pnf6" Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.539074 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/94d4ca2f-8a56-4d20-8f4e-2adb8a134bed-ceph\") pod \"install-os-openstack-openstack-cell1-2pnf6\" (UID: \"94d4ca2f-8a56-4d20-8f4e-2adb8a134bed\") " pod="openstack/install-os-openstack-openstack-cell1-2pnf6" Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.641414 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/94d4ca2f-8a56-4d20-8f4e-2adb8a134bed-ceph\") pod \"install-os-openstack-openstack-cell1-2pnf6\" (UID: \"94d4ca2f-8a56-4d20-8f4e-2adb8a134bed\") " pod="openstack/install-os-openstack-openstack-cell1-2pnf6" Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.641500 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/94d4ca2f-8a56-4d20-8f4e-2adb8a134bed-inventory\") pod \"install-os-openstack-openstack-cell1-2pnf6\" (UID: \"94d4ca2f-8a56-4d20-8f4e-2adb8a134bed\") " pod="openstack/install-os-openstack-openstack-cell1-2pnf6" Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.641565 4799 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-9597f\" (UniqueName: \"kubernetes.io/projected/94d4ca2f-8a56-4d20-8f4e-2adb8a134bed-kube-api-access-9597f\") pod \"install-os-openstack-openstack-cell1-2pnf6\" (UID: \"94d4ca2f-8a56-4d20-8f4e-2adb8a134bed\") " pod="openstack/install-os-openstack-openstack-cell1-2pnf6" Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.641603 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/94d4ca2f-8a56-4d20-8f4e-2adb8a134bed-ssh-key\") pod \"install-os-openstack-openstack-cell1-2pnf6\" (UID: \"94d4ca2f-8a56-4d20-8f4e-2adb8a134bed\") " pod="openstack/install-os-openstack-openstack-cell1-2pnf6" Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.645353 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/94d4ca2f-8a56-4d20-8f4e-2adb8a134bed-ssh-key\") pod \"install-os-openstack-openstack-cell1-2pnf6\" (UID: \"94d4ca2f-8a56-4d20-8f4e-2adb8a134bed\") " pod="openstack/install-os-openstack-openstack-cell1-2pnf6" Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.648236 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/94d4ca2f-8a56-4d20-8f4e-2adb8a134bed-ceph\") pod \"install-os-openstack-openstack-cell1-2pnf6\" (UID: \"94d4ca2f-8a56-4d20-8f4e-2adb8a134bed\") " pod="openstack/install-os-openstack-openstack-cell1-2pnf6" Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.648936 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/94d4ca2f-8a56-4d20-8f4e-2adb8a134bed-inventory\") pod \"install-os-openstack-openstack-cell1-2pnf6\" (UID: \"94d4ca2f-8a56-4d20-8f4e-2adb8a134bed\") " pod="openstack/install-os-openstack-openstack-cell1-2pnf6" Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.674622 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9597f\" (UniqueName: \"kubernetes.io/projected/94d4ca2f-8a56-4d20-8f4e-2adb8a134bed-kube-api-access-9597f\") pod \"install-os-openstack-openstack-cell1-2pnf6\" (UID: \"94d4ca2f-8a56-4d20-8f4e-2adb8a134bed\") " pod="openstack/install-os-openstack-openstack-cell1-2pnf6" Oct 10 18:36:49 crc kubenswrapper[4799]: I1010 18:36:49.780062 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-2pnf6" Oct 10 18:36:50 crc kubenswrapper[4799]: I1010 18:36:50.488923 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-openstack-openstack-cell1-2pnf6"] Oct 10 18:36:50 crc kubenswrapper[4799]: W1010 18:36:50.494697 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod94d4ca2f_8a56_4d20_8f4e_2adb8a134bed.slice/crio-790cee7b8b887b946ca0efad650d7a019492f7232a68baf313f25d210543fb81 WatchSource:0}: Error finding container 790cee7b8b887b946ca0efad650d7a019492f7232a68baf313f25d210543fb81: Status 404 returned error can't find the container with id 790cee7b8b887b946ca0efad650d7a019492f7232a68baf313f25d210543fb81 Oct 10 18:36:51 crc kubenswrapper[4799]: I1010 18:36:51.441713 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-2pnf6" event={"ID":"94d4ca2f-8a56-4d20-8f4e-2adb8a134bed","Type":"ContainerStarted","Data":"e1ec07752212ee37b926897bf5f8879319cdabe2c15f66211aac4a713ffafd7f"} Oct 10 18:36:51 crc kubenswrapper[4799]: I1010 18:36:51.442621 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-2pnf6" event={"ID":"94d4ca2f-8a56-4d20-8f4e-2adb8a134bed","Type":"ContainerStarted","Data":"790cee7b8b887b946ca0efad650d7a019492f7232a68baf313f25d210543fb81"} Oct 10 18:36:51 crc kubenswrapper[4799]: I1010 18:36:51.447579 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-openstack-openstack-cell1-2pnf6" podStartSLOduration=1.908230934 podStartE2EDuration="2.44755722s" podCreationTimestamp="2025-10-10 18:36:49 +0000 UTC" firstStartedPulling="2025-10-10 18:36:50.49977699 +0000 UTC m=+7504.008101115" lastFinishedPulling="2025-10-10 18:36:51.039103256 +0000 UTC m=+7504.547427401" observedRunningTime="2025-10-10 18:36:51.437267118 +0000 UTC m=+7504.945591253" watchObservedRunningTime="2025-10-10 18:36:51.44755722 +0000 UTC m=+7504.955881345" Oct 10 18:37:38 crc kubenswrapper[4799]: I1010 18:37:38.001894 4799 generic.go:334] "Generic (PLEG): container finished" podID="94d4ca2f-8a56-4d20-8f4e-2adb8a134bed" containerID="e1ec07752212ee37b926897bf5f8879319cdabe2c15f66211aac4a713ffafd7f" exitCode=0 Oct 10 18:37:38 crc kubenswrapper[4799]: I1010 18:37:38.002007 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-2pnf6" event={"ID":"94d4ca2f-8a56-4d20-8f4e-2adb8a134bed","Type":"ContainerDied","Data":"e1ec07752212ee37b926897bf5f8879319cdabe2c15f66211aac4a713ffafd7f"} Oct 10 18:37:39 crc kubenswrapper[4799]: I1010 18:37:39.762325 4799 util.go:48] "No ready sandbox for pod can be found. 
Oct 10 18:37:39 crc kubenswrapper[4799]: I1010 18:37:39.949342 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/94d4ca2f-8a56-4d20-8f4e-2adb8a134bed-inventory\") pod \"94d4ca2f-8a56-4d20-8f4e-2adb8a134bed\" (UID: \"94d4ca2f-8a56-4d20-8f4e-2adb8a134bed\") "
Oct 10 18:37:39 crc kubenswrapper[4799]: I1010 18:37:39.949852 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/94d4ca2f-8a56-4d20-8f4e-2adb8a134bed-ceph\") pod \"94d4ca2f-8a56-4d20-8f4e-2adb8a134bed\" (UID: \"94d4ca2f-8a56-4d20-8f4e-2adb8a134bed\") "
Oct 10 18:37:39 crc kubenswrapper[4799]: I1010 18:37:39.949916 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9597f\" (UniqueName: \"kubernetes.io/projected/94d4ca2f-8a56-4d20-8f4e-2adb8a134bed-kube-api-access-9597f\") pod \"94d4ca2f-8a56-4d20-8f4e-2adb8a134bed\" (UID: \"94d4ca2f-8a56-4d20-8f4e-2adb8a134bed\") "
Oct 10 18:37:39 crc kubenswrapper[4799]: I1010 18:37:39.950049 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/94d4ca2f-8a56-4d20-8f4e-2adb8a134bed-ssh-key\") pod \"94d4ca2f-8a56-4d20-8f4e-2adb8a134bed\" (UID: \"94d4ca2f-8a56-4d20-8f4e-2adb8a134bed\") "
Oct 10 18:37:39 crc kubenswrapper[4799]: I1010 18:37:39.956314 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94d4ca2f-8a56-4d20-8f4e-2adb8a134bed-ceph" (OuterVolumeSpecName: "ceph") pod "94d4ca2f-8a56-4d20-8f4e-2adb8a134bed" (UID: "94d4ca2f-8a56-4d20-8f4e-2adb8a134bed"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:37:39 crc kubenswrapper[4799]: I1010 18:37:39.969069 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94d4ca2f-8a56-4d20-8f4e-2adb8a134bed-kube-api-access-9597f" (OuterVolumeSpecName: "kube-api-access-9597f") pod "94d4ca2f-8a56-4d20-8f4e-2adb8a134bed" (UID: "94d4ca2f-8a56-4d20-8f4e-2adb8a134bed"). InnerVolumeSpecName "kube-api-access-9597f". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 18:37:39 crc kubenswrapper[4799]: I1010 18:37:39.981952 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94d4ca2f-8a56-4d20-8f4e-2adb8a134bed-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "94d4ca2f-8a56-4d20-8f4e-2adb8a134bed" (UID: "94d4ca2f-8a56-4d20-8f4e-2adb8a134bed"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:37:39 crc kubenswrapper[4799]: I1010 18:37:39.998219 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94d4ca2f-8a56-4d20-8f4e-2adb8a134bed-inventory" (OuterVolumeSpecName: "inventory") pod "94d4ca2f-8a56-4d20-8f4e-2adb8a134bed" (UID: "94d4ca2f-8a56-4d20-8f4e-2adb8a134bed"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:37:40 crc kubenswrapper[4799]: I1010 18:37:40.033545 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-2pnf6" event={"ID":"94d4ca2f-8a56-4d20-8f4e-2adb8a134bed","Type":"ContainerDied","Data":"790cee7b8b887b946ca0efad650d7a019492f7232a68baf313f25d210543fb81"}
Oct 10 18:37:40 crc kubenswrapper[4799]: I1010 18:37:40.033580 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="790cee7b8b887b946ca0efad650d7a019492f7232a68baf313f25d210543fb81"
Oct 10 18:37:40 crc kubenswrapper[4799]: I1010 18:37:40.033611 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-2pnf6"
Oct 10 18:37:40 crc kubenswrapper[4799]: I1010 18:37:40.056807 4799 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/94d4ca2f-8a56-4d20-8f4e-2adb8a134bed-ceph\") on node \"crc\" DevicePath \"\""
Oct 10 18:37:40 crc kubenswrapper[4799]: I1010 18:37:40.056864 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9597f\" (UniqueName: \"kubernetes.io/projected/94d4ca2f-8a56-4d20-8f4e-2adb8a134bed-kube-api-access-9597f\") on node \"crc\" DevicePath \"\""
Oct 10 18:37:40 crc kubenswrapper[4799]: I1010 18:37:40.056884 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/94d4ca2f-8a56-4d20-8f4e-2adb8a134bed-ssh-key\") on node \"crc\" DevicePath \"\""
Oct 10 18:37:40 crc kubenswrapper[4799]: I1010 18:37:40.056901 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/94d4ca2f-8a56-4d20-8f4e-2adb8a134bed-inventory\") on node \"crc\" DevicePath \"\""
Oct 10 18:37:40 crc kubenswrapper[4799]: I1010 18:37:40.105948 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-wwqb7"]
Oct 10 18:37:40 crc kubenswrapper[4799]: E1010 18:37:40.106325 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94d4ca2f-8a56-4d20-8f4e-2adb8a134bed" containerName="install-os-openstack-openstack-cell1"
Oct 10 18:37:40 crc kubenswrapper[4799]: I1010 18:37:40.106342 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="94d4ca2f-8a56-4d20-8f4e-2adb8a134bed" containerName="install-os-openstack-openstack-cell1"
Oct 10 18:37:40 crc kubenswrapper[4799]: I1010 18:37:40.106561 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="94d4ca2f-8a56-4d20-8f4e-2adb8a134bed" containerName="install-os-openstack-openstack-cell1"
Oct 10 18:37:40 crc kubenswrapper[4799]: I1010 18:37:40.107215 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-wwqb7"
Oct 10 18:37:40 crc kubenswrapper[4799]: I1010 18:37:40.109079 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-rdlhr"
Oct 10 18:37:40 crc kubenswrapper[4799]: I1010 18:37:40.109675 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Oct 10 18:37:40 crc kubenswrapper[4799]: I1010 18:37:40.109930 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret"
Oct 10 18:37:40 crc kubenswrapper[4799]: I1010 18:37:40.110418 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 10 18:37:40 crc kubenswrapper[4799]: I1010 18:37:40.125391 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-wwqb7"]
Oct 10 18:37:40 crc kubenswrapper[4799]: I1010 18:37:40.157921 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fn2s\" (UniqueName: \"kubernetes.io/projected/49cf2daa-8a65-4dfd-bd0f-cf28839297fd-kube-api-access-2fn2s\") pod \"configure-os-openstack-openstack-cell1-wwqb7\" (UID: \"49cf2daa-8a65-4dfd-bd0f-cf28839297fd\") " pod="openstack/configure-os-openstack-openstack-cell1-wwqb7"
Oct 10 18:37:40 crc kubenswrapper[4799]: I1010 18:37:40.158118 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/49cf2daa-8a65-4dfd-bd0f-cf28839297fd-ceph\") pod \"configure-os-openstack-openstack-cell1-wwqb7\" (UID: \"49cf2daa-8a65-4dfd-bd0f-cf28839297fd\") " pod="openstack/configure-os-openstack-openstack-cell1-wwqb7"
Oct 10 18:37:40 crc kubenswrapper[4799]: I1010 18:37:40.158171 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/49cf2daa-8a65-4dfd-bd0f-cf28839297fd-ssh-key\") pod \"configure-os-openstack-openstack-cell1-wwqb7\" (UID: \"49cf2daa-8a65-4dfd-bd0f-cf28839297fd\") " pod="openstack/configure-os-openstack-openstack-cell1-wwqb7"
Oct 10 18:37:40 crc kubenswrapper[4799]: I1010 18:37:40.158198 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/49cf2daa-8a65-4dfd-bd0f-cf28839297fd-inventory\") pod \"configure-os-openstack-openstack-cell1-wwqb7\" (UID: \"49cf2daa-8a65-4dfd-bd0f-cf28839297fd\") " pod="openstack/configure-os-openstack-openstack-cell1-wwqb7"
Oct 10 18:37:40 crc kubenswrapper[4799]: I1010 18:37:40.259778 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/49cf2daa-8a65-4dfd-bd0f-cf28839297fd-ceph\") pod \"configure-os-openstack-openstack-cell1-wwqb7\" (UID: \"49cf2daa-8a65-4dfd-bd0f-cf28839297fd\") " pod="openstack/configure-os-openstack-openstack-cell1-wwqb7"
Oct 10 18:37:40 crc kubenswrapper[4799]: I1010 18:37:40.259851 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/49cf2daa-8a65-4dfd-bd0f-cf28839297fd-ssh-key\") pod \"configure-os-openstack-openstack-cell1-wwqb7\" (UID: \"49cf2daa-8a65-4dfd-bd0f-cf28839297fd\") " pod="openstack/configure-os-openstack-openstack-cell1-wwqb7"
Oct 10 18:37:40 crc kubenswrapper[4799]: I1010 18:37:40.259881 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/49cf2daa-8a65-4dfd-bd0f-cf28839297fd-inventory\") pod \"configure-os-openstack-openstack-cell1-wwqb7\" (UID: \"49cf2daa-8a65-4dfd-bd0f-cf28839297fd\") " pod="openstack/configure-os-openstack-openstack-cell1-wwqb7"
Oct 10 18:37:40 crc kubenswrapper[4799]: I1010 18:37:40.259990 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fn2s\" (UniqueName: \"kubernetes.io/projected/49cf2daa-8a65-4dfd-bd0f-cf28839297fd-kube-api-access-2fn2s\") pod \"configure-os-openstack-openstack-cell1-wwqb7\" (UID: \"49cf2daa-8a65-4dfd-bd0f-cf28839297fd\") " pod="openstack/configure-os-openstack-openstack-cell1-wwqb7"
Oct 10 18:37:40 crc kubenswrapper[4799]: I1010 18:37:40.272579 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/49cf2daa-8a65-4dfd-bd0f-cf28839297fd-ssh-key\") pod \"configure-os-openstack-openstack-cell1-wwqb7\" (UID: \"49cf2daa-8a65-4dfd-bd0f-cf28839297fd\") " pod="openstack/configure-os-openstack-openstack-cell1-wwqb7"
Oct 10 18:37:40 crc kubenswrapper[4799]: I1010 18:37:40.272603 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/49cf2daa-8a65-4dfd-bd0f-cf28839297fd-ceph\") pod \"configure-os-openstack-openstack-cell1-wwqb7\" (UID: \"49cf2daa-8a65-4dfd-bd0f-cf28839297fd\") " pod="openstack/configure-os-openstack-openstack-cell1-wwqb7"
Oct 10 18:37:40 crc kubenswrapper[4799]: I1010 18:37:40.272627 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/49cf2daa-8a65-4dfd-bd0f-cf28839297fd-inventory\") pod \"configure-os-openstack-openstack-cell1-wwqb7\" (UID: \"49cf2daa-8a65-4dfd-bd0f-cf28839297fd\") " pod="openstack/configure-os-openstack-openstack-cell1-wwqb7"
Oct 10 18:37:40 crc kubenswrapper[4799]: I1010 18:37:40.276311 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fn2s\" (UniqueName: \"kubernetes.io/projected/49cf2daa-8a65-4dfd-bd0f-cf28839297fd-kube-api-access-2fn2s\") pod \"configure-os-openstack-openstack-cell1-wwqb7\" (UID: \"49cf2daa-8a65-4dfd-bd0f-cf28839297fd\") " pod="openstack/configure-os-openstack-openstack-cell1-wwqb7"
Oct 10 18:37:40 crc kubenswrapper[4799]: I1010 18:37:40.429200 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-wwqb7"
Oct 10 18:37:41 crc kubenswrapper[4799]: I1010 18:37:41.011151 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-wwqb7"]
Oct 10 18:37:41 crc kubenswrapper[4799]: I1010 18:37:41.051452 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-wwqb7" event={"ID":"49cf2daa-8a65-4dfd-bd0f-cf28839297fd","Type":"ContainerStarted","Data":"a446c3f5ac051c6fea08c46d9c79fae9d5d3369a838e66e828f196f7a16915d5"}
Oct 10 18:37:42 crc kubenswrapper[4799]: I1010 18:37:42.061697 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-wwqb7" event={"ID":"49cf2daa-8a65-4dfd-bd0f-cf28839297fd","Type":"ContainerStarted","Data":"c0148e29a7bee89b1ab48243f41730275b9c28b647ce2638dce557eb18f0aeb5"}
Oct 10 18:37:42 crc kubenswrapper[4799]: I1010 18:37:42.086471 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-openstack-openstack-cell1-wwqb7" podStartSLOduration=1.624364835 podStartE2EDuration="2.086455831s" podCreationTimestamp="2025-10-10 18:37:40 +0000 UTC" firstStartedPulling="2025-10-10 18:37:41.029735996 +0000 UTC m=+7554.538060151" lastFinishedPulling="2025-10-10 18:37:41.491827002 +0000 UTC m=+7555.000151147" observedRunningTime="2025-10-10 18:37:42.076682132 +0000 UTC m=+7555.585006237" watchObservedRunningTime="2025-10-10 18:37:42.086455831 +0000 UTC m=+7555.594779946"
Oct 10 18:38:15 crc kubenswrapper[4799]: I1010 18:38:15.249148 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 10 18:38:15 crc kubenswrapper[4799]: I1010 18:38:15.249798 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 10 18:38:28 crc kubenswrapper[4799]: I1010 18:38:28.675492 4799 generic.go:334] "Generic (PLEG): container finished" podID="49cf2daa-8a65-4dfd-bd0f-cf28839297fd" containerID="c0148e29a7bee89b1ab48243f41730275b9c28b647ce2638dce557eb18f0aeb5" exitCode=0
Oct 10 18:38:28 crc kubenswrapper[4799]: I1010 18:38:28.675717 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-wwqb7" event={"ID":"49cf2daa-8a65-4dfd-bd0f-cf28839297fd","Type":"ContainerDied","Data":"c0148e29a7bee89b1ab48243f41730275b9c28b647ce2638dce557eb18f0aeb5"}
Oct 10 18:38:30 crc kubenswrapper[4799]: I1010 18:38:30.314081 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-wwqb7"
Oct 10 18:38:30 crc kubenswrapper[4799]: I1010 18:38:30.425270 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/49cf2daa-8a65-4dfd-bd0f-cf28839297fd-ssh-key\") pod \"49cf2daa-8a65-4dfd-bd0f-cf28839297fd\" (UID: \"49cf2daa-8a65-4dfd-bd0f-cf28839297fd\") "
Oct 10 18:38:30 crc kubenswrapper[4799]: I1010 18:38:30.425427 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/49cf2daa-8a65-4dfd-bd0f-cf28839297fd-inventory\") pod \"49cf2daa-8a65-4dfd-bd0f-cf28839297fd\" (UID: \"49cf2daa-8a65-4dfd-bd0f-cf28839297fd\") "
Oct 10 18:38:30 crc kubenswrapper[4799]: I1010 18:38:30.425492 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2fn2s\" (UniqueName: \"kubernetes.io/projected/49cf2daa-8a65-4dfd-bd0f-cf28839297fd-kube-api-access-2fn2s\") pod \"49cf2daa-8a65-4dfd-bd0f-cf28839297fd\" (UID: \"49cf2daa-8a65-4dfd-bd0f-cf28839297fd\") "
Oct 10 18:38:30 crc kubenswrapper[4799]: I1010 18:38:30.426969 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/49cf2daa-8a65-4dfd-bd0f-cf28839297fd-ceph\") pod \"49cf2daa-8a65-4dfd-bd0f-cf28839297fd\" (UID: \"49cf2daa-8a65-4dfd-bd0f-cf28839297fd\") "
Oct 10 18:38:30 crc kubenswrapper[4799]: I1010 18:38:30.434565 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49cf2daa-8a65-4dfd-bd0f-cf28839297fd-kube-api-access-2fn2s" (OuterVolumeSpecName: "kube-api-access-2fn2s") pod "49cf2daa-8a65-4dfd-bd0f-cf28839297fd" (UID: "49cf2daa-8a65-4dfd-bd0f-cf28839297fd"). InnerVolumeSpecName "kube-api-access-2fn2s". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 18:38:30 crc kubenswrapper[4799]: I1010 18:38:30.444236 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49cf2daa-8a65-4dfd-bd0f-cf28839297fd-ceph" (OuterVolumeSpecName: "ceph") pod "49cf2daa-8a65-4dfd-bd0f-cf28839297fd" (UID: "49cf2daa-8a65-4dfd-bd0f-cf28839297fd"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:38:30 crc kubenswrapper[4799]: I1010 18:38:30.478786 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49cf2daa-8a65-4dfd-bd0f-cf28839297fd-inventory" (OuterVolumeSpecName: "inventory") pod "49cf2daa-8a65-4dfd-bd0f-cf28839297fd" (UID: "49cf2daa-8a65-4dfd-bd0f-cf28839297fd"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:38:30 crc kubenswrapper[4799]: I1010 18:38:30.479291 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49cf2daa-8a65-4dfd-bd0f-cf28839297fd-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "49cf2daa-8a65-4dfd-bd0f-cf28839297fd" (UID: "49cf2daa-8a65-4dfd-bd0f-cf28839297fd"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 18:38:30 crc kubenswrapper[4799]: I1010 18:38:30.532275 4799 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/49cf2daa-8a65-4dfd-bd0f-cf28839297fd-ceph\") on node \"crc\" DevicePath \"\""
Oct 10 18:38:30 crc kubenswrapper[4799]: I1010 18:38:30.532301 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/49cf2daa-8a65-4dfd-bd0f-cf28839297fd-ssh-key\") on node \"crc\" DevicePath \"\""
Oct 10 18:38:30 crc kubenswrapper[4799]: I1010 18:38:30.532313 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/49cf2daa-8a65-4dfd-bd0f-cf28839297fd-inventory\") on node \"crc\" DevicePath \"\""
Oct 10 18:38:30 crc kubenswrapper[4799]: I1010 18:38:30.532326 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2fn2s\" (UniqueName: \"kubernetes.io/projected/49cf2daa-8a65-4dfd-bd0f-cf28839297fd-kube-api-access-2fn2s\") on node \"crc\" DevicePath \"\""
Oct 10 18:38:30 crc kubenswrapper[4799]: I1010 18:38:30.706489 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-wwqb7" event={"ID":"49cf2daa-8a65-4dfd-bd0f-cf28839297fd","Type":"ContainerDied","Data":"a446c3f5ac051c6fea08c46d9c79fae9d5d3369a838e66e828f196f7a16915d5"}
Oct 10 18:38:30 crc kubenswrapper[4799]: I1010 18:38:30.706788 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a446c3f5ac051c6fea08c46d9c79fae9d5d3369a838e66e828f196f7a16915d5"
Oct 10 18:38:30 crc kubenswrapper[4799]: I1010 18:38:30.706571 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-wwqb7"
Oct 10 18:38:30 crc kubenswrapper[4799]: I1010 18:38:30.823119 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-openstack-qnt8c"]
Oct 10 18:38:30 crc kubenswrapper[4799]: E1010 18:38:30.824204 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49cf2daa-8a65-4dfd-bd0f-cf28839297fd" containerName="configure-os-openstack-openstack-cell1"
Oct 10 18:38:30 crc kubenswrapper[4799]: I1010 18:38:30.824237 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="49cf2daa-8a65-4dfd-bd0f-cf28839297fd" containerName="configure-os-openstack-openstack-cell1"
Oct 10 18:38:30 crc kubenswrapper[4799]: I1010 18:38:30.824622 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="49cf2daa-8a65-4dfd-bd0f-cf28839297fd" containerName="configure-os-openstack-openstack-cell1"
Oct 10 18:38:30 crc kubenswrapper[4799]: I1010 18:38:30.825714 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-openstack-qnt8c"
Need to start a new one" pod="openstack/ssh-known-hosts-openstack-qnt8c" Oct 10 18:38:30 crc kubenswrapper[4799]: I1010 18:38:30.828124 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 10 18:38:30 crc kubenswrapper[4799]: I1010 18:38:30.828184 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-rdlhr" Oct 10 18:38:30 crc kubenswrapper[4799]: I1010 18:38:30.828549 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 10 18:38:30 crc kubenswrapper[4799]: I1010 18:38:30.828679 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 10 18:38:30 crc kubenswrapper[4799]: I1010 18:38:30.831397 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-openstack-qnt8c"] Oct 10 18:38:30 crc kubenswrapper[4799]: I1010 18:38:30.940797 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2-inventory-0\") pod \"ssh-known-hosts-openstack-qnt8c\" (UID: \"24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2\") " pod="openstack/ssh-known-hosts-openstack-qnt8c" Oct 10 18:38:30 crc kubenswrapper[4799]: I1010 18:38:30.940864 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2-ceph\") pod \"ssh-known-hosts-openstack-qnt8c\" (UID: \"24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2\") " pod="openstack/ssh-known-hosts-openstack-qnt8c" Oct 10 18:38:30 crc kubenswrapper[4799]: I1010 18:38:30.940894 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xks7w\" (UniqueName: \"kubernetes.io/projected/24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2-kube-api-access-xks7w\") pod \"ssh-known-hosts-openstack-qnt8c\" (UID: \"24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2\") " pod="openstack/ssh-known-hosts-openstack-qnt8c" Oct 10 18:38:30 crc kubenswrapper[4799]: I1010 18:38:30.940982 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-qnt8c\" (UID: \"24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2\") " pod="openstack/ssh-known-hosts-openstack-qnt8c" Oct 10 18:38:31 crc kubenswrapper[4799]: I1010 18:38:31.043255 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2-inventory-0\") pod \"ssh-known-hosts-openstack-qnt8c\" (UID: \"24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2\") " pod="openstack/ssh-known-hosts-openstack-qnt8c" Oct 10 18:38:31 crc kubenswrapper[4799]: I1010 18:38:31.043315 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2-ceph\") pod \"ssh-known-hosts-openstack-qnt8c\" (UID: \"24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2\") " pod="openstack/ssh-known-hosts-openstack-qnt8c" Oct 10 18:38:31 crc kubenswrapper[4799]: I1010 18:38:31.043335 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xks7w\" (UniqueName: 
\"kubernetes.io/projected/24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2-kube-api-access-xks7w\") pod \"ssh-known-hosts-openstack-qnt8c\" (UID: \"24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2\") " pod="openstack/ssh-known-hosts-openstack-qnt8c" Oct 10 18:38:31 crc kubenswrapper[4799]: I1010 18:38:31.043381 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-qnt8c\" (UID: \"24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2\") " pod="openstack/ssh-known-hosts-openstack-qnt8c" Oct 10 18:38:31 crc kubenswrapper[4799]: I1010 18:38:31.047271 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-qnt8c\" (UID: \"24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2\") " pod="openstack/ssh-known-hosts-openstack-qnt8c" Oct 10 18:38:31 crc kubenswrapper[4799]: I1010 18:38:31.052524 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2-ceph\") pod \"ssh-known-hosts-openstack-qnt8c\" (UID: \"24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2\") " pod="openstack/ssh-known-hosts-openstack-qnt8c" Oct 10 18:38:31 crc kubenswrapper[4799]: I1010 18:38:31.052567 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2-inventory-0\") pod \"ssh-known-hosts-openstack-qnt8c\" (UID: \"24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2\") " pod="openstack/ssh-known-hosts-openstack-qnt8c" Oct 10 18:38:31 crc kubenswrapper[4799]: I1010 18:38:31.064966 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xks7w\" (UniqueName: \"kubernetes.io/projected/24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2-kube-api-access-xks7w\") pod \"ssh-known-hosts-openstack-qnt8c\" (UID: \"24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2\") " pod="openstack/ssh-known-hosts-openstack-qnt8c" Oct 10 18:38:31 crc kubenswrapper[4799]: I1010 18:38:31.196036 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-openstack-qnt8c" Oct 10 18:38:31 crc kubenswrapper[4799]: I1010 18:38:31.892280 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-openstack-qnt8c"] Oct 10 18:38:32 crc kubenswrapper[4799]: I1010 18:38:32.733152 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-qnt8c" event={"ID":"24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2","Type":"ContainerStarted","Data":"29f7e45800b3d49e7084feb47817094a48b8cd6c838800eb43c8307a33e7e31a"} Oct 10 18:38:32 crc kubenswrapper[4799]: I1010 18:38:32.756534 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-openstack-qnt8c" podStartSLOduration=2.254016601 podStartE2EDuration="2.756517086s" podCreationTimestamp="2025-10-10 18:38:30 +0000 UTC" firstStartedPulling="2025-10-10 18:38:31.886114409 +0000 UTC m=+7605.394438534" lastFinishedPulling="2025-10-10 18:38:32.388614904 +0000 UTC m=+7605.896939019" observedRunningTime="2025-10-10 18:38:32.752646591 +0000 UTC m=+7606.260970726" watchObservedRunningTime="2025-10-10 18:38:32.756517086 +0000 UTC m=+7606.264841201" Oct 10 18:38:33 crc kubenswrapper[4799]: I1010 18:38:33.749013 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-qnt8c" event={"ID":"24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2","Type":"ContainerStarted","Data":"11b99d020c45b483626ad592839508977815e23e0ff57a575dd645ce107677e5"} Oct 10 18:38:42 crc kubenswrapper[4799]: I1010 18:38:42.910902 4799 generic.go:334] "Generic (PLEG): container finished" podID="24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2" containerID="11b99d020c45b483626ad592839508977815e23e0ff57a575dd645ce107677e5" exitCode=0 Oct 10 18:38:42 crc kubenswrapper[4799]: I1010 18:38:42.911128 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-qnt8c" event={"ID":"24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2","Type":"ContainerDied","Data":"11b99d020c45b483626ad592839508977815e23e0ff57a575dd645ce107677e5"} Oct 10 18:38:44 crc kubenswrapper[4799]: I1010 18:38:44.385151 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-openstack-qnt8c" Oct 10 18:38:44 crc kubenswrapper[4799]: I1010 18:38:44.427835 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2-ssh-key-openstack-cell1\") pod \"24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2\" (UID: \"24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2\") " Oct 10 18:38:44 crc kubenswrapper[4799]: I1010 18:38:44.428077 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2-ceph\") pod \"24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2\" (UID: \"24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2\") " Oct 10 18:38:44 crc kubenswrapper[4799]: I1010 18:38:44.428141 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2-inventory-0\") pod \"24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2\" (UID: \"24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2\") " Oct 10 18:38:44 crc kubenswrapper[4799]: I1010 18:38:44.428211 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xks7w\" (UniqueName: \"kubernetes.io/projected/24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2-kube-api-access-xks7w\") pod \"24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2\" (UID: \"24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2\") " Oct 10 18:38:44 crc kubenswrapper[4799]: I1010 18:38:44.454178 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2-kube-api-access-xks7w" (OuterVolumeSpecName: "kube-api-access-xks7w") pod "24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2" (UID: "24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2"). InnerVolumeSpecName "kube-api-access-xks7w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:38:44 crc kubenswrapper[4799]: I1010 18:38:44.454365 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2-ceph" (OuterVolumeSpecName: "ceph") pod "24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2" (UID: "24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:38:44 crc kubenswrapper[4799]: I1010 18:38:44.481879 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2" (UID: "24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:38:44 crc kubenswrapper[4799]: I1010 18:38:44.486861 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2" (UID: "24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2"). InnerVolumeSpecName "inventory-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:38:44 crc kubenswrapper[4799]: I1010 18:38:44.530836 4799 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2-ceph\") on node \"crc\" DevicePath \"\"" Oct 10 18:38:44 crc kubenswrapper[4799]: I1010 18:38:44.530874 4799 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2-inventory-0\") on node \"crc\" DevicePath \"\"" Oct 10 18:38:44 crc kubenswrapper[4799]: I1010 18:38:44.530890 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xks7w\" (UniqueName: \"kubernetes.io/projected/24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2-kube-api-access-xks7w\") on node \"crc\" DevicePath \"\"" Oct 10 18:38:44 crc kubenswrapper[4799]: I1010 18:38:44.530901 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Oct 10 18:38:44 crc kubenswrapper[4799]: I1010 18:38:44.950167 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-qnt8c" event={"ID":"24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2","Type":"ContainerDied","Data":"29f7e45800b3d49e7084feb47817094a48b8cd6c838800eb43c8307a33e7e31a"} Oct 10 18:38:44 crc kubenswrapper[4799]: I1010 18:38:44.950200 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-openstack-qnt8c" Oct 10 18:38:44 crc kubenswrapper[4799]: I1010 18:38:44.950215 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="29f7e45800b3d49e7084feb47817094a48b8cd6c838800eb43c8307a33e7e31a" Oct 10 18:38:45 crc kubenswrapper[4799]: I1010 18:38:45.038500 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-openstack-openstack-cell1-25fp2"] Oct 10 18:38:45 crc kubenswrapper[4799]: E1010 18:38:45.040083 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2" containerName="ssh-known-hosts-openstack" Oct 10 18:38:45 crc kubenswrapper[4799]: I1010 18:38:45.040112 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2" containerName="ssh-known-hosts-openstack" Oct 10 18:38:45 crc kubenswrapper[4799]: I1010 18:38:45.040424 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2" containerName="ssh-known-hosts-openstack" Oct 10 18:38:45 crc kubenswrapper[4799]: I1010 18:38:45.041288 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-25fp2" Oct 10 18:38:45 crc kubenswrapper[4799]: I1010 18:38:45.048233 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 10 18:38:45 crc kubenswrapper[4799]: I1010 18:38:45.048554 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 10 18:38:45 crc kubenswrapper[4799]: I1010 18:38:45.048940 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 10 18:38:45 crc kubenswrapper[4799]: I1010 18:38:45.048980 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-rdlhr" Oct 10 18:38:45 crc kubenswrapper[4799]: I1010 18:38:45.050670 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-openstack-openstack-cell1-25fp2"] Oct 10 18:38:45 crc kubenswrapper[4799]: I1010 18:38:45.144994 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/18788551-0d2a-43c8-9aa2-d712be9c3c9f-ssh-key\") pod \"run-os-openstack-openstack-cell1-25fp2\" (UID: \"18788551-0d2a-43c8-9aa2-d712be9c3c9f\") " pod="openstack/run-os-openstack-openstack-cell1-25fp2" Oct 10 18:38:45 crc kubenswrapper[4799]: I1010 18:38:45.145281 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vr9tt\" (UniqueName: \"kubernetes.io/projected/18788551-0d2a-43c8-9aa2-d712be9c3c9f-kube-api-access-vr9tt\") pod \"run-os-openstack-openstack-cell1-25fp2\" (UID: \"18788551-0d2a-43c8-9aa2-d712be9c3c9f\") " pod="openstack/run-os-openstack-openstack-cell1-25fp2" Oct 10 18:38:45 crc kubenswrapper[4799]: I1010 18:38:45.145432 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/18788551-0d2a-43c8-9aa2-d712be9c3c9f-ceph\") pod \"run-os-openstack-openstack-cell1-25fp2\" (UID: \"18788551-0d2a-43c8-9aa2-d712be9c3c9f\") " pod="openstack/run-os-openstack-openstack-cell1-25fp2" Oct 10 18:38:45 crc kubenswrapper[4799]: I1010 18:38:45.146550 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/18788551-0d2a-43c8-9aa2-d712be9c3c9f-inventory\") pod \"run-os-openstack-openstack-cell1-25fp2\" (UID: \"18788551-0d2a-43c8-9aa2-d712be9c3c9f\") " pod="openstack/run-os-openstack-openstack-cell1-25fp2" Oct 10 18:38:45 crc kubenswrapper[4799]: I1010 18:38:45.248626 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 18:38:45 crc kubenswrapper[4799]: I1010 18:38:45.248693 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 18:38:45 crc kubenswrapper[4799]: I1010 18:38:45.249209 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/18788551-0d2a-43c8-9aa2-d712be9c3c9f-ssh-key\") pod \"run-os-openstack-openstack-cell1-25fp2\" (UID: \"18788551-0d2a-43c8-9aa2-d712be9c3c9f\") " pod="openstack/run-os-openstack-openstack-cell1-25fp2" Oct 10 18:38:45 crc kubenswrapper[4799]: I1010 18:38:45.249269 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vr9tt\" (UniqueName: \"kubernetes.io/projected/18788551-0d2a-43c8-9aa2-d712be9c3c9f-kube-api-access-vr9tt\") pod \"run-os-openstack-openstack-cell1-25fp2\" (UID: \"18788551-0d2a-43c8-9aa2-d712be9c3c9f\") " pod="openstack/run-os-openstack-openstack-cell1-25fp2" Oct 10 18:38:45 crc kubenswrapper[4799]: I1010 18:38:45.249336 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/18788551-0d2a-43c8-9aa2-d712be9c3c9f-ceph\") pod \"run-os-openstack-openstack-cell1-25fp2\" (UID: \"18788551-0d2a-43c8-9aa2-d712be9c3c9f\") " pod="openstack/run-os-openstack-openstack-cell1-25fp2" Oct 10 18:38:45 crc kubenswrapper[4799]: I1010 18:38:45.249549 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/18788551-0d2a-43c8-9aa2-d712be9c3c9f-inventory\") pod \"run-os-openstack-openstack-cell1-25fp2\" (UID: \"18788551-0d2a-43c8-9aa2-d712be9c3c9f\") " pod="openstack/run-os-openstack-openstack-cell1-25fp2" Oct 10 18:38:45 crc kubenswrapper[4799]: I1010 18:38:45.254944 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/18788551-0d2a-43c8-9aa2-d712be9c3c9f-ceph\") pod \"run-os-openstack-openstack-cell1-25fp2\" (UID: \"18788551-0d2a-43c8-9aa2-d712be9c3c9f\") " pod="openstack/run-os-openstack-openstack-cell1-25fp2" Oct 10 18:38:45 crc kubenswrapper[4799]: I1010 18:38:45.255711 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/18788551-0d2a-43c8-9aa2-d712be9c3c9f-ssh-key\") pod \"run-os-openstack-openstack-cell1-25fp2\" (UID: \"18788551-0d2a-43c8-9aa2-d712be9c3c9f\") " pod="openstack/run-os-openstack-openstack-cell1-25fp2" Oct 10 18:38:45 crc kubenswrapper[4799]: I1010 18:38:45.256688 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/18788551-0d2a-43c8-9aa2-d712be9c3c9f-inventory\") pod \"run-os-openstack-openstack-cell1-25fp2\" (UID: \"18788551-0d2a-43c8-9aa2-d712be9c3c9f\") " pod="openstack/run-os-openstack-openstack-cell1-25fp2" Oct 10 18:38:45 crc kubenswrapper[4799]: I1010 18:38:45.268284 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vr9tt\" (UniqueName: \"kubernetes.io/projected/18788551-0d2a-43c8-9aa2-d712be9c3c9f-kube-api-access-vr9tt\") pod \"run-os-openstack-openstack-cell1-25fp2\" (UID: \"18788551-0d2a-43c8-9aa2-d712be9c3c9f\") " pod="openstack/run-os-openstack-openstack-cell1-25fp2" Oct 10 18:38:45 crc kubenswrapper[4799]: I1010 18:38:45.373898 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-25fp2" Oct 10 18:38:45 crc kubenswrapper[4799]: I1010 18:38:45.979865 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-openstack-openstack-cell1-25fp2"] Oct 10 18:38:46 crc kubenswrapper[4799]: I1010 18:38:46.976834 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-25fp2" event={"ID":"18788551-0d2a-43c8-9aa2-d712be9c3c9f","Type":"ContainerStarted","Data":"cb0fd22876438195f53753f7e6ec8306db073761d7221116ed119b24421d7d92"} Oct 10 18:38:46 crc kubenswrapper[4799]: I1010 18:38:46.977223 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-25fp2" event={"ID":"18788551-0d2a-43c8-9aa2-d712be9c3c9f","Type":"ContainerStarted","Data":"14ff713a31ed4915ff0c6cc3c3c5a8463772b2e281f6f243b93a4df2964d7740"} Oct 10 18:38:47 crc kubenswrapper[4799]: I1010 18:38:47.006261 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-openstack-openstack-cell1-25fp2" podStartSLOduration=1.55054618 podStartE2EDuration="2.00623956s" podCreationTimestamp="2025-10-10 18:38:45 +0000 UTC" firstStartedPulling="2025-10-10 18:38:45.97877139 +0000 UTC m=+7619.487095515" lastFinishedPulling="2025-10-10 18:38:46.43446477 +0000 UTC m=+7619.942788895" observedRunningTime="2025-10-10 18:38:46.998453219 +0000 UTC m=+7620.506777334" watchObservedRunningTime="2025-10-10 18:38:47.00623956 +0000 UTC m=+7620.514563695" Oct 10 18:38:55 crc kubenswrapper[4799]: I1010 18:38:55.088876 4799 generic.go:334] "Generic (PLEG): container finished" podID="18788551-0d2a-43c8-9aa2-d712be9c3c9f" containerID="cb0fd22876438195f53753f7e6ec8306db073761d7221116ed119b24421d7d92" exitCode=0 Oct 10 18:38:55 crc kubenswrapper[4799]: I1010 18:38:55.088964 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-25fp2" event={"ID":"18788551-0d2a-43c8-9aa2-d712be9c3c9f","Type":"ContainerDied","Data":"cb0fd22876438195f53753f7e6ec8306db073761d7221116ed119b24421d7d92"} Oct 10 18:38:56 crc kubenswrapper[4799]: I1010 18:38:56.653443 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-25fp2" Oct 10 18:38:56 crc kubenswrapper[4799]: I1010 18:38:56.723481 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/18788551-0d2a-43c8-9aa2-d712be9c3c9f-ceph\") pod \"18788551-0d2a-43c8-9aa2-d712be9c3c9f\" (UID: \"18788551-0d2a-43c8-9aa2-d712be9c3c9f\") " Oct 10 18:38:56 crc kubenswrapper[4799]: I1010 18:38:56.723548 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vr9tt\" (UniqueName: \"kubernetes.io/projected/18788551-0d2a-43c8-9aa2-d712be9c3c9f-kube-api-access-vr9tt\") pod \"18788551-0d2a-43c8-9aa2-d712be9c3c9f\" (UID: \"18788551-0d2a-43c8-9aa2-d712be9c3c9f\") " Oct 10 18:38:56 crc kubenswrapper[4799]: I1010 18:38:56.723572 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/18788551-0d2a-43c8-9aa2-d712be9c3c9f-inventory\") pod \"18788551-0d2a-43c8-9aa2-d712be9c3c9f\" (UID: \"18788551-0d2a-43c8-9aa2-d712be9c3c9f\") " Oct 10 18:38:56 crc kubenswrapper[4799]: I1010 18:38:56.723774 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/18788551-0d2a-43c8-9aa2-d712be9c3c9f-ssh-key\") pod \"18788551-0d2a-43c8-9aa2-d712be9c3c9f\" (UID: \"18788551-0d2a-43c8-9aa2-d712be9c3c9f\") " Oct 10 18:38:56 crc kubenswrapper[4799]: I1010 18:38:56.735918 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18788551-0d2a-43c8-9aa2-d712be9c3c9f-kube-api-access-vr9tt" (OuterVolumeSpecName: "kube-api-access-vr9tt") pod "18788551-0d2a-43c8-9aa2-d712be9c3c9f" (UID: "18788551-0d2a-43c8-9aa2-d712be9c3c9f"). InnerVolumeSpecName "kube-api-access-vr9tt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:38:56 crc kubenswrapper[4799]: I1010 18:38:56.735947 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18788551-0d2a-43c8-9aa2-d712be9c3c9f-ceph" (OuterVolumeSpecName: "ceph") pod "18788551-0d2a-43c8-9aa2-d712be9c3c9f" (UID: "18788551-0d2a-43c8-9aa2-d712be9c3c9f"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:38:56 crc kubenswrapper[4799]: I1010 18:38:56.762857 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18788551-0d2a-43c8-9aa2-d712be9c3c9f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "18788551-0d2a-43c8-9aa2-d712be9c3c9f" (UID: "18788551-0d2a-43c8-9aa2-d712be9c3c9f"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:38:56 crc kubenswrapper[4799]: I1010 18:38:56.781269 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18788551-0d2a-43c8-9aa2-d712be9c3c9f-inventory" (OuterVolumeSpecName: "inventory") pod "18788551-0d2a-43c8-9aa2-d712be9c3c9f" (UID: "18788551-0d2a-43c8-9aa2-d712be9c3c9f"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:38:56 crc kubenswrapper[4799]: I1010 18:38:56.827892 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/18788551-0d2a-43c8-9aa2-d712be9c3c9f-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 10 18:38:56 crc kubenswrapper[4799]: I1010 18:38:56.827945 4799 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/18788551-0d2a-43c8-9aa2-d712be9c3c9f-ceph\") on node \"crc\" DevicePath \"\"" Oct 10 18:38:56 crc kubenswrapper[4799]: I1010 18:38:56.827972 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vr9tt\" (UniqueName: \"kubernetes.io/projected/18788551-0d2a-43c8-9aa2-d712be9c3c9f-kube-api-access-vr9tt\") on node \"crc\" DevicePath \"\"" Oct 10 18:38:56 crc kubenswrapper[4799]: I1010 18:38:56.827998 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/18788551-0d2a-43c8-9aa2-d712be9c3c9f-inventory\") on node \"crc\" DevicePath \"\"" Oct 10 18:38:57 crc kubenswrapper[4799]: I1010 18:38:57.135546 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-25fp2" event={"ID":"18788551-0d2a-43c8-9aa2-d712be9c3c9f","Type":"ContainerDied","Data":"14ff713a31ed4915ff0c6cc3c3c5a8463772b2e281f6f243b93a4df2964d7740"} Oct 10 18:38:57 crc kubenswrapper[4799]: I1010 18:38:57.135592 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="14ff713a31ed4915ff0c6cc3c3c5a8463772b2e281f6f243b93a4df2964d7740" Oct 10 18:38:57 crc kubenswrapper[4799]: I1010 18:38:57.135659 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-25fp2" Oct 10 18:38:57 crc kubenswrapper[4799]: I1010 18:38:57.220174 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-nl6wz"] Oct 10 18:38:57 crc kubenswrapper[4799]: E1010 18:38:57.220726 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18788551-0d2a-43c8-9aa2-d712be9c3c9f" containerName="run-os-openstack-openstack-cell1" Oct 10 18:38:57 crc kubenswrapper[4799]: I1010 18:38:57.220741 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="18788551-0d2a-43c8-9aa2-d712be9c3c9f" containerName="run-os-openstack-openstack-cell1" Oct 10 18:38:57 crc kubenswrapper[4799]: I1010 18:38:57.220986 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="18788551-0d2a-43c8-9aa2-d712be9c3c9f" containerName="run-os-openstack-openstack-cell1" Oct 10 18:38:57 crc kubenswrapper[4799]: I1010 18:38:57.221811 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-nl6wz" Oct 10 18:38:57 crc kubenswrapper[4799]: I1010 18:38:57.232700 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-nl6wz"] Oct 10 18:38:57 crc kubenswrapper[4799]: I1010 18:38:57.251454 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 10 18:38:57 crc kubenswrapper[4799]: I1010 18:38:57.251470 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 10 18:38:57 crc kubenswrapper[4799]: I1010 18:38:57.258037 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 10 18:38:57 crc kubenswrapper[4799]: I1010 18:38:57.258330 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-rdlhr" Oct 10 18:38:57 crc kubenswrapper[4799]: I1010 18:38:57.341429 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbmpp\" (UniqueName: \"kubernetes.io/projected/902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31-kube-api-access-dbmpp\") pod \"reboot-os-openstack-openstack-cell1-nl6wz\" (UID: \"902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31\") " pod="openstack/reboot-os-openstack-openstack-cell1-nl6wz" Oct 10 18:38:57 crc kubenswrapper[4799]: I1010 18:38:57.341706 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31-inventory\") pod \"reboot-os-openstack-openstack-cell1-nl6wz\" (UID: \"902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31\") " pod="openstack/reboot-os-openstack-openstack-cell1-nl6wz" Oct 10 18:38:57 crc kubenswrapper[4799]: I1010 18:38:57.341894 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31-ceph\") pod \"reboot-os-openstack-openstack-cell1-nl6wz\" (UID: \"902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31\") " pod="openstack/reboot-os-openstack-openstack-cell1-nl6wz" Oct 10 18:38:57 crc kubenswrapper[4799]: I1010 18:38:57.342043 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31-ssh-key\") pod \"reboot-os-openstack-openstack-cell1-nl6wz\" (UID: \"902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31\") " pod="openstack/reboot-os-openstack-openstack-cell1-nl6wz" Oct 10 18:38:57 crc kubenswrapper[4799]: E1010 18:38:57.424444 4799 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod18788551_0d2a_43c8_9aa2_d712be9c3c9f.slice/crio-14ff713a31ed4915ff0c6cc3c3c5a8463772b2e281f6f243b93a4df2964d7740\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod18788551_0d2a_43c8_9aa2_d712be9c3c9f.slice\": RecentStats: unable to find data in memory cache]" Oct 10 18:38:57 crc kubenswrapper[4799]: I1010 18:38:57.444141 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31-inventory\") pod \"reboot-os-openstack-openstack-cell1-nl6wz\" (UID: 
\"902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31\") " pod="openstack/reboot-os-openstack-openstack-cell1-nl6wz" Oct 10 18:38:57 crc kubenswrapper[4799]: I1010 18:38:57.444442 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31-ceph\") pod \"reboot-os-openstack-openstack-cell1-nl6wz\" (UID: \"902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31\") " pod="openstack/reboot-os-openstack-openstack-cell1-nl6wz" Oct 10 18:38:57 crc kubenswrapper[4799]: I1010 18:38:57.444465 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31-ssh-key\") pod \"reboot-os-openstack-openstack-cell1-nl6wz\" (UID: \"902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31\") " pod="openstack/reboot-os-openstack-openstack-cell1-nl6wz" Oct 10 18:38:57 crc kubenswrapper[4799]: I1010 18:38:57.445149 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbmpp\" (UniqueName: \"kubernetes.io/projected/902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31-kube-api-access-dbmpp\") pod \"reboot-os-openstack-openstack-cell1-nl6wz\" (UID: \"902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31\") " pod="openstack/reboot-os-openstack-openstack-cell1-nl6wz" Oct 10 18:38:57 crc kubenswrapper[4799]: I1010 18:38:57.452230 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31-ssh-key\") pod \"reboot-os-openstack-openstack-cell1-nl6wz\" (UID: \"902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31\") " pod="openstack/reboot-os-openstack-openstack-cell1-nl6wz" Oct 10 18:38:57 crc kubenswrapper[4799]: I1010 18:38:57.455423 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31-inventory\") pod \"reboot-os-openstack-openstack-cell1-nl6wz\" (UID: \"902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31\") " pod="openstack/reboot-os-openstack-openstack-cell1-nl6wz" Oct 10 18:38:57 crc kubenswrapper[4799]: I1010 18:38:57.459156 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31-ceph\") pod \"reboot-os-openstack-openstack-cell1-nl6wz\" (UID: \"902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31\") " pod="openstack/reboot-os-openstack-openstack-cell1-nl6wz" Oct 10 18:38:57 crc kubenswrapper[4799]: I1010 18:38:57.467223 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbmpp\" (UniqueName: \"kubernetes.io/projected/902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31-kube-api-access-dbmpp\") pod \"reboot-os-openstack-openstack-cell1-nl6wz\" (UID: \"902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31\") " pod="openstack/reboot-os-openstack-openstack-cell1-nl6wz" Oct 10 18:38:57 crc kubenswrapper[4799]: I1010 18:38:57.587388 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-nl6wz" Oct 10 18:38:58 crc kubenswrapper[4799]: I1010 18:38:58.197243 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-nl6wz"] Oct 10 18:38:59 crc kubenswrapper[4799]: I1010 18:38:59.175832 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-nl6wz" event={"ID":"902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31","Type":"ContainerStarted","Data":"851701ba043cd8583de4832c26841fbf95f2ecd909751c9dabdbec8521e72480"} Oct 10 18:38:59 crc kubenswrapper[4799]: I1010 18:38:59.176307 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-nl6wz" event={"ID":"902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31","Type":"ContainerStarted","Data":"2dcd30bda596abf36e78fab2ac7c4974ebf8af2f3c4944b12f9cb08a39e5fb7e"} Oct 10 18:38:59 crc kubenswrapper[4799]: I1010 18:38:59.205862 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-openstack-openstack-cell1-nl6wz" podStartSLOduration=1.629366007 podStartE2EDuration="2.205837371s" podCreationTimestamp="2025-10-10 18:38:57 +0000 UTC" firstStartedPulling="2025-10-10 18:38:58.178937846 +0000 UTC m=+7631.687261961" lastFinishedPulling="2025-10-10 18:38:58.75540922 +0000 UTC m=+7632.263733325" observedRunningTime="2025-10-10 18:38:59.193438948 +0000 UTC m=+7632.701763083" watchObservedRunningTime="2025-10-10 18:38:59.205837371 +0000 UTC m=+7632.714161506" Oct 10 18:39:15 crc kubenswrapper[4799]: I1010 18:39:15.249131 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 18:39:15 crc kubenswrapper[4799]: I1010 18:39:15.249625 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 18:39:15 crc kubenswrapper[4799]: I1010 18:39:15.249692 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 18:39:15 crc kubenswrapper[4799]: I1010 18:39:15.250549 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5d726047dc458172d088ab3478c0245a0f320ffb1060865307a391a8a23b1065"} pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 10 18:39:15 crc kubenswrapper[4799]: I1010 18:39:15.250675 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" containerID="cri-o://5d726047dc458172d088ab3478c0245a0f320ffb1060865307a391a8a23b1065" gracePeriod=600 Oct 10 18:39:15 crc kubenswrapper[4799]: E1010 18:39:15.379878 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 
5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:39:15 crc kubenswrapper[4799]: I1010 18:39:15.381415 4799 generic.go:334] "Generic (PLEG): container finished" podID="902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31" containerID="851701ba043cd8583de4832c26841fbf95f2ecd909751c9dabdbec8521e72480" exitCode=0 Oct 10 18:39:15 crc kubenswrapper[4799]: I1010 18:39:15.381480 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-nl6wz" event={"ID":"902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31","Type":"ContainerDied","Data":"851701ba043cd8583de4832c26841fbf95f2ecd909751c9dabdbec8521e72480"} Oct 10 18:39:15 crc kubenswrapper[4799]: I1010 18:39:15.384789 4799 generic.go:334] "Generic (PLEG): container finished" podID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerID="5d726047dc458172d088ab3478c0245a0f320ffb1060865307a391a8a23b1065" exitCode=0 Oct 10 18:39:15 crc kubenswrapper[4799]: I1010 18:39:15.384825 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerDied","Data":"5d726047dc458172d088ab3478c0245a0f320ffb1060865307a391a8a23b1065"} Oct 10 18:39:15 crc kubenswrapper[4799]: I1010 18:39:15.384885 4799 scope.go:117] "RemoveContainer" containerID="d11b6649908656854cce5d87d576a743fe05f8e582601b1ccaf94446dbe0c646" Oct 10 18:39:15 crc kubenswrapper[4799]: I1010 18:39:15.386188 4799 scope.go:117] "RemoveContainer" containerID="5d726047dc458172d088ab3478c0245a0f320ffb1060865307a391a8a23b1065" Oct 10 18:39:15 crc kubenswrapper[4799]: E1010 18:39:15.387096 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.002121 4799 util.go:48] "No ready sandbox for pod can be found. 
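The CrashLoopBackOff errors above show kubelet refusing to restart machine-config-daemon until a 5m0s back-off expires. Kubelet's crash-loop back-off is commonly described as starting around 10s and doubling per failed restart up to a 5m cap; the constants in this sketch follow that conventional description and are not read from this cluster's configuration:

package main

import (
	"fmt"
	"time"
)

// backoff returns the assumed wait before restart attempt n:
// 10s doubled per prior failure, capped at 5 minutes.
func backoff(restarts int) time.Duration {
	d := 10 * time.Second
	for i := 0; i < restarts; i++ {
		d *= 2
		if d >= 5*time.Minute {
			return 5 * time.Minute
		}
	}
	return d
}

func main() {
	for r := 0; r <= 6; r++ {
		fmt.Printf("restart %d -> wait %v\n", r, backoff(r))
	}
	// 10s, 20s, 40s, 1m20s, 2m40s, then pinned at 5m0s as in the log.
}
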
Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-nl6wz" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.163484 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31-inventory\") pod \"902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31\" (UID: \"902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31\") " Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.163578 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbmpp\" (UniqueName: \"kubernetes.io/projected/902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31-kube-api-access-dbmpp\") pod \"902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31\" (UID: \"902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31\") " Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.163616 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31-ceph\") pod \"902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31\" (UID: \"902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31\") " Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.163853 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31-ssh-key\") pod \"902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31\" (UID: \"902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31\") " Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.172489 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31-kube-api-access-dbmpp" (OuterVolumeSpecName: "kube-api-access-dbmpp") pod "902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31" (UID: "902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31"). InnerVolumeSpecName "kube-api-access-dbmpp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.178048 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31-ceph" (OuterVolumeSpecName: "ceph") pod "902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31" (UID: "902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.213541 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31-inventory" (OuterVolumeSpecName: "inventory") pod "902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31" (UID: "902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.217646 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31" (UID: "902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.266493 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbmpp\" (UniqueName: \"kubernetes.io/projected/902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31-kube-api-access-dbmpp\") on node \"crc\" DevicePath \"\"" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.267063 4799 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31-ceph\") on node \"crc\" DevicePath \"\"" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.267189 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.267279 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31-inventory\") on node \"crc\" DevicePath \"\"" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.451644 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-nl6wz" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.454349 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-nl6wz" event={"ID":"902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31","Type":"ContainerDied","Data":"2dcd30bda596abf36e78fab2ac7c4974ebf8af2f3c4944b12f9cb08a39e5fb7e"} Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.454409 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2dcd30bda596abf36e78fab2ac7c4974ebf8af2f3c4944b12f9cb08a39e5fb7e" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.563416 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-6r6fv"] Oct 10 18:39:17 crc kubenswrapper[4799]: E1010 18:39:17.564441 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31" containerName="reboot-os-openstack-openstack-cell1" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.564458 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31" containerName="reboot-os-openstack-openstack-cell1" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.565035 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31" containerName="reboot-os-openstack-openstack-cell1" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.566490 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.568640 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.569305 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-rdlhr" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.569605 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.570426 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.580215 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-6r6fv"] Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.677462 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.677532 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.677708 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-inventory\") pod \"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.677895 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.677954 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-ceph\") pod \"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.677972 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-telemetry-combined-ca-bundle\") pod 
\"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.678166 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-ssh-key\") pod \"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.678229 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.678294 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.678318 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.678561 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f69sc\" (UniqueName: \"kubernetes.io/projected/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-kube-api-access-f69sc\") pod \"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.678639 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.780581 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.780639 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: 
\"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-ceph\") pod \"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.780663 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-telemetry-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.780724 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-ssh-key\") pod \"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.780748 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.780801 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.780821 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.780978 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f69sc\" (UniqueName: \"kubernetes.io/projected/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-kube-api-access-f69sc\") pod \"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.781012 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.781066 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.781098 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.781167 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-inventory\") pod \"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.787140 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.788320 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-telemetry-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.788746 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-ceph\") pod \"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.790772 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-inventory\") pod \"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.794522 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.795126 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-bootstrap-combined-ca-bundle\") pod 
\"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.795385 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.795796 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.796143 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-ssh-key\") pod \"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.796580 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.797300 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.802876 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f69sc\" (UniqueName: \"kubernetes.io/projected/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-kube-api-access-f69sc\") pod \"install-certs-openstack-openstack-cell1-6r6fv\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:17 crc kubenswrapper[4799]: I1010 18:39:17.896370 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:18 crc kubenswrapper[4799]: I1010 18:39:18.517028 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-6r6fv"] Oct 10 18:39:18 crc kubenswrapper[4799]: W1010 18:39:18.521604 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode5dc63d0_e9a5_4035_8308_ef71ec9be69e.slice/crio-e784f25fa3eb58365f674c473ec6586e0099bbb867625d8ffad14605caee0e19 WatchSource:0}: Error finding container e784f25fa3eb58365f674c473ec6586e0099bbb867625d8ffad14605caee0e19: Status 404 returned error can't find the container with id e784f25fa3eb58365f674c473ec6586e0099bbb867625d8ffad14605caee0e19 Oct 10 18:39:19 crc kubenswrapper[4799]: I1010 18:39:19.463046 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" event={"ID":"e5dc63d0-e9a5-4035-8308-ef71ec9be69e","Type":"ContainerStarted","Data":"5aadf80f37c4367fdaf9d801e8bd122ade52eb58565fcc026fc4268516949eae"} Oct 10 18:39:19 crc kubenswrapper[4799]: I1010 18:39:19.463667 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" event={"ID":"e5dc63d0-e9a5-4035-8308-ef71ec9be69e","Type":"ContainerStarted","Data":"e784f25fa3eb58365f674c473ec6586e0099bbb867625d8ffad14605caee0e19"} Oct 10 18:39:19 crc kubenswrapper[4799]: I1010 18:39:19.505931 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" podStartSLOduration=2.06192532 podStartE2EDuration="2.505907254s" podCreationTimestamp="2025-10-10 18:39:17 +0000 UTC" firstStartedPulling="2025-10-10 18:39:18.529892983 +0000 UTC m=+7652.038217108" lastFinishedPulling="2025-10-10 18:39:18.973874887 +0000 UTC m=+7652.482199042" observedRunningTime="2025-10-10 18:39:19.488426907 +0000 UTC m=+7652.996751032" watchObservedRunningTime="2025-10-10 18:39:19.505907254 +0000 UTC m=+7653.014231379" Oct 10 18:39:29 crc kubenswrapper[4799]: I1010 18:39:29.404035 4799 scope.go:117] "RemoveContainer" containerID="5d726047dc458172d088ab3478c0245a0f320ffb1060865307a391a8a23b1065" Oct 10 18:39:29 crc kubenswrapper[4799]: E1010 18:39:29.405198 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:39:39 crc kubenswrapper[4799]: I1010 18:39:39.723689 4799 generic.go:334] "Generic (PLEG): container finished" podID="e5dc63d0-e9a5-4035-8308-ef71ec9be69e" containerID="5aadf80f37c4367fdaf9d801e8bd122ade52eb58565fcc026fc4268516949eae" exitCode=0 Oct 10 18:39:39 crc kubenswrapper[4799]: I1010 18:39:39.723819 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" event={"ID":"e5dc63d0-e9a5-4035-8308-ef71ec9be69e","Type":"ContainerDied","Data":"5aadf80f37c4367fdaf9d801e8bd122ade52eb58565fcc026fc4268516949eae"} Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.354958 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.516137 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-bootstrap-combined-ca-bundle\") pod \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.516240 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-ssh-key\") pod \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.516281 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-ovn-combined-ca-bundle\") pod \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.516372 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-nova-combined-ca-bundle\") pod \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.516406 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-neutron-dhcp-combined-ca-bundle\") pod \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.516428 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f69sc\" (UniqueName: \"kubernetes.io/projected/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-kube-api-access-f69sc\") pod \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.516486 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-ceph\") pod \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.516561 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-neutron-sriov-combined-ca-bundle\") pod \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.516583 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-libvirt-combined-ca-bundle\") pod \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.516614 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" 
(UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-inventory\") pod \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.516669 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-neutron-metadata-combined-ca-bundle\") pod \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.516719 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-telemetry-combined-ca-bundle\") pod \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\" (UID: \"e5dc63d0-e9a5-4035-8308-ef71ec9be69e\") " Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.523862 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "e5dc63d0-e9a5-4035-8308-ef71ec9be69e" (UID: "e5dc63d0-e9a5-4035-8308-ef71ec9be69e"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.524403 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "e5dc63d0-e9a5-4035-8308-ef71ec9be69e" (UID: "e5dc63d0-e9a5-4035-8308-ef71ec9be69e"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.525013 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-neutron-dhcp-combined-ca-bundle" (OuterVolumeSpecName: "neutron-dhcp-combined-ca-bundle") pod "e5dc63d0-e9a5-4035-8308-ef71ec9be69e" (UID: "e5dc63d0-e9a5-4035-8308-ef71ec9be69e"). InnerVolumeSpecName "neutron-dhcp-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.525101 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "e5dc63d0-e9a5-4035-8308-ef71ec9be69e" (UID: "e5dc63d0-e9a5-4035-8308-ef71ec9be69e"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.525807 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "e5dc63d0-e9a5-4035-8308-ef71ec9be69e" (UID: "e5dc63d0-e9a5-4035-8308-ef71ec9be69e"). InnerVolumeSpecName "libvirt-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.526224 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-neutron-sriov-combined-ca-bundle" (OuterVolumeSpecName: "neutron-sriov-combined-ca-bundle") pod "e5dc63d0-e9a5-4035-8308-ef71ec9be69e" (UID: "e5dc63d0-e9a5-4035-8308-ef71ec9be69e"). InnerVolumeSpecName "neutron-sriov-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.526242 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-kube-api-access-f69sc" (OuterVolumeSpecName: "kube-api-access-f69sc") pod "e5dc63d0-e9a5-4035-8308-ef71ec9be69e" (UID: "e5dc63d0-e9a5-4035-8308-ef71ec9be69e"). InnerVolumeSpecName "kube-api-access-f69sc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.527245 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "e5dc63d0-e9a5-4035-8308-ef71ec9be69e" (UID: "e5dc63d0-e9a5-4035-8308-ef71ec9be69e"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.529923 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "e5dc63d0-e9a5-4035-8308-ef71ec9be69e" (UID: "e5dc63d0-e9a5-4035-8308-ef71ec9be69e"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.530016 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-ceph" (OuterVolumeSpecName: "ceph") pod "e5dc63d0-e9a5-4035-8308-ef71ec9be69e" (UID: "e5dc63d0-e9a5-4035-8308-ef71ec9be69e"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.573864 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-inventory" (OuterVolumeSpecName: "inventory") pod "e5dc63d0-e9a5-4035-8308-ef71ec9be69e" (UID: "e5dc63d0-e9a5-4035-8308-ef71ec9be69e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.574074 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e5dc63d0-e9a5-4035-8308-ef71ec9be69e" (UID: "e5dc63d0-e9a5-4035-8308-ef71ec9be69e"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.620347 4799 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.620435 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.620492 4799 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.620513 4799 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.620531 4799 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-neutron-dhcp-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.620593 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f69sc\" (UniqueName: \"kubernetes.io/projected/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-kube-api-access-f69sc\") on node \"crc\" DevicePath \"\"" Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.620614 4799 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-ceph\") on node \"crc\" DevicePath \"\"" Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.620632 4799 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-neutron-sriov-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.620691 4799 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.620710 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-inventory\") on node \"crc\" DevicePath \"\"" Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.620727 4799 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.620803 4799 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5dc63d0-e9a5-4035-8308-ef71ec9be69e-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.752965 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" event={"ID":"e5dc63d0-e9a5-4035-8308-ef71ec9be69e","Type":"ContainerDied","Data":"e784f25fa3eb58365f674c473ec6586e0099bbb867625d8ffad14605caee0e19"} Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.753005 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-6r6fv" Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.753028 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e784f25fa3eb58365f674c473ec6586e0099bbb867625d8ffad14605caee0e19" Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.892840 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-ptt5f"] Oct 10 18:39:41 crc kubenswrapper[4799]: E1010 18:39:41.893697 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5dc63d0-e9a5-4035-8308-ef71ec9be69e" containerName="install-certs-openstack-openstack-cell1" Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.893745 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5dc63d0-e9a5-4035-8308-ef71ec9be69e" containerName="install-certs-openstack-openstack-cell1" Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.894399 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5dc63d0-e9a5-4035-8308-ef71ec9be69e" containerName="install-certs-openstack-openstack-cell1" Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.896394 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-ptt5f" Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.900084 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-rdlhr" Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.900509 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.901335 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.901938 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 10 18:39:41 crc kubenswrapper[4799]: I1010 18:39:41.908413 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-ptt5f"] Oct 10 18:39:42 crc kubenswrapper[4799]: I1010 18:39:42.028413 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6kgk\" (UniqueName: \"kubernetes.io/projected/bf2a1658-7b1c-4b1f-af4a-87d92927e9c4-kube-api-access-d6kgk\") pod \"ceph-client-openstack-openstack-cell1-ptt5f\" (UID: \"bf2a1658-7b1c-4b1f-af4a-87d92927e9c4\") " pod="openstack/ceph-client-openstack-openstack-cell1-ptt5f" Oct 10 18:39:42 crc kubenswrapper[4799]: I1010 18:39:42.028663 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bf2a1658-7b1c-4b1f-af4a-87d92927e9c4-ceph\") pod \"ceph-client-openstack-openstack-cell1-ptt5f\" (UID: \"bf2a1658-7b1c-4b1f-af4a-87d92927e9c4\") " pod="openstack/ceph-client-openstack-openstack-cell1-ptt5f" Oct 10 18:39:42 crc kubenswrapper[4799]: I1010 18:39:42.028923 4799 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bf2a1658-7b1c-4b1f-af4a-87d92927e9c4-ssh-key\") pod \"ceph-client-openstack-openstack-cell1-ptt5f\" (UID: \"bf2a1658-7b1c-4b1f-af4a-87d92927e9c4\") " pod="openstack/ceph-client-openstack-openstack-cell1-ptt5f" Oct 10 18:39:42 crc kubenswrapper[4799]: I1010 18:39:42.029007 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bf2a1658-7b1c-4b1f-af4a-87d92927e9c4-inventory\") pod \"ceph-client-openstack-openstack-cell1-ptt5f\" (UID: \"bf2a1658-7b1c-4b1f-af4a-87d92927e9c4\") " pod="openstack/ceph-client-openstack-openstack-cell1-ptt5f" Oct 10 18:39:42 crc kubenswrapper[4799]: I1010 18:39:42.130708 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bf2a1658-7b1c-4b1f-af4a-87d92927e9c4-ceph\") pod \"ceph-client-openstack-openstack-cell1-ptt5f\" (UID: \"bf2a1658-7b1c-4b1f-af4a-87d92927e9c4\") " pod="openstack/ceph-client-openstack-openstack-cell1-ptt5f" Oct 10 18:39:42 crc kubenswrapper[4799]: I1010 18:39:42.130965 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bf2a1658-7b1c-4b1f-af4a-87d92927e9c4-ssh-key\") pod \"ceph-client-openstack-openstack-cell1-ptt5f\" (UID: \"bf2a1658-7b1c-4b1f-af4a-87d92927e9c4\") " pod="openstack/ceph-client-openstack-openstack-cell1-ptt5f" Oct 10 18:39:42 crc kubenswrapper[4799]: I1010 18:39:42.131074 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bf2a1658-7b1c-4b1f-af4a-87d92927e9c4-inventory\") pod \"ceph-client-openstack-openstack-cell1-ptt5f\" (UID: \"bf2a1658-7b1c-4b1f-af4a-87d92927e9c4\") " pod="openstack/ceph-client-openstack-openstack-cell1-ptt5f" Oct 10 18:39:42 crc kubenswrapper[4799]: I1010 18:39:42.131194 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6kgk\" (UniqueName: \"kubernetes.io/projected/bf2a1658-7b1c-4b1f-af4a-87d92927e9c4-kube-api-access-d6kgk\") pod \"ceph-client-openstack-openstack-cell1-ptt5f\" (UID: \"bf2a1658-7b1c-4b1f-af4a-87d92927e9c4\") " pod="openstack/ceph-client-openstack-openstack-cell1-ptt5f" Oct 10 18:39:42 crc kubenswrapper[4799]: I1010 18:39:42.136830 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bf2a1658-7b1c-4b1f-af4a-87d92927e9c4-ssh-key\") pod \"ceph-client-openstack-openstack-cell1-ptt5f\" (UID: \"bf2a1658-7b1c-4b1f-af4a-87d92927e9c4\") " pod="openstack/ceph-client-openstack-openstack-cell1-ptt5f" Oct 10 18:39:42 crc kubenswrapper[4799]: I1010 18:39:42.138805 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bf2a1658-7b1c-4b1f-af4a-87d92927e9c4-ceph\") pod \"ceph-client-openstack-openstack-cell1-ptt5f\" (UID: \"bf2a1658-7b1c-4b1f-af4a-87d92927e9c4\") " pod="openstack/ceph-client-openstack-openstack-cell1-ptt5f" Oct 10 18:39:42 crc kubenswrapper[4799]: I1010 18:39:42.139954 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bf2a1658-7b1c-4b1f-af4a-87d92927e9c4-inventory\") pod \"ceph-client-openstack-openstack-cell1-ptt5f\" (UID: \"bf2a1658-7b1c-4b1f-af4a-87d92927e9c4\") " 
pod="openstack/ceph-client-openstack-openstack-cell1-ptt5f" Oct 10 18:39:42 crc kubenswrapper[4799]: I1010 18:39:42.157840 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6kgk\" (UniqueName: \"kubernetes.io/projected/bf2a1658-7b1c-4b1f-af4a-87d92927e9c4-kube-api-access-d6kgk\") pod \"ceph-client-openstack-openstack-cell1-ptt5f\" (UID: \"bf2a1658-7b1c-4b1f-af4a-87d92927e9c4\") " pod="openstack/ceph-client-openstack-openstack-cell1-ptt5f" Oct 10 18:39:42 crc kubenswrapper[4799]: I1010 18:39:42.222646 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-ptt5f" Oct 10 18:39:42 crc kubenswrapper[4799]: I1010 18:39:42.830804 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-ptt5f"] Oct 10 18:39:43 crc kubenswrapper[4799]: I1010 18:39:43.776727 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-ptt5f" event={"ID":"bf2a1658-7b1c-4b1f-af4a-87d92927e9c4","Type":"ContainerStarted","Data":"b7dfd4ff448df52e5cf4a511e74f4c5508a46a708d7cbcf36805ffbeb4333826"} Oct 10 18:39:43 crc kubenswrapper[4799]: I1010 18:39:43.777665 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-ptt5f" event={"ID":"bf2a1658-7b1c-4b1f-af4a-87d92927e9c4","Type":"ContainerStarted","Data":"6002cc57043edad1332b48c122d3ddfe9a9986d1a597aa276781ae9f849d74b2"} Oct 10 18:39:43 crc kubenswrapper[4799]: I1010 18:39:43.806522 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-client-openstack-openstack-cell1-ptt5f" podStartSLOduration=2.224189148 podStartE2EDuration="2.806504136s" podCreationTimestamp="2025-10-10 18:39:41 +0000 UTC" firstStartedPulling="2025-10-10 18:39:42.836293817 +0000 UTC m=+7676.344617962" lastFinishedPulling="2025-10-10 18:39:43.418608795 +0000 UTC m=+7676.926932950" observedRunningTime="2025-10-10 18:39:43.801717059 +0000 UTC m=+7677.310041214" watchObservedRunningTime="2025-10-10 18:39:43.806504136 +0000 UTC m=+7677.314828261" Oct 10 18:39:44 crc kubenswrapper[4799]: I1010 18:39:44.403200 4799 scope.go:117] "RemoveContainer" containerID="5d726047dc458172d088ab3478c0245a0f320ffb1060865307a391a8a23b1065" Oct 10 18:39:44 crc kubenswrapper[4799]: E1010 18:39:44.403861 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:39:48 crc kubenswrapper[4799]: I1010 18:39:48.840940 4799 generic.go:334] "Generic (PLEG): container finished" podID="bf2a1658-7b1c-4b1f-af4a-87d92927e9c4" containerID="b7dfd4ff448df52e5cf4a511e74f4c5508a46a708d7cbcf36805ffbeb4333826" exitCode=0 Oct 10 18:39:48 crc kubenswrapper[4799]: I1010 18:39:48.841664 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-ptt5f" event={"ID":"bf2a1658-7b1c-4b1f-af4a-87d92927e9c4","Type":"ContainerDied","Data":"b7dfd4ff448df52e5cf4a511e74f4c5508a46a708d7cbcf36805ffbeb4333826"} Oct 10 18:39:49 crc kubenswrapper[4799]: E1010 18:39:49.082239 4799 cadvisor_stats_provider.go:516] "Partial failure issuing 
cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbf2a1658_7b1c_4b1f_af4a_87d92927e9c4.slice/crio-b7dfd4ff448df52e5cf4a511e74f4c5508a46a708d7cbcf36805ffbeb4333826.scope\": RecentStats: unable to find data in memory cache]" Oct 10 18:39:50 crc kubenswrapper[4799]: I1010 18:39:50.518084 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-ptt5f" Oct 10 18:39:50 crc kubenswrapper[4799]: I1010 18:39:50.648988 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bf2a1658-7b1c-4b1f-af4a-87d92927e9c4-ceph\") pod \"bf2a1658-7b1c-4b1f-af4a-87d92927e9c4\" (UID: \"bf2a1658-7b1c-4b1f-af4a-87d92927e9c4\") " Oct 10 18:39:50 crc kubenswrapper[4799]: I1010 18:39:50.649122 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bf2a1658-7b1c-4b1f-af4a-87d92927e9c4-ssh-key\") pod \"bf2a1658-7b1c-4b1f-af4a-87d92927e9c4\" (UID: \"bf2a1658-7b1c-4b1f-af4a-87d92927e9c4\") " Oct 10 18:39:50 crc kubenswrapper[4799]: I1010 18:39:50.649295 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6kgk\" (UniqueName: \"kubernetes.io/projected/bf2a1658-7b1c-4b1f-af4a-87d92927e9c4-kube-api-access-d6kgk\") pod \"bf2a1658-7b1c-4b1f-af4a-87d92927e9c4\" (UID: \"bf2a1658-7b1c-4b1f-af4a-87d92927e9c4\") " Oct 10 18:39:50 crc kubenswrapper[4799]: I1010 18:39:50.649502 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bf2a1658-7b1c-4b1f-af4a-87d92927e9c4-inventory\") pod \"bf2a1658-7b1c-4b1f-af4a-87d92927e9c4\" (UID: \"bf2a1658-7b1c-4b1f-af4a-87d92927e9c4\") " Oct 10 18:39:50 crc kubenswrapper[4799]: I1010 18:39:50.658159 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf2a1658-7b1c-4b1f-af4a-87d92927e9c4-kube-api-access-d6kgk" (OuterVolumeSpecName: "kube-api-access-d6kgk") pod "bf2a1658-7b1c-4b1f-af4a-87d92927e9c4" (UID: "bf2a1658-7b1c-4b1f-af4a-87d92927e9c4"). InnerVolumeSpecName "kube-api-access-d6kgk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:39:50 crc kubenswrapper[4799]: I1010 18:39:50.667014 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf2a1658-7b1c-4b1f-af4a-87d92927e9c4-ceph" (OuterVolumeSpecName: "ceph") pod "bf2a1658-7b1c-4b1f-af4a-87d92927e9c4" (UID: "bf2a1658-7b1c-4b1f-af4a-87d92927e9c4"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:39:50 crc kubenswrapper[4799]: I1010 18:39:50.686566 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf2a1658-7b1c-4b1f-af4a-87d92927e9c4-inventory" (OuterVolumeSpecName: "inventory") pod "bf2a1658-7b1c-4b1f-af4a-87d92927e9c4" (UID: "bf2a1658-7b1c-4b1f-af4a-87d92927e9c4"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:39:50 crc kubenswrapper[4799]: I1010 18:39:50.687545 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf2a1658-7b1c-4b1f-af4a-87d92927e9c4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "bf2a1658-7b1c-4b1f-af4a-87d92927e9c4" (UID: "bf2a1658-7b1c-4b1f-af4a-87d92927e9c4"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:39:50 crc kubenswrapper[4799]: I1010 18:39:50.753608 4799 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bf2a1658-7b1c-4b1f-af4a-87d92927e9c4-ceph\") on node \"crc\" DevicePath \"\"" Oct 10 18:39:50 crc kubenswrapper[4799]: I1010 18:39:50.753655 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bf2a1658-7b1c-4b1f-af4a-87d92927e9c4-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 10 18:39:50 crc kubenswrapper[4799]: I1010 18:39:50.753677 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6kgk\" (UniqueName: \"kubernetes.io/projected/bf2a1658-7b1c-4b1f-af4a-87d92927e9c4-kube-api-access-d6kgk\") on node \"crc\" DevicePath \"\"" Oct 10 18:39:50 crc kubenswrapper[4799]: I1010 18:39:50.753697 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bf2a1658-7b1c-4b1f-af4a-87d92927e9c4-inventory\") on node \"crc\" DevicePath \"\"" Oct 10 18:39:50 crc kubenswrapper[4799]: I1010 18:39:50.869641 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-ptt5f" event={"ID":"bf2a1658-7b1c-4b1f-af4a-87d92927e9c4","Type":"ContainerDied","Data":"6002cc57043edad1332b48c122d3ddfe9a9986d1a597aa276781ae9f849d74b2"} Oct 10 18:39:50 crc kubenswrapper[4799]: I1010 18:39:50.869698 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6002cc57043edad1332b48c122d3ddfe9a9986d1a597aa276781ae9f849d74b2" Oct 10 18:39:50 crc kubenswrapper[4799]: I1010 18:39:50.869732 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-ptt5f" Oct 10 18:39:50 crc kubenswrapper[4799]: I1010 18:39:50.972932 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-openstack-openstack-cell1-ltfpg"] Oct 10 18:39:50 crc kubenswrapper[4799]: E1010 18:39:50.973659 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf2a1658-7b1c-4b1f-af4a-87d92927e9c4" containerName="ceph-client-openstack-openstack-cell1" Oct 10 18:39:50 crc kubenswrapper[4799]: I1010 18:39:50.973743 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf2a1658-7b1c-4b1f-af4a-87d92927e9c4" containerName="ceph-client-openstack-openstack-cell1" Oct 10 18:39:50 crc kubenswrapper[4799]: I1010 18:39:50.974127 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf2a1658-7b1c-4b1f-af4a-87d92927e9c4" containerName="ceph-client-openstack-openstack-cell1" Oct 10 18:39:50 crc kubenswrapper[4799]: I1010 18:39:50.975782 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-ltfpg" Oct 10 18:39:50 crc kubenswrapper[4799]: I1010 18:39:50.982679 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 10 18:39:50 crc kubenswrapper[4799]: I1010 18:39:50.983134 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 10 18:39:50 crc kubenswrapper[4799]: I1010 18:39:50.982854 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 10 18:39:50 crc kubenswrapper[4799]: I1010 18:39:50.982923 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-rdlhr" Oct 10 18:39:50 crc kubenswrapper[4799]: I1010 18:39:50.982981 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Oct 10 18:39:51 crc kubenswrapper[4799]: I1010 18:39:50.996772 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-openstack-openstack-cell1-ltfpg"] Oct 10 18:39:51 crc kubenswrapper[4799]: I1010 18:39:51.062825 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ba70610-066c-4a5b-a16d-3555884bf1b2-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-ltfpg\" (UID: \"1ba70610-066c-4a5b-a16d-3555884bf1b2\") " pod="openstack/ovn-openstack-openstack-cell1-ltfpg" Oct 10 18:39:51 crc kubenswrapper[4799]: I1010 18:39:51.062873 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/1ba70610-066c-4a5b-a16d-3555884bf1b2-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-ltfpg\" (UID: \"1ba70610-066c-4a5b-a16d-3555884bf1b2\") " pod="openstack/ovn-openstack-openstack-cell1-ltfpg" Oct 10 18:39:51 crc kubenswrapper[4799]: I1010 18:39:51.063038 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1ba70610-066c-4a5b-a16d-3555884bf1b2-ceph\") pod \"ovn-openstack-openstack-cell1-ltfpg\" (UID: \"1ba70610-066c-4a5b-a16d-3555884bf1b2\") " pod="openstack/ovn-openstack-openstack-cell1-ltfpg" Oct 10 18:39:51 crc kubenswrapper[4799]: I1010 18:39:51.063116 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7z82b\" (UniqueName: \"kubernetes.io/projected/1ba70610-066c-4a5b-a16d-3555884bf1b2-kube-api-access-7z82b\") pod \"ovn-openstack-openstack-cell1-ltfpg\" (UID: \"1ba70610-066c-4a5b-a16d-3555884bf1b2\") " pod="openstack/ovn-openstack-openstack-cell1-ltfpg" Oct 10 18:39:51 crc kubenswrapper[4799]: I1010 18:39:51.063238 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1ba70610-066c-4a5b-a16d-3555884bf1b2-inventory\") pod \"ovn-openstack-openstack-cell1-ltfpg\" (UID: \"1ba70610-066c-4a5b-a16d-3555884bf1b2\") " pod="openstack/ovn-openstack-openstack-cell1-ltfpg" Oct 10 18:39:51 crc kubenswrapper[4799]: I1010 18:39:51.063272 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1ba70610-066c-4a5b-a16d-3555884bf1b2-ssh-key\") pod \"ovn-openstack-openstack-cell1-ltfpg\" (UID: 
\"1ba70610-066c-4a5b-a16d-3555884bf1b2\") " pod="openstack/ovn-openstack-openstack-cell1-ltfpg" Oct 10 18:39:51 crc kubenswrapper[4799]: I1010 18:39:51.165010 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1ba70610-066c-4a5b-a16d-3555884bf1b2-inventory\") pod \"ovn-openstack-openstack-cell1-ltfpg\" (UID: \"1ba70610-066c-4a5b-a16d-3555884bf1b2\") " pod="openstack/ovn-openstack-openstack-cell1-ltfpg" Oct 10 18:39:51 crc kubenswrapper[4799]: I1010 18:39:51.165050 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1ba70610-066c-4a5b-a16d-3555884bf1b2-ssh-key\") pod \"ovn-openstack-openstack-cell1-ltfpg\" (UID: \"1ba70610-066c-4a5b-a16d-3555884bf1b2\") " pod="openstack/ovn-openstack-openstack-cell1-ltfpg" Oct 10 18:39:51 crc kubenswrapper[4799]: I1010 18:39:51.165135 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ba70610-066c-4a5b-a16d-3555884bf1b2-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-ltfpg\" (UID: \"1ba70610-066c-4a5b-a16d-3555884bf1b2\") " pod="openstack/ovn-openstack-openstack-cell1-ltfpg" Oct 10 18:39:51 crc kubenswrapper[4799]: I1010 18:39:51.165162 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/1ba70610-066c-4a5b-a16d-3555884bf1b2-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-ltfpg\" (UID: \"1ba70610-066c-4a5b-a16d-3555884bf1b2\") " pod="openstack/ovn-openstack-openstack-cell1-ltfpg" Oct 10 18:39:51 crc kubenswrapper[4799]: I1010 18:39:51.165239 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1ba70610-066c-4a5b-a16d-3555884bf1b2-ceph\") pod \"ovn-openstack-openstack-cell1-ltfpg\" (UID: \"1ba70610-066c-4a5b-a16d-3555884bf1b2\") " pod="openstack/ovn-openstack-openstack-cell1-ltfpg" Oct 10 18:39:51 crc kubenswrapper[4799]: I1010 18:39:51.165269 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7z82b\" (UniqueName: \"kubernetes.io/projected/1ba70610-066c-4a5b-a16d-3555884bf1b2-kube-api-access-7z82b\") pod \"ovn-openstack-openstack-cell1-ltfpg\" (UID: \"1ba70610-066c-4a5b-a16d-3555884bf1b2\") " pod="openstack/ovn-openstack-openstack-cell1-ltfpg" Oct 10 18:39:51 crc kubenswrapper[4799]: I1010 18:39:51.166194 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/1ba70610-066c-4a5b-a16d-3555884bf1b2-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-ltfpg\" (UID: \"1ba70610-066c-4a5b-a16d-3555884bf1b2\") " pod="openstack/ovn-openstack-openstack-cell1-ltfpg" Oct 10 18:39:51 crc kubenswrapper[4799]: I1010 18:39:51.168625 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1ba70610-066c-4a5b-a16d-3555884bf1b2-ssh-key\") pod \"ovn-openstack-openstack-cell1-ltfpg\" (UID: \"1ba70610-066c-4a5b-a16d-3555884bf1b2\") " pod="openstack/ovn-openstack-openstack-cell1-ltfpg" Oct 10 18:39:51 crc kubenswrapper[4799]: I1010 18:39:51.169441 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1ba70610-066c-4a5b-a16d-3555884bf1b2-ceph\") pod 
\"ovn-openstack-openstack-cell1-ltfpg\" (UID: \"1ba70610-066c-4a5b-a16d-3555884bf1b2\") " pod="openstack/ovn-openstack-openstack-cell1-ltfpg" Oct 10 18:39:51 crc kubenswrapper[4799]: I1010 18:39:51.171175 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ba70610-066c-4a5b-a16d-3555884bf1b2-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-ltfpg\" (UID: \"1ba70610-066c-4a5b-a16d-3555884bf1b2\") " pod="openstack/ovn-openstack-openstack-cell1-ltfpg" Oct 10 18:39:51 crc kubenswrapper[4799]: I1010 18:39:51.173225 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1ba70610-066c-4a5b-a16d-3555884bf1b2-inventory\") pod \"ovn-openstack-openstack-cell1-ltfpg\" (UID: \"1ba70610-066c-4a5b-a16d-3555884bf1b2\") " pod="openstack/ovn-openstack-openstack-cell1-ltfpg" Oct 10 18:39:51 crc kubenswrapper[4799]: I1010 18:39:51.189113 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7z82b\" (UniqueName: \"kubernetes.io/projected/1ba70610-066c-4a5b-a16d-3555884bf1b2-kube-api-access-7z82b\") pod \"ovn-openstack-openstack-cell1-ltfpg\" (UID: \"1ba70610-066c-4a5b-a16d-3555884bf1b2\") " pod="openstack/ovn-openstack-openstack-cell1-ltfpg" Oct 10 18:39:51 crc kubenswrapper[4799]: I1010 18:39:51.366183 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-ltfpg" Oct 10 18:39:51 crc kubenswrapper[4799]: I1010 18:39:51.947611 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-openstack-openstack-cell1-ltfpg"] Oct 10 18:39:51 crc kubenswrapper[4799]: W1010 18:39:51.965965 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ba70610_066c_4a5b_a16d_3555884bf1b2.slice/crio-cd9096b3cf16ab6ac268a8e929745bc27ca1a4b65c410171f3f2d089a48d591a WatchSource:0}: Error finding container cd9096b3cf16ab6ac268a8e929745bc27ca1a4b65c410171f3f2d089a48d591a: Status 404 returned error can't find the container with id cd9096b3cf16ab6ac268a8e929745bc27ca1a4b65c410171f3f2d089a48d591a Oct 10 18:39:52 crc kubenswrapper[4799]: I1010 18:39:52.897482 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-ltfpg" event={"ID":"1ba70610-066c-4a5b-a16d-3555884bf1b2","Type":"ContainerStarted","Data":"0376599a520b337cee45e7cb7c26b951edddbd5a8b68411e2fdf17866b45a646"} Oct 10 18:39:52 crc kubenswrapper[4799]: I1010 18:39:52.897853 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-ltfpg" event={"ID":"1ba70610-066c-4a5b-a16d-3555884bf1b2","Type":"ContainerStarted","Data":"cd9096b3cf16ab6ac268a8e929745bc27ca1a4b65c410171f3f2d089a48d591a"} Oct 10 18:39:52 crc kubenswrapper[4799]: I1010 18:39:52.925364 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-openstack-openstack-cell1-ltfpg" podStartSLOduration=2.42543346 podStartE2EDuration="2.92533978s" podCreationTimestamp="2025-10-10 18:39:50 +0000 UTC" firstStartedPulling="2025-10-10 18:39:51.969214417 +0000 UTC m=+7685.477538572" lastFinishedPulling="2025-10-10 18:39:52.469120737 +0000 UTC m=+7685.977444892" observedRunningTime="2025-10-10 18:39:52.914199467 +0000 UTC m=+7686.422523622" watchObservedRunningTime="2025-10-10 18:39:52.92533978 +0000 UTC m=+7686.433663905" Oct 10 18:39:58 crc 
kubenswrapper[4799]: I1010 18:39:58.403580 4799 scope.go:117] "RemoveContainer" containerID="5d726047dc458172d088ab3478c0245a0f320ffb1060865307a391a8a23b1065" Oct 10 18:39:58 crc kubenswrapper[4799]: E1010 18:39:58.404665 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:40:12 crc kubenswrapper[4799]: I1010 18:40:12.403571 4799 scope.go:117] "RemoveContainer" containerID="5d726047dc458172d088ab3478c0245a0f320ffb1060865307a391a8a23b1065" Oct 10 18:40:12 crc kubenswrapper[4799]: E1010 18:40:12.405099 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:40:23 crc kubenswrapper[4799]: I1010 18:40:23.403470 4799 scope.go:117] "RemoveContainer" containerID="5d726047dc458172d088ab3478c0245a0f320ffb1060865307a391a8a23b1065" Oct 10 18:40:23 crc kubenswrapper[4799]: E1010 18:40:23.403993 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:40:38 crc kubenswrapper[4799]: I1010 18:40:38.404675 4799 scope.go:117] "RemoveContainer" containerID="5d726047dc458172d088ab3478c0245a0f320ffb1060865307a391a8a23b1065" Oct 10 18:40:38 crc kubenswrapper[4799]: E1010 18:40:38.406186 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:40:54 crc kubenswrapper[4799]: I1010 18:40:54.402673 4799 scope.go:117] "RemoveContainer" containerID="5d726047dc458172d088ab3478c0245a0f320ffb1060865307a391a8a23b1065" Oct 10 18:40:54 crc kubenswrapper[4799]: E1010 18:40:54.403497 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:40:59 crc kubenswrapper[4799]: I1010 18:40:59.262938 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hlgjf"] Oct 10 18:40:59 crc kubenswrapper[4799]: 
I1010 18:40:59.267267 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hlgjf" Oct 10 18:40:59 crc kubenswrapper[4799]: I1010 18:40:59.300990 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hlgjf"] Oct 10 18:40:59 crc kubenswrapper[4799]: I1010 18:40:59.424509 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9e382578-8174-4dcb-81e1-d4a18fdd6f99-catalog-content\") pod \"community-operators-hlgjf\" (UID: \"9e382578-8174-4dcb-81e1-d4a18fdd6f99\") " pod="openshift-marketplace/community-operators-hlgjf" Oct 10 18:40:59 crc kubenswrapper[4799]: I1010 18:40:59.424796 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9e382578-8174-4dcb-81e1-d4a18fdd6f99-utilities\") pod \"community-operators-hlgjf\" (UID: \"9e382578-8174-4dcb-81e1-d4a18fdd6f99\") " pod="openshift-marketplace/community-operators-hlgjf" Oct 10 18:40:59 crc kubenswrapper[4799]: I1010 18:40:59.425369 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7p5mp\" (UniqueName: \"kubernetes.io/projected/9e382578-8174-4dcb-81e1-d4a18fdd6f99-kube-api-access-7p5mp\") pod \"community-operators-hlgjf\" (UID: \"9e382578-8174-4dcb-81e1-d4a18fdd6f99\") " pod="openshift-marketplace/community-operators-hlgjf" Oct 10 18:40:59 crc kubenswrapper[4799]: I1010 18:40:59.527004 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9e382578-8174-4dcb-81e1-d4a18fdd6f99-catalog-content\") pod \"community-operators-hlgjf\" (UID: \"9e382578-8174-4dcb-81e1-d4a18fdd6f99\") " pod="openshift-marketplace/community-operators-hlgjf" Oct 10 18:40:59 crc kubenswrapper[4799]: I1010 18:40:59.527138 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9e382578-8174-4dcb-81e1-d4a18fdd6f99-utilities\") pod \"community-operators-hlgjf\" (UID: \"9e382578-8174-4dcb-81e1-d4a18fdd6f99\") " pod="openshift-marketplace/community-operators-hlgjf" Oct 10 18:40:59 crc kubenswrapper[4799]: I1010 18:40:59.527298 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7p5mp\" (UniqueName: \"kubernetes.io/projected/9e382578-8174-4dcb-81e1-d4a18fdd6f99-kube-api-access-7p5mp\") pod \"community-operators-hlgjf\" (UID: \"9e382578-8174-4dcb-81e1-d4a18fdd6f99\") " pod="openshift-marketplace/community-operators-hlgjf" Oct 10 18:40:59 crc kubenswrapper[4799]: I1010 18:40:59.527513 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9e382578-8174-4dcb-81e1-d4a18fdd6f99-catalog-content\") pod \"community-operators-hlgjf\" (UID: \"9e382578-8174-4dcb-81e1-d4a18fdd6f99\") " pod="openshift-marketplace/community-operators-hlgjf" Oct 10 18:40:59 crc kubenswrapper[4799]: I1010 18:40:59.527614 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9e382578-8174-4dcb-81e1-d4a18fdd6f99-utilities\") pod \"community-operators-hlgjf\" (UID: \"9e382578-8174-4dcb-81e1-d4a18fdd6f99\") " pod="openshift-marketplace/community-operators-hlgjf" Oct 10 18:40:59 crc 
kubenswrapper[4799]: I1010 18:40:59.566482 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7p5mp\" (UniqueName: \"kubernetes.io/projected/9e382578-8174-4dcb-81e1-d4a18fdd6f99-kube-api-access-7p5mp\") pod \"community-operators-hlgjf\" (UID: \"9e382578-8174-4dcb-81e1-d4a18fdd6f99\") " pod="openshift-marketplace/community-operators-hlgjf" Oct 10 18:40:59 crc kubenswrapper[4799]: I1010 18:40:59.605842 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hlgjf" Oct 10 18:41:00 crc kubenswrapper[4799]: I1010 18:41:00.220489 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hlgjf"] Oct 10 18:41:00 crc kubenswrapper[4799]: W1010 18:41:00.223088 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9e382578_8174_4dcb_81e1_d4a18fdd6f99.slice/crio-49aaba0ede53b0f80a1c44ce1c9b775c7c79052528b05da1ec4b0919df58b3af WatchSource:0}: Error finding container 49aaba0ede53b0f80a1c44ce1c9b775c7c79052528b05da1ec4b0919df58b3af: Status 404 returned error can't find the container with id 49aaba0ede53b0f80a1c44ce1c9b775c7c79052528b05da1ec4b0919df58b3af Oct 10 18:41:00 crc kubenswrapper[4799]: I1010 18:41:00.807019 4799 generic.go:334] "Generic (PLEG): container finished" podID="9e382578-8174-4dcb-81e1-d4a18fdd6f99" containerID="eadc94b5c4e0f93d3b32caea9af724560136e9bf1b970cadb50bccc91a7ee978" exitCode=0 Oct 10 18:41:00 crc kubenswrapper[4799]: I1010 18:41:00.807111 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hlgjf" event={"ID":"9e382578-8174-4dcb-81e1-d4a18fdd6f99","Type":"ContainerDied","Data":"eadc94b5c4e0f93d3b32caea9af724560136e9bf1b970cadb50bccc91a7ee978"} Oct 10 18:41:00 crc kubenswrapper[4799]: I1010 18:41:00.807691 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hlgjf" event={"ID":"9e382578-8174-4dcb-81e1-d4a18fdd6f99","Type":"ContainerStarted","Data":"49aaba0ede53b0f80a1c44ce1c9b775c7c79052528b05da1ec4b0919df58b3af"} Oct 10 18:41:00 crc kubenswrapper[4799]: I1010 18:41:00.809189 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 10 18:41:02 crc kubenswrapper[4799]: I1010 18:41:02.836495 4799 generic.go:334] "Generic (PLEG): container finished" podID="1ba70610-066c-4a5b-a16d-3555884bf1b2" containerID="0376599a520b337cee45e7cb7c26b951edddbd5a8b68411e2fdf17866b45a646" exitCode=0 Oct 10 18:41:02 crc kubenswrapper[4799]: I1010 18:41:02.837485 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-ltfpg" event={"ID":"1ba70610-066c-4a5b-a16d-3555884bf1b2","Type":"ContainerDied","Data":"0376599a520b337cee45e7cb7c26b951edddbd5a8b68411e2fdf17866b45a646"} Oct 10 18:41:02 crc kubenswrapper[4799]: I1010 18:41:02.842915 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hlgjf" event={"ID":"9e382578-8174-4dcb-81e1-d4a18fdd6f99","Type":"ContainerStarted","Data":"fd9394d2c6ebfcb53f755bfe6a3b099f1a16bc3e4d9fb592614ec36c4ddfb084"} Oct 10 18:41:03 crc kubenswrapper[4799]: I1010 18:41:03.860120 4799 generic.go:334] "Generic (PLEG): container finished" podID="9e382578-8174-4dcb-81e1-d4a18fdd6f99" containerID="fd9394d2c6ebfcb53f755bfe6a3b099f1a16bc3e4d9fb592614ec36c4ddfb084" exitCode=0 Oct 10 18:41:03 crc kubenswrapper[4799]: I1010 
18:41:03.860265 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hlgjf" event={"ID":"9e382578-8174-4dcb-81e1-d4a18fdd6f99","Type":"ContainerDied","Data":"fd9394d2c6ebfcb53f755bfe6a3b099f1a16bc3e4d9fb592614ec36c4ddfb084"} Oct 10 18:41:04 crc kubenswrapper[4799]: I1010 18:41:04.435656 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-ltfpg" Oct 10 18:41:04 crc kubenswrapper[4799]: I1010 18:41:04.575858 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7z82b\" (UniqueName: \"kubernetes.io/projected/1ba70610-066c-4a5b-a16d-3555884bf1b2-kube-api-access-7z82b\") pod \"1ba70610-066c-4a5b-a16d-3555884bf1b2\" (UID: \"1ba70610-066c-4a5b-a16d-3555884bf1b2\") " Oct 10 18:41:04 crc kubenswrapper[4799]: I1010 18:41:04.576177 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1ba70610-066c-4a5b-a16d-3555884bf1b2-ssh-key\") pod \"1ba70610-066c-4a5b-a16d-3555884bf1b2\" (UID: \"1ba70610-066c-4a5b-a16d-3555884bf1b2\") " Oct 10 18:41:04 crc kubenswrapper[4799]: I1010 18:41:04.576283 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1ba70610-066c-4a5b-a16d-3555884bf1b2-inventory\") pod \"1ba70610-066c-4a5b-a16d-3555884bf1b2\" (UID: \"1ba70610-066c-4a5b-a16d-3555884bf1b2\") " Oct 10 18:41:04 crc kubenswrapper[4799]: I1010 18:41:04.576453 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ba70610-066c-4a5b-a16d-3555884bf1b2-ovn-combined-ca-bundle\") pod \"1ba70610-066c-4a5b-a16d-3555884bf1b2\" (UID: \"1ba70610-066c-4a5b-a16d-3555884bf1b2\") " Oct 10 18:41:04 crc kubenswrapper[4799]: I1010 18:41:04.576529 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1ba70610-066c-4a5b-a16d-3555884bf1b2-ceph\") pod \"1ba70610-066c-4a5b-a16d-3555884bf1b2\" (UID: \"1ba70610-066c-4a5b-a16d-3555884bf1b2\") " Oct 10 18:41:04 crc kubenswrapper[4799]: I1010 18:41:04.576609 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/1ba70610-066c-4a5b-a16d-3555884bf1b2-ovncontroller-config-0\") pod \"1ba70610-066c-4a5b-a16d-3555884bf1b2\" (UID: \"1ba70610-066c-4a5b-a16d-3555884bf1b2\") " Oct 10 18:41:04 crc kubenswrapper[4799]: I1010 18:41:04.584293 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ba70610-066c-4a5b-a16d-3555884bf1b2-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "1ba70610-066c-4a5b-a16d-3555884bf1b2" (UID: "1ba70610-066c-4a5b-a16d-3555884bf1b2"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:41:04 crc kubenswrapper[4799]: I1010 18:41:04.587948 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ba70610-066c-4a5b-a16d-3555884bf1b2-ceph" (OuterVolumeSpecName: "ceph") pod "1ba70610-066c-4a5b-a16d-3555884bf1b2" (UID: "1ba70610-066c-4a5b-a16d-3555884bf1b2"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:41:04 crc kubenswrapper[4799]: I1010 18:41:04.588016 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ba70610-066c-4a5b-a16d-3555884bf1b2-kube-api-access-7z82b" (OuterVolumeSpecName: "kube-api-access-7z82b") pod "1ba70610-066c-4a5b-a16d-3555884bf1b2" (UID: "1ba70610-066c-4a5b-a16d-3555884bf1b2"). InnerVolumeSpecName "kube-api-access-7z82b". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:41:04 crc kubenswrapper[4799]: I1010 18:41:04.607496 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ba70610-066c-4a5b-a16d-3555884bf1b2-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "1ba70610-066c-4a5b-a16d-3555884bf1b2" (UID: "1ba70610-066c-4a5b-a16d-3555884bf1b2"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:41:04 crc kubenswrapper[4799]: I1010 18:41:04.617647 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ba70610-066c-4a5b-a16d-3555884bf1b2-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "1ba70610-066c-4a5b-a16d-3555884bf1b2" (UID: "1ba70610-066c-4a5b-a16d-3555884bf1b2"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:41:04 crc kubenswrapper[4799]: I1010 18:41:04.619844 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ba70610-066c-4a5b-a16d-3555884bf1b2-inventory" (OuterVolumeSpecName: "inventory") pod "1ba70610-066c-4a5b-a16d-3555884bf1b2" (UID: "1ba70610-066c-4a5b-a16d-3555884bf1b2"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:41:04 crc kubenswrapper[4799]: I1010 18:41:04.680230 4799 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ba70610-066c-4a5b-a16d-3555884bf1b2-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:41:04 crc kubenswrapper[4799]: I1010 18:41:04.680269 4799 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1ba70610-066c-4a5b-a16d-3555884bf1b2-ceph\") on node \"crc\" DevicePath \"\"" Oct 10 18:41:04 crc kubenswrapper[4799]: I1010 18:41:04.680279 4799 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/1ba70610-066c-4a5b-a16d-3555884bf1b2-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Oct 10 18:41:04 crc kubenswrapper[4799]: I1010 18:41:04.680288 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7z82b\" (UniqueName: \"kubernetes.io/projected/1ba70610-066c-4a5b-a16d-3555884bf1b2-kube-api-access-7z82b\") on node \"crc\" DevicePath \"\"" Oct 10 18:41:04 crc kubenswrapper[4799]: I1010 18:41:04.680297 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1ba70610-066c-4a5b-a16d-3555884bf1b2-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 10 18:41:04 crc kubenswrapper[4799]: I1010 18:41:04.680304 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1ba70610-066c-4a5b-a16d-3555884bf1b2-inventory\") on node \"crc\" DevicePath \"\"" Oct 10 18:41:04 crc kubenswrapper[4799]: I1010 18:41:04.877594 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-hlgjf" event={"ID":"9e382578-8174-4dcb-81e1-d4a18fdd6f99","Type":"ContainerStarted","Data":"884e9040a85da12d8bdd8afa24d243d5f61bd8b173437158e02491ddf6b9b70d"} Oct 10 18:41:04 crc kubenswrapper[4799]: I1010 18:41:04.881838 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-ltfpg" event={"ID":"1ba70610-066c-4a5b-a16d-3555884bf1b2","Type":"ContainerDied","Data":"cd9096b3cf16ab6ac268a8e929745bc27ca1a4b65c410171f3f2d089a48d591a"} Oct 10 18:41:04 crc kubenswrapper[4799]: I1010 18:41:04.881884 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cd9096b3cf16ab6ac268a8e929745bc27ca1a4b65c410171f3f2d089a48d591a" Oct 10 18:41:04 crc kubenswrapper[4799]: I1010 18:41:04.881908 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-ltfpg" Oct 10 18:41:04 crc kubenswrapper[4799]: I1010 18:41:04.928571 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hlgjf" podStartSLOduration=2.361343243 podStartE2EDuration="5.928550983s" podCreationTimestamp="2025-10-10 18:40:59 +0000 UTC" firstStartedPulling="2025-10-10 18:41:00.808954407 +0000 UTC m=+7754.317278512" lastFinishedPulling="2025-10-10 18:41:04.376162137 +0000 UTC m=+7757.884486252" observedRunningTime="2025-10-10 18:41:04.914338865 +0000 UTC m=+7758.422663010" watchObservedRunningTime="2025-10-10 18:41:04.928550983 +0000 UTC m=+7758.436875108" Oct 10 18:41:04 crc kubenswrapper[4799]: I1010 18:41:04.994374 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-2pzds"] Oct 10 18:41:04 crc kubenswrapper[4799]: E1010 18:41:04.994944 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ba70610-066c-4a5b-a16d-3555884bf1b2" containerName="ovn-openstack-openstack-cell1" Oct 10 18:41:04 crc kubenswrapper[4799]: I1010 18:41:04.994966 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ba70610-066c-4a5b-a16d-3555884bf1b2" containerName="ovn-openstack-openstack-cell1" Oct 10 18:41:04 crc kubenswrapper[4799]: I1010 18:41:04.995215 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ba70610-066c-4a5b-a16d-3555884bf1b2" containerName="ovn-openstack-openstack-cell1" Oct 10 18:41:04 crc kubenswrapper[4799]: I1010 18:41:04.996408 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-2pzds" Oct 10 18:41:04 crc kubenswrapper[4799]: I1010 18:41:04.998875 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 10 18:41:04 crc kubenswrapper[4799]: I1010 18:41:04.999102 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 10 18:41:04 crc kubenswrapper[4799]: I1010 18:41:04.999230 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 10 18:41:05 crc kubenswrapper[4799]: I1010 18:41:05.005335 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Oct 10 18:41:05 crc kubenswrapper[4799]: I1010 18:41:05.005407 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-rdlhr" Oct 10 18:41:05 crc kubenswrapper[4799]: I1010 18:41:05.008049 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-2pzds"] Oct 10 18:41:05 crc kubenswrapper[4799]: I1010 18:41:05.008710 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Oct 10 18:41:05 crc kubenswrapper[4799]: I1010 18:41:05.095725 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-2pzds\" (UID: \"6851d838-6f19-4e28-87d6-68e79ff22050\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-2pzds" Oct 10 18:41:05 crc kubenswrapper[4799]: I1010 18:41:05.096438 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gs82\" (UniqueName: \"kubernetes.io/projected/6851d838-6f19-4e28-87d6-68e79ff22050-kube-api-access-5gs82\") pod \"neutron-metadata-openstack-openstack-cell1-2pzds\" (UID: \"6851d838-6f19-4e28-87d6-68e79ff22050\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-2pzds" Oct 10 18:41:05 crc kubenswrapper[4799]: I1010 18:41:05.096617 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-ssh-key\") pod \"neutron-metadata-openstack-openstack-cell1-2pzds\" (UID: \"6851d838-6f19-4e28-87d6-68e79ff22050\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-2pzds" Oct 10 18:41:05 crc kubenswrapper[4799]: I1010 18:41:05.096780 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-2pzds\" (UID: \"6851d838-6f19-4e28-87d6-68e79ff22050\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-2pzds" Oct 10 18:41:05 crc kubenswrapper[4799]: I1010 18:41:05.097014 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-2pzds\" (UID: 
\"6851d838-6f19-4e28-87d6-68e79ff22050\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-2pzds" Oct 10 18:41:05 crc kubenswrapper[4799]: I1010 18:41:05.097293 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-2pzds\" (UID: \"6851d838-6f19-4e28-87d6-68e79ff22050\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-2pzds" Oct 10 18:41:05 crc kubenswrapper[4799]: I1010 18:41:05.097443 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-2pzds\" (UID: \"6851d838-6f19-4e28-87d6-68e79ff22050\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-2pzds" Oct 10 18:41:05 crc kubenswrapper[4799]: I1010 18:41:05.199290 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gs82\" (UniqueName: \"kubernetes.io/projected/6851d838-6f19-4e28-87d6-68e79ff22050-kube-api-access-5gs82\") pod \"neutron-metadata-openstack-openstack-cell1-2pzds\" (UID: \"6851d838-6f19-4e28-87d6-68e79ff22050\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-2pzds" Oct 10 18:41:05 crc kubenswrapper[4799]: I1010 18:41:05.199348 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-ssh-key\") pod \"neutron-metadata-openstack-openstack-cell1-2pzds\" (UID: \"6851d838-6f19-4e28-87d6-68e79ff22050\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-2pzds" Oct 10 18:41:05 crc kubenswrapper[4799]: I1010 18:41:05.199501 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-2pzds\" (UID: \"6851d838-6f19-4e28-87d6-68e79ff22050\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-2pzds" Oct 10 18:41:05 crc kubenswrapper[4799]: I1010 18:41:05.199565 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-2pzds\" (UID: \"6851d838-6f19-4e28-87d6-68e79ff22050\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-2pzds" Oct 10 18:41:05 crc kubenswrapper[4799]: I1010 18:41:05.199607 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-2pzds\" (UID: \"6851d838-6f19-4e28-87d6-68e79ff22050\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-2pzds" Oct 10 18:41:05 crc kubenswrapper[4799]: I1010 18:41:05.199650 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-2pzds\" (UID: 
\"6851d838-6f19-4e28-87d6-68e79ff22050\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-2pzds" Oct 10 18:41:05 crc kubenswrapper[4799]: I1010 18:41:05.199789 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-2pzds\" (UID: \"6851d838-6f19-4e28-87d6-68e79ff22050\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-2pzds" Oct 10 18:41:05 crc kubenswrapper[4799]: I1010 18:41:05.207080 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-2pzds\" (UID: \"6851d838-6f19-4e28-87d6-68e79ff22050\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-2pzds" Oct 10 18:41:05 crc kubenswrapper[4799]: I1010 18:41:05.207078 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-2pzds\" (UID: \"6851d838-6f19-4e28-87d6-68e79ff22050\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-2pzds" Oct 10 18:41:05 crc kubenswrapper[4799]: I1010 18:41:05.207141 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-2pzds\" (UID: \"6851d838-6f19-4e28-87d6-68e79ff22050\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-2pzds" Oct 10 18:41:05 crc kubenswrapper[4799]: I1010 18:41:05.208603 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-2pzds\" (UID: \"6851d838-6f19-4e28-87d6-68e79ff22050\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-2pzds" Oct 10 18:41:05 crc kubenswrapper[4799]: I1010 18:41:05.210248 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-ssh-key\") pod \"neutron-metadata-openstack-openstack-cell1-2pzds\" (UID: \"6851d838-6f19-4e28-87d6-68e79ff22050\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-2pzds" Oct 10 18:41:05 crc kubenswrapper[4799]: I1010 18:41:05.211731 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-2pzds\" (UID: \"6851d838-6f19-4e28-87d6-68e79ff22050\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-2pzds" Oct 10 18:41:05 crc kubenswrapper[4799]: I1010 18:41:05.229775 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gs82\" (UniqueName: \"kubernetes.io/projected/6851d838-6f19-4e28-87d6-68e79ff22050-kube-api-access-5gs82\") pod \"neutron-metadata-openstack-openstack-cell1-2pzds\" (UID: \"6851d838-6f19-4e28-87d6-68e79ff22050\") " 
pod="openstack/neutron-metadata-openstack-openstack-cell1-2pzds" Oct 10 18:41:05 crc kubenswrapper[4799]: I1010 18:41:05.314260 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-2pzds" Oct 10 18:41:06 crc kubenswrapper[4799]: I1010 18:41:06.076749 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-2pzds"] Oct 10 18:41:06 crc kubenswrapper[4799]: I1010 18:41:06.403914 4799 scope.go:117] "RemoveContainer" containerID="5d726047dc458172d088ab3478c0245a0f320ffb1060865307a391a8a23b1065" Oct 10 18:41:06 crc kubenswrapper[4799]: E1010 18:41:06.404381 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:41:06 crc kubenswrapper[4799]: I1010 18:41:06.907200 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-2pzds" event={"ID":"6851d838-6f19-4e28-87d6-68e79ff22050","Type":"ContainerStarted","Data":"04f246269470c874ed2432e8789b175d9de36f1fe95d1fa6fe95bf1df7541c79"} Oct 10 18:41:06 crc kubenswrapper[4799]: I1010 18:41:06.907963 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-2pzds" event={"ID":"6851d838-6f19-4e28-87d6-68e79ff22050","Type":"ContainerStarted","Data":"27b8d85044df1c87c4d216c0c75f959f69051895b4867dc02e7b3ce02d4d5e09"} Oct 10 18:41:06 crc kubenswrapper[4799]: I1010 18:41:06.938959 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-openstack-openstack-cell1-2pzds" podStartSLOduration=2.497306856 podStartE2EDuration="2.938935352s" podCreationTimestamp="2025-10-10 18:41:04 +0000 UTC" firstStartedPulling="2025-10-10 18:41:06.110484232 +0000 UTC m=+7759.618808347" lastFinishedPulling="2025-10-10 18:41:06.552112718 +0000 UTC m=+7760.060436843" observedRunningTime="2025-10-10 18:41:06.928885186 +0000 UTC m=+7760.437209341" watchObservedRunningTime="2025-10-10 18:41:06.938935352 +0000 UTC m=+7760.447259477" Oct 10 18:41:09 crc kubenswrapper[4799]: I1010 18:41:09.607094 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hlgjf" Oct 10 18:41:09 crc kubenswrapper[4799]: I1010 18:41:09.607689 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hlgjf" Oct 10 18:41:09 crc kubenswrapper[4799]: I1010 18:41:09.685163 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hlgjf" Oct 10 18:41:10 crc kubenswrapper[4799]: I1010 18:41:10.034467 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hlgjf" Oct 10 18:41:10 crc kubenswrapper[4799]: I1010 18:41:10.116003 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hlgjf"] Oct 10 18:41:11 crc kubenswrapper[4799]: I1010 18:41:11.970458 4799 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/community-operators-hlgjf" podUID="9e382578-8174-4dcb-81e1-d4a18fdd6f99" containerName="registry-server" containerID="cri-o://884e9040a85da12d8bdd8afa24d243d5f61bd8b173437158e02491ddf6b9b70d" gracePeriod=2 Oct 10 18:41:12 crc kubenswrapper[4799]: I1010 18:41:12.527727 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hlgjf" Oct 10 18:41:12 crc kubenswrapper[4799]: I1010 18:41:12.687211 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7p5mp\" (UniqueName: \"kubernetes.io/projected/9e382578-8174-4dcb-81e1-d4a18fdd6f99-kube-api-access-7p5mp\") pod \"9e382578-8174-4dcb-81e1-d4a18fdd6f99\" (UID: \"9e382578-8174-4dcb-81e1-d4a18fdd6f99\") " Oct 10 18:41:12 crc kubenswrapper[4799]: I1010 18:41:12.687556 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9e382578-8174-4dcb-81e1-d4a18fdd6f99-catalog-content\") pod \"9e382578-8174-4dcb-81e1-d4a18fdd6f99\" (UID: \"9e382578-8174-4dcb-81e1-d4a18fdd6f99\") " Oct 10 18:41:12 crc kubenswrapper[4799]: I1010 18:41:12.687618 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9e382578-8174-4dcb-81e1-d4a18fdd6f99-utilities\") pod \"9e382578-8174-4dcb-81e1-d4a18fdd6f99\" (UID: \"9e382578-8174-4dcb-81e1-d4a18fdd6f99\") " Oct 10 18:41:12 crc kubenswrapper[4799]: I1010 18:41:12.688553 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9e382578-8174-4dcb-81e1-d4a18fdd6f99-utilities" (OuterVolumeSpecName: "utilities") pod "9e382578-8174-4dcb-81e1-d4a18fdd6f99" (UID: "9e382578-8174-4dcb-81e1-d4a18fdd6f99"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:41:12 crc kubenswrapper[4799]: I1010 18:41:12.707134 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e382578-8174-4dcb-81e1-d4a18fdd6f99-kube-api-access-7p5mp" (OuterVolumeSpecName: "kube-api-access-7p5mp") pod "9e382578-8174-4dcb-81e1-d4a18fdd6f99" (UID: "9e382578-8174-4dcb-81e1-d4a18fdd6f99"). InnerVolumeSpecName "kube-api-access-7p5mp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:41:12 crc kubenswrapper[4799]: I1010 18:41:12.765020 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9e382578-8174-4dcb-81e1-d4a18fdd6f99-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9e382578-8174-4dcb-81e1-d4a18fdd6f99" (UID: "9e382578-8174-4dcb-81e1-d4a18fdd6f99"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:41:12 crc kubenswrapper[4799]: I1010 18:41:12.790589 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9e382578-8174-4dcb-81e1-d4a18fdd6f99-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 18:41:12 crc kubenswrapper[4799]: I1010 18:41:12.790620 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9e382578-8174-4dcb-81e1-d4a18fdd6f99-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 18:41:12 crc kubenswrapper[4799]: I1010 18:41:12.790635 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7p5mp\" (UniqueName: \"kubernetes.io/projected/9e382578-8174-4dcb-81e1-d4a18fdd6f99-kube-api-access-7p5mp\") on node \"crc\" DevicePath \"\"" Oct 10 18:41:13 crc kubenswrapper[4799]: I1010 18:41:13.000347 4799 generic.go:334] "Generic (PLEG): container finished" podID="9e382578-8174-4dcb-81e1-d4a18fdd6f99" containerID="884e9040a85da12d8bdd8afa24d243d5f61bd8b173437158e02491ddf6b9b70d" exitCode=0 Oct 10 18:41:13 crc kubenswrapper[4799]: I1010 18:41:13.000407 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hlgjf" event={"ID":"9e382578-8174-4dcb-81e1-d4a18fdd6f99","Type":"ContainerDied","Data":"884e9040a85da12d8bdd8afa24d243d5f61bd8b173437158e02491ddf6b9b70d"} Oct 10 18:41:13 crc kubenswrapper[4799]: I1010 18:41:13.000446 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hlgjf" event={"ID":"9e382578-8174-4dcb-81e1-d4a18fdd6f99","Type":"ContainerDied","Data":"49aaba0ede53b0f80a1c44ce1c9b775c7c79052528b05da1ec4b0919df58b3af"} Oct 10 18:41:13 crc kubenswrapper[4799]: I1010 18:41:13.000472 4799 scope.go:117] "RemoveContainer" containerID="884e9040a85da12d8bdd8afa24d243d5f61bd8b173437158e02491ddf6b9b70d" Oct 10 18:41:13 crc kubenswrapper[4799]: I1010 18:41:13.000473 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hlgjf" Oct 10 18:41:13 crc kubenswrapper[4799]: I1010 18:41:13.054454 4799 scope.go:117] "RemoveContainer" containerID="fd9394d2c6ebfcb53f755bfe6a3b099f1a16bc3e4d9fb592614ec36c4ddfb084" Oct 10 18:41:13 crc kubenswrapper[4799]: I1010 18:41:13.054715 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hlgjf"] Oct 10 18:41:13 crc kubenswrapper[4799]: I1010 18:41:13.067093 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-hlgjf"] Oct 10 18:41:13 crc kubenswrapper[4799]: I1010 18:41:13.091436 4799 scope.go:117] "RemoveContainer" containerID="eadc94b5c4e0f93d3b32caea9af724560136e9bf1b970cadb50bccc91a7ee978" Oct 10 18:41:13 crc kubenswrapper[4799]: I1010 18:41:13.149960 4799 scope.go:117] "RemoveContainer" containerID="884e9040a85da12d8bdd8afa24d243d5f61bd8b173437158e02491ddf6b9b70d" Oct 10 18:41:13 crc kubenswrapper[4799]: E1010 18:41:13.150705 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"884e9040a85da12d8bdd8afa24d243d5f61bd8b173437158e02491ddf6b9b70d\": container with ID starting with 884e9040a85da12d8bdd8afa24d243d5f61bd8b173437158e02491ddf6b9b70d not found: ID does not exist" containerID="884e9040a85da12d8bdd8afa24d243d5f61bd8b173437158e02491ddf6b9b70d" Oct 10 18:41:13 crc kubenswrapper[4799]: I1010 18:41:13.150815 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"884e9040a85da12d8bdd8afa24d243d5f61bd8b173437158e02491ddf6b9b70d"} err="failed to get container status \"884e9040a85da12d8bdd8afa24d243d5f61bd8b173437158e02491ddf6b9b70d\": rpc error: code = NotFound desc = could not find container \"884e9040a85da12d8bdd8afa24d243d5f61bd8b173437158e02491ddf6b9b70d\": container with ID starting with 884e9040a85da12d8bdd8afa24d243d5f61bd8b173437158e02491ddf6b9b70d not found: ID does not exist" Oct 10 18:41:13 crc kubenswrapper[4799]: I1010 18:41:13.150865 4799 scope.go:117] "RemoveContainer" containerID="fd9394d2c6ebfcb53f755bfe6a3b099f1a16bc3e4d9fb592614ec36c4ddfb084" Oct 10 18:41:13 crc kubenswrapper[4799]: E1010 18:41:13.151614 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd9394d2c6ebfcb53f755bfe6a3b099f1a16bc3e4d9fb592614ec36c4ddfb084\": container with ID starting with fd9394d2c6ebfcb53f755bfe6a3b099f1a16bc3e4d9fb592614ec36c4ddfb084 not found: ID does not exist" containerID="fd9394d2c6ebfcb53f755bfe6a3b099f1a16bc3e4d9fb592614ec36c4ddfb084" Oct 10 18:41:13 crc kubenswrapper[4799]: I1010 18:41:13.151671 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd9394d2c6ebfcb53f755bfe6a3b099f1a16bc3e4d9fb592614ec36c4ddfb084"} err="failed to get container status \"fd9394d2c6ebfcb53f755bfe6a3b099f1a16bc3e4d9fb592614ec36c4ddfb084\": rpc error: code = NotFound desc = could not find container \"fd9394d2c6ebfcb53f755bfe6a3b099f1a16bc3e4d9fb592614ec36c4ddfb084\": container with ID starting with fd9394d2c6ebfcb53f755bfe6a3b099f1a16bc3e4d9fb592614ec36c4ddfb084 not found: ID does not exist" Oct 10 18:41:13 crc kubenswrapper[4799]: I1010 18:41:13.151705 4799 scope.go:117] "RemoveContainer" containerID="eadc94b5c4e0f93d3b32caea9af724560136e9bf1b970cadb50bccc91a7ee978" Oct 10 18:41:13 crc kubenswrapper[4799]: E1010 18:41:13.152278 4799 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"eadc94b5c4e0f93d3b32caea9af724560136e9bf1b970cadb50bccc91a7ee978\": container with ID starting with eadc94b5c4e0f93d3b32caea9af724560136e9bf1b970cadb50bccc91a7ee978 not found: ID does not exist" containerID="eadc94b5c4e0f93d3b32caea9af724560136e9bf1b970cadb50bccc91a7ee978" Oct 10 18:41:13 crc kubenswrapper[4799]: I1010 18:41:13.152307 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eadc94b5c4e0f93d3b32caea9af724560136e9bf1b970cadb50bccc91a7ee978"} err="failed to get container status \"eadc94b5c4e0f93d3b32caea9af724560136e9bf1b970cadb50bccc91a7ee978\": rpc error: code = NotFound desc = could not find container \"eadc94b5c4e0f93d3b32caea9af724560136e9bf1b970cadb50bccc91a7ee978\": container with ID starting with eadc94b5c4e0f93d3b32caea9af724560136e9bf1b970cadb50bccc91a7ee978 not found: ID does not exist" Oct 10 18:41:13 crc kubenswrapper[4799]: I1010 18:41:13.425079 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e382578-8174-4dcb-81e1-d4a18fdd6f99" path="/var/lib/kubelet/pods/9e382578-8174-4dcb-81e1-d4a18fdd6f99/volumes" Oct 10 18:41:21 crc kubenswrapper[4799]: I1010 18:41:21.403208 4799 scope.go:117] "RemoveContainer" containerID="5d726047dc458172d088ab3478c0245a0f320ffb1060865307a391a8a23b1065" Oct 10 18:41:21 crc kubenswrapper[4799]: E1010 18:41:21.404227 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:41:35 crc kubenswrapper[4799]: I1010 18:41:35.402661 4799 scope.go:117] "RemoveContainer" containerID="5d726047dc458172d088ab3478c0245a0f320ffb1060865307a391a8a23b1065" Oct 10 18:41:35 crc kubenswrapper[4799]: E1010 18:41:35.404122 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:41:48 crc kubenswrapper[4799]: I1010 18:41:48.405939 4799 scope.go:117] "RemoveContainer" containerID="5d726047dc458172d088ab3478c0245a0f320ffb1060865307a391a8a23b1065" Oct 10 18:41:48 crc kubenswrapper[4799]: E1010 18:41:48.406902 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:41:59 crc kubenswrapper[4799]: I1010 18:41:59.409252 4799 scope.go:117] "RemoveContainer" containerID="5d726047dc458172d088ab3478c0245a0f320ffb1060865307a391a8a23b1065" Oct 10 18:41:59 crc kubenswrapper[4799]: E1010 18:41:59.410214 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:42:03 crc kubenswrapper[4799]: I1010 18:42:03.697316 4799 generic.go:334] "Generic (PLEG): container finished" podID="6851d838-6f19-4e28-87d6-68e79ff22050" containerID="04f246269470c874ed2432e8789b175d9de36f1fe95d1fa6fe95bf1df7541c79" exitCode=0 Oct 10 18:42:03 crc kubenswrapper[4799]: I1010 18:42:03.697428 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-2pzds" event={"ID":"6851d838-6f19-4e28-87d6-68e79ff22050","Type":"ContainerDied","Data":"04f246269470c874ed2432e8789b175d9de36f1fe95d1fa6fe95bf1df7541c79"} Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.262960 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-2pzds" Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.324676 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-neutron-ovn-metadata-agent-neutron-config-0\") pod \"6851d838-6f19-4e28-87d6-68e79ff22050\" (UID: \"6851d838-6f19-4e28-87d6-68e79ff22050\") " Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.324782 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-ceph\") pod \"6851d838-6f19-4e28-87d6-68e79ff22050\" (UID: \"6851d838-6f19-4e28-87d6-68e79ff22050\") " Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.334031 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-ceph" (OuterVolumeSpecName: "ceph") pod "6851d838-6f19-4e28-87d6-68e79ff22050" (UID: "6851d838-6f19-4e28-87d6-68e79ff22050"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.377606 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "6851d838-6f19-4e28-87d6-68e79ff22050" (UID: "6851d838-6f19-4e28-87d6-68e79ff22050"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.429820 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-inventory\") pod \"6851d838-6f19-4e28-87d6-68e79ff22050\" (UID: \"6851d838-6f19-4e28-87d6-68e79ff22050\") " Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.430123 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-nova-metadata-neutron-config-0\") pod \"6851d838-6f19-4e28-87d6-68e79ff22050\" (UID: \"6851d838-6f19-4e28-87d6-68e79ff22050\") " Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.430172 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-ssh-key\") pod \"6851d838-6f19-4e28-87d6-68e79ff22050\" (UID: \"6851d838-6f19-4e28-87d6-68e79ff22050\") " Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.430213 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-neutron-metadata-combined-ca-bundle\") pod \"6851d838-6f19-4e28-87d6-68e79ff22050\" (UID: \"6851d838-6f19-4e28-87d6-68e79ff22050\") " Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.430276 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5gs82\" (UniqueName: \"kubernetes.io/projected/6851d838-6f19-4e28-87d6-68e79ff22050-kube-api-access-5gs82\") pod \"6851d838-6f19-4e28-87d6-68e79ff22050\" (UID: \"6851d838-6f19-4e28-87d6-68e79ff22050\") " Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.431781 4799 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.431880 4799 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-ceph\") on node \"crc\" DevicePath \"\"" Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.434630 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6851d838-6f19-4e28-87d6-68e79ff22050-kube-api-access-5gs82" (OuterVolumeSpecName: "kube-api-access-5gs82") pod "6851d838-6f19-4e28-87d6-68e79ff22050" (UID: "6851d838-6f19-4e28-87d6-68e79ff22050"). InnerVolumeSpecName "kube-api-access-5gs82". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.442570 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "6851d838-6f19-4e28-87d6-68e79ff22050" (UID: "6851d838-6f19-4e28-87d6-68e79ff22050"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.463802 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6851d838-6f19-4e28-87d6-68e79ff22050" (UID: "6851d838-6f19-4e28-87d6-68e79ff22050"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.467730 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-inventory" (OuterVolumeSpecName: "inventory") pod "6851d838-6f19-4e28-87d6-68e79ff22050" (UID: "6851d838-6f19-4e28-87d6-68e79ff22050"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.469535 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "6851d838-6f19-4e28-87d6-68e79ff22050" (UID: "6851d838-6f19-4e28-87d6-68e79ff22050"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.535102 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.535132 4799 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.535145 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5gs82\" (UniqueName: \"kubernetes.io/projected/6851d838-6f19-4e28-87d6-68e79ff22050-kube-api-access-5gs82\") on node \"crc\" DevicePath \"\"" Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.535156 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-inventory\") on node \"crc\" DevicePath \"\"" Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.535164 4799 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6851d838-6f19-4e28-87d6-68e79ff22050-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.725795 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-2pzds" event={"ID":"6851d838-6f19-4e28-87d6-68e79ff22050","Type":"ContainerDied","Data":"27b8d85044df1c87c4d216c0c75f959f69051895b4867dc02e7b3ce02d4d5e09"} Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.725862 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="27b8d85044df1c87c4d216c0c75f959f69051895b4867dc02e7b3ce02d4d5e09" Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.725970 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-2pzds" Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.867590 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-9l9vn"] Oct 10 18:42:05 crc kubenswrapper[4799]: E1010 18:42:05.868485 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e382578-8174-4dcb-81e1-d4a18fdd6f99" containerName="extract-content" Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.868516 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e382578-8174-4dcb-81e1-d4a18fdd6f99" containerName="extract-content" Oct 10 18:42:05 crc kubenswrapper[4799]: E1010 18:42:05.868545 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e382578-8174-4dcb-81e1-d4a18fdd6f99" containerName="extract-utilities" Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.868559 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e382578-8174-4dcb-81e1-d4a18fdd6f99" containerName="extract-utilities" Oct 10 18:42:05 crc kubenswrapper[4799]: E1010 18:42:05.868618 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e382578-8174-4dcb-81e1-d4a18fdd6f99" containerName="registry-server" Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.868632 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e382578-8174-4dcb-81e1-d4a18fdd6f99" containerName="registry-server" Oct 10 18:42:05 crc kubenswrapper[4799]: E1010 18:42:05.868678 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6851d838-6f19-4e28-87d6-68e79ff22050" containerName="neutron-metadata-openstack-openstack-cell1" Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.868692 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="6851d838-6f19-4e28-87d6-68e79ff22050" containerName="neutron-metadata-openstack-openstack-cell1" Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.869139 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="6851d838-6f19-4e28-87d6-68e79ff22050" containerName="neutron-metadata-openstack-openstack-cell1" Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.869174 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e382578-8174-4dcb-81e1-d4a18fdd6f99" containerName="registry-server" Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.870474 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-9l9vn" Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.873063 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-rdlhr" Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.873877 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.874290 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.874452 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.878617 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 10 18:42:05 crc kubenswrapper[4799]: I1010 18:42:05.901713 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-9l9vn"] Oct 10 18:42:06 crc kubenswrapper[4799]: I1010 18:42:06.047383 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-ceph\") pod \"libvirt-openstack-openstack-cell1-9l9vn\" (UID: \"87e27a02-1f1f-4d72-be1d-4662e43bb0e3\") " pod="openstack/libvirt-openstack-openstack-cell1-9l9vn" Oct 10 18:42:06 crc kubenswrapper[4799]: I1010 18:42:06.047671 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqh6v\" (UniqueName: \"kubernetes.io/projected/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-kube-api-access-lqh6v\") pod \"libvirt-openstack-openstack-cell1-9l9vn\" (UID: \"87e27a02-1f1f-4d72-be1d-4662e43bb0e3\") " pod="openstack/libvirt-openstack-openstack-cell1-9l9vn" Oct 10 18:42:06 crc kubenswrapper[4799]: I1010 18:42:06.047784 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-9l9vn\" (UID: \"87e27a02-1f1f-4d72-be1d-4662e43bb0e3\") " pod="openstack/libvirt-openstack-openstack-cell1-9l9vn" Oct 10 18:42:06 crc kubenswrapper[4799]: I1010 18:42:06.047898 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-ssh-key\") pod \"libvirt-openstack-openstack-cell1-9l9vn\" (UID: \"87e27a02-1f1f-4d72-be1d-4662e43bb0e3\") " pod="openstack/libvirt-openstack-openstack-cell1-9l9vn" Oct 10 18:42:06 crc kubenswrapper[4799]: I1010 18:42:06.047984 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-9l9vn\" (UID: \"87e27a02-1f1f-4d72-be1d-4662e43bb0e3\") " pod="openstack/libvirt-openstack-openstack-cell1-9l9vn" Oct 10 18:42:06 crc kubenswrapper[4799]: I1010 18:42:06.048089 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-inventory\") pod 
\"libvirt-openstack-openstack-cell1-9l9vn\" (UID: \"87e27a02-1f1f-4d72-be1d-4662e43bb0e3\") " pod="openstack/libvirt-openstack-openstack-cell1-9l9vn" Oct 10 18:42:06 crc kubenswrapper[4799]: I1010 18:42:06.150304 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-inventory\") pod \"libvirt-openstack-openstack-cell1-9l9vn\" (UID: \"87e27a02-1f1f-4d72-be1d-4662e43bb0e3\") " pod="openstack/libvirt-openstack-openstack-cell1-9l9vn" Oct 10 18:42:06 crc kubenswrapper[4799]: I1010 18:42:06.150519 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-ceph\") pod \"libvirt-openstack-openstack-cell1-9l9vn\" (UID: \"87e27a02-1f1f-4d72-be1d-4662e43bb0e3\") " pod="openstack/libvirt-openstack-openstack-cell1-9l9vn" Oct 10 18:42:06 crc kubenswrapper[4799]: I1010 18:42:06.150559 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqh6v\" (UniqueName: \"kubernetes.io/projected/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-kube-api-access-lqh6v\") pod \"libvirt-openstack-openstack-cell1-9l9vn\" (UID: \"87e27a02-1f1f-4d72-be1d-4662e43bb0e3\") " pod="openstack/libvirt-openstack-openstack-cell1-9l9vn" Oct 10 18:42:06 crc kubenswrapper[4799]: I1010 18:42:06.150649 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-9l9vn\" (UID: \"87e27a02-1f1f-4d72-be1d-4662e43bb0e3\") " pod="openstack/libvirt-openstack-openstack-cell1-9l9vn" Oct 10 18:42:06 crc kubenswrapper[4799]: I1010 18:42:06.150836 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-ssh-key\") pod \"libvirt-openstack-openstack-cell1-9l9vn\" (UID: \"87e27a02-1f1f-4d72-be1d-4662e43bb0e3\") " pod="openstack/libvirt-openstack-openstack-cell1-9l9vn" Oct 10 18:42:06 crc kubenswrapper[4799]: I1010 18:42:06.150911 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-9l9vn\" (UID: \"87e27a02-1f1f-4d72-be1d-4662e43bb0e3\") " pod="openstack/libvirt-openstack-openstack-cell1-9l9vn" Oct 10 18:42:06 crc kubenswrapper[4799]: I1010 18:42:06.155839 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-ceph\") pod \"libvirt-openstack-openstack-cell1-9l9vn\" (UID: \"87e27a02-1f1f-4d72-be1d-4662e43bb0e3\") " pod="openstack/libvirt-openstack-openstack-cell1-9l9vn" Oct 10 18:42:06 crc kubenswrapper[4799]: I1010 18:42:06.156371 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-ssh-key\") pod \"libvirt-openstack-openstack-cell1-9l9vn\" (UID: \"87e27a02-1f1f-4d72-be1d-4662e43bb0e3\") " pod="openstack/libvirt-openstack-openstack-cell1-9l9vn" Oct 10 18:42:06 crc kubenswrapper[4799]: I1010 18:42:06.156625 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-9l9vn\" (UID: \"87e27a02-1f1f-4d72-be1d-4662e43bb0e3\") " pod="openstack/libvirt-openstack-openstack-cell1-9l9vn" Oct 10 18:42:06 crc kubenswrapper[4799]: I1010 18:42:06.158066 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-9l9vn\" (UID: \"87e27a02-1f1f-4d72-be1d-4662e43bb0e3\") " pod="openstack/libvirt-openstack-openstack-cell1-9l9vn" Oct 10 18:42:06 crc kubenswrapper[4799]: I1010 18:42:06.161392 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-inventory\") pod \"libvirt-openstack-openstack-cell1-9l9vn\" (UID: \"87e27a02-1f1f-4d72-be1d-4662e43bb0e3\") " pod="openstack/libvirt-openstack-openstack-cell1-9l9vn" Oct 10 18:42:06 crc kubenswrapper[4799]: I1010 18:42:06.187909 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqh6v\" (UniqueName: \"kubernetes.io/projected/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-kube-api-access-lqh6v\") pod \"libvirt-openstack-openstack-cell1-9l9vn\" (UID: \"87e27a02-1f1f-4d72-be1d-4662e43bb0e3\") " pod="openstack/libvirt-openstack-openstack-cell1-9l9vn" Oct 10 18:42:06 crc kubenswrapper[4799]: I1010 18:42:06.193258 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-9l9vn" Oct 10 18:42:06 crc kubenswrapper[4799]: I1010 18:42:06.924789 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-9l9vn"] Oct 10 18:42:06 crc kubenswrapper[4799]: W1010 18:42:06.930332 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod87e27a02_1f1f_4d72_be1d_4662e43bb0e3.slice/crio-7952367432f067cb9aa51e4114e5006163587b9f90857a3c50e8d9ef2e784dcd WatchSource:0}: Error finding container 7952367432f067cb9aa51e4114e5006163587b9f90857a3c50e8d9ef2e784dcd: Status 404 returned error can't find the container with id 7952367432f067cb9aa51e4114e5006163587b9f90857a3c50e8d9ef2e784dcd Oct 10 18:42:07 crc kubenswrapper[4799]: I1010 18:42:07.757184 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-9l9vn" event={"ID":"87e27a02-1f1f-4d72-be1d-4662e43bb0e3","Type":"ContainerStarted","Data":"7952367432f067cb9aa51e4114e5006163587b9f90857a3c50e8d9ef2e784dcd"} Oct 10 18:42:08 crc kubenswrapper[4799]: I1010 18:42:08.773526 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-9l9vn" event={"ID":"87e27a02-1f1f-4d72-be1d-4662e43bb0e3","Type":"ContainerStarted","Data":"5d316d24b40e6e7e2a8358df044103eaf6b1bf19a4ee0aa22dff891588fd0676"} Oct 10 18:42:08 crc kubenswrapper[4799]: I1010 18:42:08.805066 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-openstack-openstack-cell1-9l9vn" podStartSLOduration=3.283814203 podStartE2EDuration="3.805032366s" podCreationTimestamp="2025-10-10 18:42:05 +0000 UTC" firstStartedPulling="2025-10-10 18:42:06.933783722 +0000 UTC m=+7820.442107847" lastFinishedPulling="2025-10-10 18:42:07.455001865 +0000 UTC m=+7820.963326010" observedRunningTime="2025-10-10 18:42:08.793096744 +0000 UTC 
m=+7822.301420879" watchObservedRunningTime="2025-10-10 18:42:08.805032366 +0000 UTC m=+7822.313356521" Oct 10 18:42:10 crc kubenswrapper[4799]: I1010 18:42:10.197610 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-lq8gg"] Oct 10 18:42:10 crc kubenswrapper[4799]: I1010 18:42:10.206045 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lq8gg" Oct 10 18:42:10 crc kubenswrapper[4799]: I1010 18:42:10.210574 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lq8gg"] Oct 10 18:42:10 crc kubenswrapper[4799]: I1010 18:42:10.292045 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8n6wb\" (UniqueName: \"kubernetes.io/projected/4397748c-7aab-4231-abd4-44098d5b3168-kube-api-access-8n6wb\") pod \"redhat-marketplace-lq8gg\" (UID: \"4397748c-7aab-4231-abd4-44098d5b3168\") " pod="openshift-marketplace/redhat-marketplace-lq8gg" Oct 10 18:42:10 crc kubenswrapper[4799]: I1010 18:42:10.292200 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4397748c-7aab-4231-abd4-44098d5b3168-utilities\") pod \"redhat-marketplace-lq8gg\" (UID: \"4397748c-7aab-4231-abd4-44098d5b3168\") " pod="openshift-marketplace/redhat-marketplace-lq8gg" Oct 10 18:42:10 crc kubenswrapper[4799]: I1010 18:42:10.292579 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4397748c-7aab-4231-abd4-44098d5b3168-catalog-content\") pod \"redhat-marketplace-lq8gg\" (UID: \"4397748c-7aab-4231-abd4-44098d5b3168\") " pod="openshift-marketplace/redhat-marketplace-lq8gg" Oct 10 18:42:10 crc kubenswrapper[4799]: I1010 18:42:10.395390 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4397748c-7aab-4231-abd4-44098d5b3168-utilities\") pod \"redhat-marketplace-lq8gg\" (UID: \"4397748c-7aab-4231-abd4-44098d5b3168\") " pod="openshift-marketplace/redhat-marketplace-lq8gg" Oct 10 18:42:10 crc kubenswrapper[4799]: I1010 18:42:10.395526 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4397748c-7aab-4231-abd4-44098d5b3168-catalog-content\") pod \"redhat-marketplace-lq8gg\" (UID: \"4397748c-7aab-4231-abd4-44098d5b3168\") " pod="openshift-marketplace/redhat-marketplace-lq8gg" Oct 10 18:42:10 crc kubenswrapper[4799]: I1010 18:42:10.395615 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8n6wb\" (UniqueName: \"kubernetes.io/projected/4397748c-7aab-4231-abd4-44098d5b3168-kube-api-access-8n6wb\") pod \"redhat-marketplace-lq8gg\" (UID: \"4397748c-7aab-4231-abd4-44098d5b3168\") " pod="openshift-marketplace/redhat-marketplace-lq8gg" Oct 10 18:42:10 crc kubenswrapper[4799]: I1010 18:42:10.396267 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4397748c-7aab-4231-abd4-44098d5b3168-catalog-content\") pod \"redhat-marketplace-lq8gg\" (UID: \"4397748c-7aab-4231-abd4-44098d5b3168\") " pod="openshift-marketplace/redhat-marketplace-lq8gg" Oct 10 18:42:10 crc kubenswrapper[4799]: I1010 18:42:10.396300 4799 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4397748c-7aab-4231-abd4-44098d5b3168-utilities\") pod \"redhat-marketplace-lq8gg\" (UID: \"4397748c-7aab-4231-abd4-44098d5b3168\") " pod="openshift-marketplace/redhat-marketplace-lq8gg" Oct 10 18:42:10 crc kubenswrapper[4799]: I1010 18:42:10.425042 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8n6wb\" (UniqueName: \"kubernetes.io/projected/4397748c-7aab-4231-abd4-44098d5b3168-kube-api-access-8n6wb\") pod \"redhat-marketplace-lq8gg\" (UID: \"4397748c-7aab-4231-abd4-44098d5b3168\") " pod="openshift-marketplace/redhat-marketplace-lq8gg" Oct 10 18:42:10 crc kubenswrapper[4799]: I1010 18:42:10.531371 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lq8gg" Oct 10 18:42:11 crc kubenswrapper[4799]: I1010 18:42:11.063388 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lq8gg"] Oct 10 18:42:11 crc kubenswrapper[4799]: I1010 18:42:11.824555 4799 generic.go:334] "Generic (PLEG): container finished" podID="4397748c-7aab-4231-abd4-44098d5b3168" containerID="0a28a62ee590acf807212e44f04d4a7df15aaea783f6d1ef25801437ad998a95" exitCode=0 Oct 10 18:42:11 crc kubenswrapper[4799]: I1010 18:42:11.824872 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lq8gg" event={"ID":"4397748c-7aab-4231-abd4-44098d5b3168","Type":"ContainerDied","Data":"0a28a62ee590acf807212e44f04d4a7df15aaea783f6d1ef25801437ad998a95"} Oct 10 18:42:11 crc kubenswrapper[4799]: I1010 18:42:11.825073 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lq8gg" event={"ID":"4397748c-7aab-4231-abd4-44098d5b3168","Type":"ContainerStarted","Data":"38a83a0cfc00cec97e75465c620e2516a09ffda8577a997fe532d453d76be0b0"} Oct 10 18:42:12 crc kubenswrapper[4799]: I1010 18:42:12.402919 4799 scope.go:117] "RemoveContainer" containerID="5d726047dc458172d088ab3478c0245a0f320ffb1060865307a391a8a23b1065" Oct 10 18:42:12 crc kubenswrapper[4799]: E1010 18:42:12.403832 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:42:13 crc kubenswrapper[4799]: E1010 18:42:13.423421 4799 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4397748c_7aab_4231_abd4_44098d5b3168.slice/crio-conmon-973adbb41cfc622ae19496a32dbae0924ed120026d82747ee1e5572de29a9742.scope\": RecentStats: unable to find data in memory cache]" Oct 10 18:42:13 crc kubenswrapper[4799]: I1010 18:42:13.854376 4799 generic.go:334] "Generic (PLEG): container finished" podID="4397748c-7aab-4231-abd4-44098d5b3168" containerID="973adbb41cfc622ae19496a32dbae0924ed120026d82747ee1e5572de29a9742" exitCode=0 Oct 10 18:42:13 crc kubenswrapper[4799]: I1010 18:42:13.854447 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lq8gg" 
event={"ID":"4397748c-7aab-4231-abd4-44098d5b3168","Type":"ContainerDied","Data":"973adbb41cfc622ae19496a32dbae0924ed120026d82747ee1e5572de29a9742"} Oct 10 18:42:14 crc kubenswrapper[4799]: I1010 18:42:14.873446 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lq8gg" event={"ID":"4397748c-7aab-4231-abd4-44098d5b3168","Type":"ContainerStarted","Data":"6f0bd5f97b5ca1deced300395f1fc201ec2e2029f16bb6671a5a5f6023f281a8"} Oct 10 18:42:14 crc kubenswrapper[4799]: I1010 18:42:14.909367 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-lq8gg" podStartSLOduration=2.353652573 podStartE2EDuration="4.909331544s" podCreationTimestamp="2025-10-10 18:42:10 +0000 UTC" firstStartedPulling="2025-10-10 18:42:11.828034182 +0000 UTC m=+7825.336358307" lastFinishedPulling="2025-10-10 18:42:14.383713133 +0000 UTC m=+7827.892037278" observedRunningTime="2025-10-10 18:42:14.898627412 +0000 UTC m=+7828.406951587" watchObservedRunningTime="2025-10-10 18:42:14.909331544 +0000 UTC m=+7828.417655709" Oct 10 18:42:20 crc kubenswrapper[4799]: I1010 18:42:20.531481 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-lq8gg" Oct 10 18:42:20 crc kubenswrapper[4799]: I1010 18:42:20.532096 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-lq8gg" Oct 10 18:42:20 crc kubenswrapper[4799]: I1010 18:42:20.611144 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-lq8gg" Oct 10 18:42:21 crc kubenswrapper[4799]: I1010 18:42:21.082333 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-lq8gg" Oct 10 18:42:21 crc kubenswrapper[4799]: I1010 18:42:21.151644 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lq8gg"] Oct 10 18:42:23 crc kubenswrapper[4799]: I1010 18:42:23.005838 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-lq8gg" podUID="4397748c-7aab-4231-abd4-44098d5b3168" containerName="registry-server" containerID="cri-o://6f0bd5f97b5ca1deced300395f1fc201ec2e2029f16bb6671a5a5f6023f281a8" gracePeriod=2 Oct 10 18:42:23 crc kubenswrapper[4799]: I1010 18:42:23.613283 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lq8gg" Oct 10 18:42:23 crc kubenswrapper[4799]: I1010 18:42:23.721533 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4397748c-7aab-4231-abd4-44098d5b3168-utilities\") pod \"4397748c-7aab-4231-abd4-44098d5b3168\" (UID: \"4397748c-7aab-4231-abd4-44098d5b3168\") " Oct 10 18:42:23 crc kubenswrapper[4799]: I1010 18:42:23.721660 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4397748c-7aab-4231-abd4-44098d5b3168-catalog-content\") pod \"4397748c-7aab-4231-abd4-44098d5b3168\" (UID: \"4397748c-7aab-4231-abd4-44098d5b3168\") " Oct 10 18:42:23 crc kubenswrapper[4799]: I1010 18:42:23.721779 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8n6wb\" (UniqueName: \"kubernetes.io/projected/4397748c-7aab-4231-abd4-44098d5b3168-kube-api-access-8n6wb\") pod \"4397748c-7aab-4231-abd4-44098d5b3168\" (UID: \"4397748c-7aab-4231-abd4-44098d5b3168\") " Oct 10 18:42:23 crc kubenswrapper[4799]: I1010 18:42:23.722747 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4397748c-7aab-4231-abd4-44098d5b3168-utilities" (OuterVolumeSpecName: "utilities") pod "4397748c-7aab-4231-abd4-44098d5b3168" (UID: "4397748c-7aab-4231-abd4-44098d5b3168"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:42:23 crc kubenswrapper[4799]: I1010 18:42:23.729179 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4397748c-7aab-4231-abd4-44098d5b3168-kube-api-access-8n6wb" (OuterVolumeSpecName: "kube-api-access-8n6wb") pod "4397748c-7aab-4231-abd4-44098d5b3168" (UID: "4397748c-7aab-4231-abd4-44098d5b3168"). InnerVolumeSpecName "kube-api-access-8n6wb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:42:23 crc kubenswrapper[4799]: I1010 18:42:23.742449 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4397748c-7aab-4231-abd4-44098d5b3168-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4397748c-7aab-4231-abd4-44098d5b3168" (UID: "4397748c-7aab-4231-abd4-44098d5b3168"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:42:23 crc kubenswrapper[4799]: I1010 18:42:23.825157 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4397748c-7aab-4231-abd4-44098d5b3168-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 18:42:23 crc kubenswrapper[4799]: I1010 18:42:23.825225 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4397748c-7aab-4231-abd4-44098d5b3168-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 18:42:23 crc kubenswrapper[4799]: I1010 18:42:23.825241 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8n6wb\" (UniqueName: \"kubernetes.io/projected/4397748c-7aab-4231-abd4-44098d5b3168-kube-api-access-8n6wb\") on node \"crc\" DevicePath \"\"" Oct 10 18:42:24 crc kubenswrapper[4799]: I1010 18:42:24.030203 4799 generic.go:334] "Generic (PLEG): container finished" podID="4397748c-7aab-4231-abd4-44098d5b3168" containerID="6f0bd5f97b5ca1deced300395f1fc201ec2e2029f16bb6671a5a5f6023f281a8" exitCode=0 Oct 10 18:42:24 crc kubenswrapper[4799]: I1010 18:42:24.030242 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lq8gg" event={"ID":"4397748c-7aab-4231-abd4-44098d5b3168","Type":"ContainerDied","Data":"6f0bd5f97b5ca1deced300395f1fc201ec2e2029f16bb6671a5a5f6023f281a8"} Oct 10 18:42:24 crc kubenswrapper[4799]: I1010 18:42:24.030268 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lq8gg" event={"ID":"4397748c-7aab-4231-abd4-44098d5b3168","Type":"ContainerDied","Data":"38a83a0cfc00cec97e75465c620e2516a09ffda8577a997fe532d453d76be0b0"} Oct 10 18:42:24 crc kubenswrapper[4799]: I1010 18:42:24.030286 4799 scope.go:117] "RemoveContainer" containerID="6f0bd5f97b5ca1deced300395f1fc201ec2e2029f16bb6671a5a5f6023f281a8" Oct 10 18:42:24 crc kubenswrapper[4799]: I1010 18:42:24.030310 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lq8gg" Oct 10 18:42:24 crc kubenswrapper[4799]: I1010 18:42:24.086122 4799 scope.go:117] "RemoveContainer" containerID="973adbb41cfc622ae19496a32dbae0924ed120026d82747ee1e5572de29a9742" Oct 10 18:42:24 crc kubenswrapper[4799]: I1010 18:42:24.094638 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lq8gg"] Oct 10 18:42:24 crc kubenswrapper[4799]: I1010 18:42:24.103240 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-lq8gg"] Oct 10 18:42:24 crc kubenswrapper[4799]: I1010 18:42:24.120495 4799 scope.go:117] "RemoveContainer" containerID="0a28a62ee590acf807212e44f04d4a7df15aaea783f6d1ef25801437ad998a95" Oct 10 18:42:24 crc kubenswrapper[4799]: I1010 18:42:24.168795 4799 scope.go:117] "RemoveContainer" containerID="6f0bd5f97b5ca1deced300395f1fc201ec2e2029f16bb6671a5a5f6023f281a8" Oct 10 18:42:24 crc kubenswrapper[4799]: E1010 18:42:24.169399 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f0bd5f97b5ca1deced300395f1fc201ec2e2029f16bb6671a5a5f6023f281a8\": container with ID starting with 6f0bd5f97b5ca1deced300395f1fc201ec2e2029f16bb6671a5a5f6023f281a8 not found: ID does not exist" containerID="6f0bd5f97b5ca1deced300395f1fc201ec2e2029f16bb6671a5a5f6023f281a8" Oct 10 18:42:24 crc kubenswrapper[4799]: I1010 18:42:24.169443 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f0bd5f97b5ca1deced300395f1fc201ec2e2029f16bb6671a5a5f6023f281a8"} err="failed to get container status \"6f0bd5f97b5ca1deced300395f1fc201ec2e2029f16bb6671a5a5f6023f281a8\": rpc error: code = NotFound desc = could not find container \"6f0bd5f97b5ca1deced300395f1fc201ec2e2029f16bb6671a5a5f6023f281a8\": container with ID starting with 6f0bd5f97b5ca1deced300395f1fc201ec2e2029f16bb6671a5a5f6023f281a8 not found: ID does not exist" Oct 10 18:42:24 crc kubenswrapper[4799]: I1010 18:42:24.169468 4799 scope.go:117] "RemoveContainer" containerID="973adbb41cfc622ae19496a32dbae0924ed120026d82747ee1e5572de29a9742" Oct 10 18:42:24 crc kubenswrapper[4799]: E1010 18:42:24.170014 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"973adbb41cfc622ae19496a32dbae0924ed120026d82747ee1e5572de29a9742\": container with ID starting with 973adbb41cfc622ae19496a32dbae0924ed120026d82747ee1e5572de29a9742 not found: ID does not exist" containerID="973adbb41cfc622ae19496a32dbae0924ed120026d82747ee1e5572de29a9742" Oct 10 18:42:24 crc kubenswrapper[4799]: I1010 18:42:24.170061 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"973adbb41cfc622ae19496a32dbae0924ed120026d82747ee1e5572de29a9742"} err="failed to get container status \"973adbb41cfc622ae19496a32dbae0924ed120026d82747ee1e5572de29a9742\": rpc error: code = NotFound desc = could not find container \"973adbb41cfc622ae19496a32dbae0924ed120026d82747ee1e5572de29a9742\": container with ID starting with 973adbb41cfc622ae19496a32dbae0924ed120026d82747ee1e5572de29a9742 not found: ID does not exist" Oct 10 18:42:24 crc kubenswrapper[4799]: I1010 18:42:24.170096 4799 scope.go:117] "RemoveContainer" containerID="0a28a62ee590acf807212e44f04d4a7df15aaea783f6d1ef25801437ad998a95" Oct 10 18:42:24 crc kubenswrapper[4799]: E1010 18:42:24.171026 4799 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"0a28a62ee590acf807212e44f04d4a7df15aaea783f6d1ef25801437ad998a95\": container with ID starting with 0a28a62ee590acf807212e44f04d4a7df15aaea783f6d1ef25801437ad998a95 not found: ID does not exist" containerID="0a28a62ee590acf807212e44f04d4a7df15aaea783f6d1ef25801437ad998a95" Oct 10 18:42:24 crc kubenswrapper[4799]: I1010 18:42:24.171047 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a28a62ee590acf807212e44f04d4a7df15aaea783f6d1ef25801437ad998a95"} err="failed to get container status \"0a28a62ee590acf807212e44f04d4a7df15aaea783f6d1ef25801437ad998a95\": rpc error: code = NotFound desc = could not find container \"0a28a62ee590acf807212e44f04d4a7df15aaea783f6d1ef25801437ad998a95\": container with ID starting with 0a28a62ee590acf807212e44f04d4a7df15aaea783f6d1ef25801437ad998a95 not found: ID does not exist" Oct 10 18:42:25 crc kubenswrapper[4799]: I1010 18:42:25.426119 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4397748c-7aab-4231-abd4-44098d5b3168" path="/var/lib/kubelet/pods/4397748c-7aab-4231-abd4-44098d5b3168/volumes" Oct 10 18:42:26 crc kubenswrapper[4799]: I1010 18:42:26.402616 4799 scope.go:117] "RemoveContainer" containerID="5d726047dc458172d088ab3478c0245a0f320ffb1060865307a391a8a23b1065" Oct 10 18:42:26 crc kubenswrapper[4799]: E1010 18:42:26.403420 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:42:39 crc kubenswrapper[4799]: I1010 18:42:39.403678 4799 scope.go:117] "RemoveContainer" containerID="5d726047dc458172d088ab3478c0245a0f320ffb1060865307a391a8a23b1065" Oct 10 18:42:39 crc kubenswrapper[4799]: E1010 18:42:39.405544 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:42:53 crc kubenswrapper[4799]: I1010 18:42:53.402458 4799 scope.go:117] "RemoveContainer" containerID="5d726047dc458172d088ab3478c0245a0f320ffb1060865307a391a8a23b1065" Oct 10 18:42:53 crc kubenswrapper[4799]: E1010 18:42:53.403298 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:43:05 crc kubenswrapper[4799]: I1010 18:43:05.403872 4799 scope.go:117] "RemoveContainer" containerID="5d726047dc458172d088ab3478c0245a0f320ffb1060865307a391a8a23b1065" Oct 10 18:43:05 crc kubenswrapper[4799]: E1010 18:43:05.404960 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:43:17 crc kubenswrapper[4799]: I1010 18:43:17.416072 4799 scope.go:117] "RemoveContainer" containerID="5d726047dc458172d088ab3478c0245a0f320ffb1060865307a391a8a23b1065" Oct 10 18:43:17 crc kubenswrapper[4799]: E1010 18:43:17.417165 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:43:29 crc kubenswrapper[4799]: I1010 18:43:29.403952 4799 scope.go:117] "RemoveContainer" containerID="5d726047dc458172d088ab3478c0245a0f320ffb1060865307a391a8a23b1065" Oct 10 18:43:29 crc kubenswrapper[4799]: E1010 18:43:29.405167 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:43:42 crc kubenswrapper[4799]: I1010 18:43:42.404266 4799 scope.go:117] "RemoveContainer" containerID="5d726047dc458172d088ab3478c0245a0f320ffb1060865307a391a8a23b1065" Oct 10 18:43:42 crc kubenswrapper[4799]: E1010 18:43:42.405052 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:43:55 crc kubenswrapper[4799]: I1010 18:43:55.407146 4799 scope.go:117] "RemoveContainer" containerID="5d726047dc458172d088ab3478c0245a0f320ffb1060865307a391a8a23b1065" Oct 10 18:43:55 crc kubenswrapper[4799]: E1010 18:43:55.408361 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:44:06 crc kubenswrapper[4799]: I1010 18:44:06.403171 4799 scope.go:117] "RemoveContainer" containerID="5d726047dc458172d088ab3478c0245a0f320ffb1060865307a391a8a23b1065" Oct 10 18:44:06 crc kubenswrapper[4799]: E1010 18:44:06.404211 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:44:21 crc kubenswrapper[4799]: I1010 18:44:21.404341 4799 scope.go:117] "RemoveContainer" containerID="5d726047dc458172d088ab3478c0245a0f320ffb1060865307a391a8a23b1065" Oct 10 18:44:22 crc kubenswrapper[4799]: I1010 18:44:22.561050 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"909ef57d9736704d016e95ad80ccfb8bd5f48a4f8725478c200f579b545041c4"} Oct 10 18:45:00 crc kubenswrapper[4799]: I1010 18:45:00.170481 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335365-r7vx7"] Oct 10 18:45:00 crc kubenswrapper[4799]: E1010 18:45:00.172232 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4397748c-7aab-4231-abd4-44098d5b3168" containerName="registry-server" Oct 10 18:45:00 crc kubenswrapper[4799]: I1010 18:45:00.172265 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="4397748c-7aab-4231-abd4-44098d5b3168" containerName="registry-server" Oct 10 18:45:00 crc kubenswrapper[4799]: E1010 18:45:00.172327 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4397748c-7aab-4231-abd4-44098d5b3168" containerName="extract-utilities" Oct 10 18:45:00 crc kubenswrapper[4799]: I1010 18:45:00.172345 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="4397748c-7aab-4231-abd4-44098d5b3168" containerName="extract-utilities" Oct 10 18:45:00 crc kubenswrapper[4799]: E1010 18:45:00.172381 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4397748c-7aab-4231-abd4-44098d5b3168" containerName="extract-content" Oct 10 18:45:00 crc kubenswrapper[4799]: I1010 18:45:00.172397 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="4397748c-7aab-4231-abd4-44098d5b3168" containerName="extract-content" Oct 10 18:45:00 crc kubenswrapper[4799]: I1010 18:45:00.172950 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="4397748c-7aab-4231-abd4-44098d5b3168" containerName="registry-server" Oct 10 18:45:00 crc kubenswrapper[4799]: I1010 18:45:00.174263 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335365-r7vx7" Oct 10 18:45:00 crc kubenswrapper[4799]: I1010 18:45:00.177606 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 10 18:45:00 crc kubenswrapper[4799]: I1010 18:45:00.178002 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 10 18:45:00 crc kubenswrapper[4799]: I1010 18:45:00.181952 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335365-r7vx7"] Oct 10 18:45:00 crc kubenswrapper[4799]: I1010 18:45:00.302032 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/303b346d-4cfa-4b36-96ce-b3471bc57878-secret-volume\") pod \"collect-profiles-29335365-r7vx7\" (UID: \"303b346d-4cfa-4b36-96ce-b3471bc57878\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335365-r7vx7" Oct 10 18:45:00 crc kubenswrapper[4799]: I1010 18:45:00.302535 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/303b346d-4cfa-4b36-96ce-b3471bc57878-config-volume\") pod \"collect-profiles-29335365-r7vx7\" (UID: \"303b346d-4cfa-4b36-96ce-b3471bc57878\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335365-r7vx7" Oct 10 18:45:00 crc kubenswrapper[4799]: I1010 18:45:00.303001 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxg7k\" (UniqueName: \"kubernetes.io/projected/303b346d-4cfa-4b36-96ce-b3471bc57878-kube-api-access-sxg7k\") pod \"collect-profiles-29335365-r7vx7\" (UID: \"303b346d-4cfa-4b36-96ce-b3471bc57878\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335365-r7vx7" Oct 10 18:45:00 crc kubenswrapper[4799]: I1010 18:45:00.404608 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/303b346d-4cfa-4b36-96ce-b3471bc57878-config-volume\") pod \"collect-profiles-29335365-r7vx7\" (UID: \"303b346d-4cfa-4b36-96ce-b3471bc57878\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335365-r7vx7" Oct 10 18:45:00 crc kubenswrapper[4799]: I1010 18:45:00.404813 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxg7k\" (UniqueName: \"kubernetes.io/projected/303b346d-4cfa-4b36-96ce-b3471bc57878-kube-api-access-sxg7k\") pod \"collect-profiles-29335365-r7vx7\" (UID: \"303b346d-4cfa-4b36-96ce-b3471bc57878\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335365-r7vx7" Oct 10 18:45:00 crc kubenswrapper[4799]: I1010 18:45:00.404951 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/303b346d-4cfa-4b36-96ce-b3471bc57878-secret-volume\") pod \"collect-profiles-29335365-r7vx7\" (UID: \"303b346d-4cfa-4b36-96ce-b3471bc57878\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335365-r7vx7" Oct 10 18:45:00 crc kubenswrapper[4799]: I1010 18:45:00.406808 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/303b346d-4cfa-4b36-96ce-b3471bc57878-config-volume\") pod 
\"collect-profiles-29335365-r7vx7\" (UID: \"303b346d-4cfa-4b36-96ce-b3471bc57878\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335365-r7vx7" Oct 10 18:45:00 crc kubenswrapper[4799]: I1010 18:45:00.419158 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/303b346d-4cfa-4b36-96ce-b3471bc57878-secret-volume\") pod \"collect-profiles-29335365-r7vx7\" (UID: \"303b346d-4cfa-4b36-96ce-b3471bc57878\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335365-r7vx7" Oct 10 18:45:00 crc kubenswrapper[4799]: I1010 18:45:00.422889 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxg7k\" (UniqueName: \"kubernetes.io/projected/303b346d-4cfa-4b36-96ce-b3471bc57878-kube-api-access-sxg7k\") pod \"collect-profiles-29335365-r7vx7\" (UID: \"303b346d-4cfa-4b36-96ce-b3471bc57878\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335365-r7vx7" Oct 10 18:45:00 crc kubenswrapper[4799]: I1010 18:45:00.511841 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335365-r7vx7" Oct 10 18:45:00 crc kubenswrapper[4799]: I1010 18:45:00.862342 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335365-r7vx7"] Oct 10 18:45:01 crc kubenswrapper[4799]: I1010 18:45:01.073562 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335365-r7vx7" event={"ID":"303b346d-4cfa-4b36-96ce-b3471bc57878","Type":"ContainerStarted","Data":"8de568719bc4863d9eb376254a38382e4bdbd3bc019482d3f8aa74dfb38b0116"} Oct 10 18:45:01 crc kubenswrapper[4799]: I1010 18:45:01.073626 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335365-r7vx7" event={"ID":"303b346d-4cfa-4b36-96ce-b3471bc57878","Type":"ContainerStarted","Data":"4cc6b38dd75726632132705eb9defb72da210320651c487011efad3346b05c4d"} Oct 10 18:45:01 crc kubenswrapper[4799]: I1010 18:45:01.116118 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29335365-r7vx7" podStartSLOduration=1.116094637 podStartE2EDuration="1.116094637s" podCreationTimestamp="2025-10-10 18:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 18:45:01.093012852 +0000 UTC m=+7994.601337007" watchObservedRunningTime="2025-10-10 18:45:01.116094637 +0000 UTC m=+7994.624418792" Oct 10 18:45:02 crc kubenswrapper[4799]: I1010 18:45:02.090458 4799 generic.go:334] "Generic (PLEG): container finished" podID="303b346d-4cfa-4b36-96ce-b3471bc57878" containerID="8de568719bc4863d9eb376254a38382e4bdbd3bc019482d3f8aa74dfb38b0116" exitCode=0 Oct 10 18:45:02 crc kubenswrapper[4799]: I1010 18:45:02.090590 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335365-r7vx7" event={"ID":"303b346d-4cfa-4b36-96ce-b3471bc57878","Type":"ContainerDied","Data":"8de568719bc4863d9eb376254a38382e4bdbd3bc019482d3f8aa74dfb38b0116"} Oct 10 18:45:03 crc kubenswrapper[4799]: I1010 18:45:03.593636 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335365-r7vx7" Oct 10 18:45:03 crc kubenswrapper[4799]: I1010 18:45:03.688609 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/303b346d-4cfa-4b36-96ce-b3471bc57878-config-volume\") pod \"303b346d-4cfa-4b36-96ce-b3471bc57878\" (UID: \"303b346d-4cfa-4b36-96ce-b3471bc57878\") " Oct 10 18:45:03 crc kubenswrapper[4799]: I1010 18:45:03.688829 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sxg7k\" (UniqueName: \"kubernetes.io/projected/303b346d-4cfa-4b36-96ce-b3471bc57878-kube-api-access-sxg7k\") pod \"303b346d-4cfa-4b36-96ce-b3471bc57878\" (UID: \"303b346d-4cfa-4b36-96ce-b3471bc57878\") " Oct 10 18:45:03 crc kubenswrapper[4799]: I1010 18:45:03.688895 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/303b346d-4cfa-4b36-96ce-b3471bc57878-secret-volume\") pod \"303b346d-4cfa-4b36-96ce-b3471bc57878\" (UID: \"303b346d-4cfa-4b36-96ce-b3471bc57878\") " Oct 10 18:45:03 crc kubenswrapper[4799]: I1010 18:45:03.690362 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/303b346d-4cfa-4b36-96ce-b3471bc57878-config-volume" (OuterVolumeSpecName: "config-volume") pod "303b346d-4cfa-4b36-96ce-b3471bc57878" (UID: "303b346d-4cfa-4b36-96ce-b3471bc57878"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:45:03 crc kubenswrapper[4799]: I1010 18:45:03.701854 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/303b346d-4cfa-4b36-96ce-b3471bc57878-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "303b346d-4cfa-4b36-96ce-b3471bc57878" (UID: "303b346d-4cfa-4b36-96ce-b3471bc57878"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:45:03 crc kubenswrapper[4799]: I1010 18:45:03.701984 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/303b346d-4cfa-4b36-96ce-b3471bc57878-kube-api-access-sxg7k" (OuterVolumeSpecName: "kube-api-access-sxg7k") pod "303b346d-4cfa-4b36-96ce-b3471bc57878" (UID: "303b346d-4cfa-4b36-96ce-b3471bc57878"). InnerVolumeSpecName "kube-api-access-sxg7k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:45:03 crc kubenswrapper[4799]: I1010 18:45:03.791924 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sxg7k\" (UniqueName: \"kubernetes.io/projected/303b346d-4cfa-4b36-96ce-b3471bc57878-kube-api-access-sxg7k\") on node \"crc\" DevicePath \"\"" Oct 10 18:45:03 crc kubenswrapper[4799]: I1010 18:45:03.791958 4799 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/303b346d-4cfa-4b36-96ce-b3471bc57878-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 10 18:45:03 crc kubenswrapper[4799]: I1010 18:45:03.791969 4799 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/303b346d-4cfa-4b36-96ce-b3471bc57878-config-volume\") on node \"crc\" DevicePath \"\"" Oct 10 18:45:04 crc kubenswrapper[4799]: I1010 18:45:04.113766 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335365-r7vx7" event={"ID":"303b346d-4cfa-4b36-96ce-b3471bc57878","Type":"ContainerDied","Data":"4cc6b38dd75726632132705eb9defb72da210320651c487011efad3346b05c4d"} Oct 10 18:45:04 crc kubenswrapper[4799]: I1010 18:45:04.113803 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4cc6b38dd75726632132705eb9defb72da210320651c487011efad3346b05c4d" Oct 10 18:45:04 crc kubenswrapper[4799]: I1010 18:45:04.113836 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335365-r7vx7" Oct 10 18:45:04 crc kubenswrapper[4799]: I1010 18:45:04.702053 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335320-vfrdw"] Oct 10 18:45:04 crc kubenswrapper[4799]: I1010 18:45:04.719318 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335320-vfrdw"] Oct 10 18:45:05 crc kubenswrapper[4799]: I1010 18:45:05.425021 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="24939b83-2fd0-4568-9515-10d4d88f116d" path="/var/lib/kubelet/pods/24939b83-2fd0-4568-9515-10d4d88f116d/volumes" Oct 10 18:45:16 crc kubenswrapper[4799]: I1010 18:45:16.643915 4799 scope.go:117] "RemoveContainer" containerID="707419f8a9ace0477f83d66e9fa0e461e9a31db43033bdaec034ae2e7a918417" Oct 10 18:45:38 crc kubenswrapper[4799]: I1010 18:45:38.320459 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-r4zkl"] Oct 10 18:45:38 crc kubenswrapper[4799]: E1010 18:45:38.321496 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="303b346d-4cfa-4b36-96ce-b3471bc57878" containerName="collect-profiles" Oct 10 18:45:38 crc kubenswrapper[4799]: I1010 18:45:38.321509 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="303b346d-4cfa-4b36-96ce-b3471bc57878" containerName="collect-profiles" Oct 10 18:45:38 crc kubenswrapper[4799]: I1010 18:45:38.321715 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="303b346d-4cfa-4b36-96ce-b3471bc57878" containerName="collect-profiles" Oct 10 18:45:38 crc kubenswrapper[4799]: I1010 18:45:38.323390 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-r4zkl" Oct 10 18:45:38 crc kubenswrapper[4799]: I1010 18:45:38.326486 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f61afcd4-3210-4dc9-9aff-d4b94d785079-catalog-content\") pod \"redhat-operators-r4zkl\" (UID: \"f61afcd4-3210-4dc9-9aff-d4b94d785079\") " pod="openshift-marketplace/redhat-operators-r4zkl" Oct 10 18:45:38 crc kubenswrapper[4799]: I1010 18:45:38.326654 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4s87\" (UniqueName: \"kubernetes.io/projected/f61afcd4-3210-4dc9-9aff-d4b94d785079-kube-api-access-d4s87\") pod \"redhat-operators-r4zkl\" (UID: \"f61afcd4-3210-4dc9-9aff-d4b94d785079\") " pod="openshift-marketplace/redhat-operators-r4zkl" Oct 10 18:45:38 crc kubenswrapper[4799]: I1010 18:45:38.330508 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f61afcd4-3210-4dc9-9aff-d4b94d785079-utilities\") pod \"redhat-operators-r4zkl\" (UID: \"f61afcd4-3210-4dc9-9aff-d4b94d785079\") " pod="openshift-marketplace/redhat-operators-r4zkl" Oct 10 18:45:38 crc kubenswrapper[4799]: I1010 18:45:38.345102 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-r4zkl"] Oct 10 18:45:38 crc kubenswrapper[4799]: I1010 18:45:38.433554 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f61afcd4-3210-4dc9-9aff-d4b94d785079-utilities\") pod \"redhat-operators-r4zkl\" (UID: \"f61afcd4-3210-4dc9-9aff-d4b94d785079\") " pod="openshift-marketplace/redhat-operators-r4zkl" Oct 10 18:45:38 crc kubenswrapper[4799]: I1010 18:45:38.435816 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f61afcd4-3210-4dc9-9aff-d4b94d785079-catalog-content\") pod \"redhat-operators-r4zkl\" (UID: \"f61afcd4-3210-4dc9-9aff-d4b94d785079\") " pod="openshift-marketplace/redhat-operators-r4zkl" Oct 10 18:45:38 crc kubenswrapper[4799]: I1010 18:45:38.436085 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f61afcd4-3210-4dc9-9aff-d4b94d785079-utilities\") pod \"redhat-operators-r4zkl\" (UID: \"f61afcd4-3210-4dc9-9aff-d4b94d785079\") " pod="openshift-marketplace/redhat-operators-r4zkl" Oct 10 18:45:38 crc kubenswrapper[4799]: I1010 18:45:38.436512 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f61afcd4-3210-4dc9-9aff-d4b94d785079-catalog-content\") pod \"redhat-operators-r4zkl\" (UID: \"f61afcd4-3210-4dc9-9aff-d4b94d785079\") " pod="openshift-marketplace/redhat-operators-r4zkl" Oct 10 18:45:38 crc kubenswrapper[4799]: I1010 18:45:38.437547 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4s87\" (UniqueName: \"kubernetes.io/projected/f61afcd4-3210-4dc9-9aff-d4b94d785079-kube-api-access-d4s87\") pod \"redhat-operators-r4zkl\" (UID: \"f61afcd4-3210-4dc9-9aff-d4b94d785079\") " pod="openshift-marketplace/redhat-operators-r4zkl" Oct 10 18:45:38 crc kubenswrapper[4799]: I1010 18:45:38.464646 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-d4s87\" (UniqueName: \"kubernetes.io/projected/f61afcd4-3210-4dc9-9aff-d4b94d785079-kube-api-access-d4s87\") pod \"redhat-operators-r4zkl\" (UID: \"f61afcd4-3210-4dc9-9aff-d4b94d785079\") " pod="openshift-marketplace/redhat-operators-r4zkl" Oct 10 18:45:38 crc kubenswrapper[4799]: I1010 18:45:38.655067 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-r4zkl" Oct 10 18:45:39 crc kubenswrapper[4799]: I1010 18:45:39.144024 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-r4zkl"] Oct 10 18:45:39 crc kubenswrapper[4799]: W1010 18:45:39.149368 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf61afcd4_3210_4dc9_9aff_d4b94d785079.slice/crio-8c98853ece136f1b374379199943b4b8b8553ab5c79de4bce55e4ebd735c1de7 WatchSource:0}: Error finding container 8c98853ece136f1b374379199943b4b8b8553ab5c79de4bce55e4ebd735c1de7: Status 404 returned error can't find the container with id 8c98853ece136f1b374379199943b4b8b8553ab5c79de4bce55e4ebd735c1de7 Oct 10 18:45:39 crc kubenswrapper[4799]: I1010 18:45:39.600110 4799 generic.go:334] "Generic (PLEG): container finished" podID="f61afcd4-3210-4dc9-9aff-d4b94d785079" containerID="9286e73209ab361adc0b8c0b4bcd260cc17b2b17c9ebef39f6602381a6af8f03" exitCode=0 Oct 10 18:45:39 crc kubenswrapper[4799]: I1010 18:45:39.600338 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r4zkl" event={"ID":"f61afcd4-3210-4dc9-9aff-d4b94d785079","Type":"ContainerDied","Data":"9286e73209ab361adc0b8c0b4bcd260cc17b2b17c9ebef39f6602381a6af8f03"} Oct 10 18:45:39 crc kubenswrapper[4799]: I1010 18:45:39.600409 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r4zkl" event={"ID":"f61afcd4-3210-4dc9-9aff-d4b94d785079","Type":"ContainerStarted","Data":"8c98853ece136f1b374379199943b4b8b8553ab5c79de4bce55e4ebd735c1de7"} Oct 10 18:45:41 crc kubenswrapper[4799]: I1010 18:45:41.628845 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r4zkl" event={"ID":"f61afcd4-3210-4dc9-9aff-d4b94d785079","Type":"ContainerStarted","Data":"dce53fcedfa1724ced5420203f920404e344b859218688c5cd42d2598d91c166"} Oct 10 18:45:43 crc kubenswrapper[4799]: I1010 18:45:43.520991 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-r7xgq"] Oct 10 18:45:43 crc kubenswrapper[4799]: I1010 18:45:43.525025 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-r7xgq" Oct 10 18:45:43 crc kubenswrapper[4799]: I1010 18:45:43.559597 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-r7xgq"] Oct 10 18:45:43 crc kubenswrapper[4799]: I1010 18:45:43.663409 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcwhp\" (UniqueName: \"kubernetes.io/projected/b28e71eb-09c2-46d4-a2ee-4cc2cf30c816-kube-api-access-fcwhp\") pod \"certified-operators-r7xgq\" (UID: \"b28e71eb-09c2-46d4-a2ee-4cc2cf30c816\") " pod="openshift-marketplace/certified-operators-r7xgq" Oct 10 18:45:43 crc kubenswrapper[4799]: I1010 18:45:43.663502 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b28e71eb-09c2-46d4-a2ee-4cc2cf30c816-utilities\") pod \"certified-operators-r7xgq\" (UID: \"b28e71eb-09c2-46d4-a2ee-4cc2cf30c816\") " pod="openshift-marketplace/certified-operators-r7xgq" Oct 10 18:45:43 crc kubenswrapper[4799]: I1010 18:45:43.663531 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b28e71eb-09c2-46d4-a2ee-4cc2cf30c816-catalog-content\") pod \"certified-operators-r7xgq\" (UID: \"b28e71eb-09c2-46d4-a2ee-4cc2cf30c816\") " pod="openshift-marketplace/certified-operators-r7xgq" Oct 10 18:45:43 crc kubenswrapper[4799]: I1010 18:45:43.765501 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcwhp\" (UniqueName: \"kubernetes.io/projected/b28e71eb-09c2-46d4-a2ee-4cc2cf30c816-kube-api-access-fcwhp\") pod \"certified-operators-r7xgq\" (UID: \"b28e71eb-09c2-46d4-a2ee-4cc2cf30c816\") " pod="openshift-marketplace/certified-operators-r7xgq" Oct 10 18:45:43 crc kubenswrapper[4799]: I1010 18:45:43.765666 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b28e71eb-09c2-46d4-a2ee-4cc2cf30c816-utilities\") pod \"certified-operators-r7xgq\" (UID: \"b28e71eb-09c2-46d4-a2ee-4cc2cf30c816\") " pod="openshift-marketplace/certified-operators-r7xgq" Oct 10 18:45:43 crc kubenswrapper[4799]: I1010 18:45:43.766099 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b28e71eb-09c2-46d4-a2ee-4cc2cf30c816-catalog-content\") pod \"certified-operators-r7xgq\" (UID: \"b28e71eb-09c2-46d4-a2ee-4cc2cf30c816\") " pod="openshift-marketplace/certified-operators-r7xgq" Oct 10 18:45:43 crc kubenswrapper[4799]: I1010 18:45:43.768086 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b28e71eb-09c2-46d4-a2ee-4cc2cf30c816-utilities\") pod \"certified-operators-r7xgq\" (UID: \"b28e71eb-09c2-46d4-a2ee-4cc2cf30c816\") " pod="openshift-marketplace/certified-operators-r7xgq" Oct 10 18:45:43 crc kubenswrapper[4799]: I1010 18:45:43.768110 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b28e71eb-09c2-46d4-a2ee-4cc2cf30c816-catalog-content\") pod \"certified-operators-r7xgq\" (UID: \"b28e71eb-09c2-46d4-a2ee-4cc2cf30c816\") " pod="openshift-marketplace/certified-operators-r7xgq" Oct 10 18:45:43 crc kubenswrapper[4799]: I1010 18:45:43.785215 4799 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-fcwhp\" (UniqueName: \"kubernetes.io/projected/b28e71eb-09c2-46d4-a2ee-4cc2cf30c816-kube-api-access-fcwhp\") pod \"certified-operators-r7xgq\" (UID: \"b28e71eb-09c2-46d4-a2ee-4cc2cf30c816\") " pod="openshift-marketplace/certified-operators-r7xgq" Oct 10 18:45:43 crc kubenswrapper[4799]: I1010 18:45:43.883506 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-r7xgq" Oct 10 18:45:44 crc kubenswrapper[4799]: I1010 18:45:44.374511 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-r7xgq"] Oct 10 18:45:44 crc kubenswrapper[4799]: W1010 18:45:44.381217 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb28e71eb_09c2_46d4_a2ee_4cc2cf30c816.slice/crio-71b9ed3e32f0b7b54adac47f1feb44e13b1d8e71f991e316c365d78fed6f162d WatchSource:0}: Error finding container 71b9ed3e32f0b7b54adac47f1feb44e13b1d8e71f991e316c365d78fed6f162d: Status 404 returned error can't find the container with id 71b9ed3e32f0b7b54adac47f1feb44e13b1d8e71f991e316c365d78fed6f162d Oct 10 18:45:44 crc kubenswrapper[4799]: I1010 18:45:44.662744 4799 generic.go:334] "Generic (PLEG): container finished" podID="b28e71eb-09c2-46d4-a2ee-4cc2cf30c816" containerID="53b4f07d0c89daa1860175c1c6c0e194f4d591de99dbbfa14aaa39b2149580ad" exitCode=0 Oct 10 18:45:44 crc kubenswrapper[4799]: I1010 18:45:44.663002 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r7xgq" event={"ID":"b28e71eb-09c2-46d4-a2ee-4cc2cf30c816","Type":"ContainerDied","Data":"53b4f07d0c89daa1860175c1c6c0e194f4d591de99dbbfa14aaa39b2149580ad"} Oct 10 18:45:44 crc kubenswrapper[4799]: I1010 18:45:44.663160 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r7xgq" event={"ID":"b28e71eb-09c2-46d4-a2ee-4cc2cf30c816","Type":"ContainerStarted","Data":"71b9ed3e32f0b7b54adac47f1feb44e13b1d8e71f991e316c365d78fed6f162d"} Oct 10 18:45:44 crc kubenswrapper[4799]: I1010 18:45:44.665813 4799 generic.go:334] "Generic (PLEG): container finished" podID="f61afcd4-3210-4dc9-9aff-d4b94d785079" containerID="dce53fcedfa1724ced5420203f920404e344b859218688c5cd42d2598d91c166" exitCode=0 Oct 10 18:45:44 crc kubenswrapper[4799]: I1010 18:45:44.665838 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r4zkl" event={"ID":"f61afcd4-3210-4dc9-9aff-d4b94d785079","Type":"ContainerDied","Data":"dce53fcedfa1724ced5420203f920404e344b859218688c5cd42d2598d91c166"} Oct 10 18:45:45 crc kubenswrapper[4799]: I1010 18:45:45.713343 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r4zkl" event={"ID":"f61afcd4-3210-4dc9-9aff-d4b94d785079","Type":"ContainerStarted","Data":"3a2983f0adbd7bf57fa2dd9337510b7eda191e6c66a9698caa1002c45835f056"} Oct 10 18:45:45 crc kubenswrapper[4799]: I1010 18:45:45.720577 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r7xgq" event={"ID":"b28e71eb-09c2-46d4-a2ee-4cc2cf30c816","Type":"ContainerStarted","Data":"12767744adfac5e0f465d6cebd29f787d1f8714b2e40deb01ee194e2c5f86de9"} Oct 10 18:45:45 crc kubenswrapper[4799]: I1010 18:45:45.744040 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-r4zkl" podStartSLOduration=2.180456451 
podStartE2EDuration="7.744009776s" podCreationTimestamp="2025-10-10 18:45:38 +0000 UTC" firstStartedPulling="2025-10-10 18:45:39.603612437 +0000 UTC m=+8033.111936552" lastFinishedPulling="2025-10-10 18:45:45.167165752 +0000 UTC m=+8038.675489877" observedRunningTime="2025-10-10 18:45:45.737712492 +0000 UTC m=+8039.246036617" watchObservedRunningTime="2025-10-10 18:45:45.744009776 +0000 UTC m=+8039.252333931" Oct 10 18:45:47 crc kubenswrapper[4799]: I1010 18:45:47.746122 4799 generic.go:334] "Generic (PLEG): container finished" podID="b28e71eb-09c2-46d4-a2ee-4cc2cf30c816" containerID="12767744adfac5e0f465d6cebd29f787d1f8714b2e40deb01ee194e2c5f86de9" exitCode=0 Oct 10 18:45:47 crc kubenswrapper[4799]: I1010 18:45:47.746210 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r7xgq" event={"ID":"b28e71eb-09c2-46d4-a2ee-4cc2cf30c816","Type":"ContainerDied","Data":"12767744adfac5e0f465d6cebd29f787d1f8714b2e40deb01ee194e2c5f86de9"} Oct 10 18:45:48 crc kubenswrapper[4799]: I1010 18:45:48.655850 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-r4zkl" Oct 10 18:45:48 crc kubenswrapper[4799]: I1010 18:45:48.657281 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-r4zkl" Oct 10 18:45:49 crc kubenswrapper[4799]: I1010 18:45:49.706898 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-r4zkl" podUID="f61afcd4-3210-4dc9-9aff-d4b94d785079" containerName="registry-server" probeResult="failure" output=< Oct 10 18:45:49 crc kubenswrapper[4799]: timeout: failed to connect service ":50051" within 1s Oct 10 18:45:49 crc kubenswrapper[4799]: > Oct 10 18:45:49 crc kubenswrapper[4799]: I1010 18:45:49.782208 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r7xgq" event={"ID":"b28e71eb-09c2-46d4-a2ee-4cc2cf30c816","Type":"ContainerStarted","Data":"c7918d4080372e2aed19144735f1d0b49c2a999d310416cae1996beeee09370f"} Oct 10 18:45:49 crc kubenswrapper[4799]: I1010 18:45:49.807569 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-r7xgq" podStartSLOduration=2.839961004 podStartE2EDuration="6.807552411s" podCreationTimestamp="2025-10-10 18:45:43 +0000 UTC" firstStartedPulling="2025-10-10 18:45:44.665285362 +0000 UTC m=+8038.173609477" lastFinishedPulling="2025-10-10 18:45:48.632876769 +0000 UTC m=+8042.141200884" observedRunningTime="2025-10-10 18:45:49.805252844 +0000 UTC m=+8043.313576989" watchObservedRunningTime="2025-10-10 18:45:49.807552411 +0000 UTC m=+8043.315876536" Oct 10 18:45:53 crc kubenswrapper[4799]: I1010 18:45:53.884046 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-r7xgq" Oct 10 18:45:53 crc kubenswrapper[4799]: I1010 18:45:53.885192 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-r7xgq" Oct 10 18:45:53 crc kubenswrapper[4799]: I1010 18:45:53.954188 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-r7xgq" Oct 10 18:45:54 crc kubenswrapper[4799]: I1010 18:45:54.917056 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-r7xgq" Oct 10 18:45:55 crc kubenswrapper[4799]: I1010 
18:45:55.097988 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-r7xgq"] Oct 10 18:45:56 crc kubenswrapper[4799]: I1010 18:45:56.881628 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-r7xgq" podUID="b28e71eb-09c2-46d4-a2ee-4cc2cf30c816" containerName="registry-server" containerID="cri-o://c7918d4080372e2aed19144735f1d0b49c2a999d310416cae1996beeee09370f" gracePeriod=2 Oct 10 18:45:57 crc kubenswrapper[4799]: I1010 18:45:57.472226 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-r7xgq" Oct 10 18:45:57 crc kubenswrapper[4799]: I1010 18:45:57.481950 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b28e71eb-09c2-46d4-a2ee-4cc2cf30c816-catalog-content\") pod \"b28e71eb-09c2-46d4-a2ee-4cc2cf30c816\" (UID: \"b28e71eb-09c2-46d4-a2ee-4cc2cf30c816\") " Oct 10 18:45:57 crc kubenswrapper[4799]: I1010 18:45:57.482149 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcwhp\" (UniqueName: \"kubernetes.io/projected/b28e71eb-09c2-46d4-a2ee-4cc2cf30c816-kube-api-access-fcwhp\") pod \"b28e71eb-09c2-46d4-a2ee-4cc2cf30c816\" (UID: \"b28e71eb-09c2-46d4-a2ee-4cc2cf30c816\") " Oct 10 18:45:57 crc kubenswrapper[4799]: I1010 18:45:57.482241 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b28e71eb-09c2-46d4-a2ee-4cc2cf30c816-utilities\") pod \"b28e71eb-09c2-46d4-a2ee-4cc2cf30c816\" (UID: \"b28e71eb-09c2-46d4-a2ee-4cc2cf30c816\") " Oct 10 18:45:57 crc kubenswrapper[4799]: I1010 18:45:57.483005 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b28e71eb-09c2-46d4-a2ee-4cc2cf30c816-utilities" (OuterVolumeSpecName: "utilities") pod "b28e71eb-09c2-46d4-a2ee-4cc2cf30c816" (UID: "b28e71eb-09c2-46d4-a2ee-4cc2cf30c816"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:45:57 crc kubenswrapper[4799]: I1010 18:45:57.483420 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b28e71eb-09c2-46d4-a2ee-4cc2cf30c816-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 18:45:57 crc kubenswrapper[4799]: I1010 18:45:57.488943 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b28e71eb-09c2-46d4-a2ee-4cc2cf30c816-kube-api-access-fcwhp" (OuterVolumeSpecName: "kube-api-access-fcwhp") pod "b28e71eb-09c2-46d4-a2ee-4cc2cf30c816" (UID: "b28e71eb-09c2-46d4-a2ee-4cc2cf30c816"). InnerVolumeSpecName "kube-api-access-fcwhp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:45:57 crc kubenswrapper[4799]: I1010 18:45:57.546812 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b28e71eb-09c2-46d4-a2ee-4cc2cf30c816-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b28e71eb-09c2-46d4-a2ee-4cc2cf30c816" (UID: "b28e71eb-09c2-46d4-a2ee-4cc2cf30c816"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:45:57 crc kubenswrapper[4799]: I1010 18:45:57.585262 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b28e71eb-09c2-46d4-a2ee-4cc2cf30c816-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 18:45:57 crc kubenswrapper[4799]: I1010 18:45:57.585297 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcwhp\" (UniqueName: \"kubernetes.io/projected/b28e71eb-09c2-46d4-a2ee-4cc2cf30c816-kube-api-access-fcwhp\") on node \"crc\" DevicePath \"\"" Oct 10 18:45:57 crc kubenswrapper[4799]: I1010 18:45:57.895317 4799 generic.go:334] "Generic (PLEG): container finished" podID="b28e71eb-09c2-46d4-a2ee-4cc2cf30c816" containerID="c7918d4080372e2aed19144735f1d0b49c2a999d310416cae1996beeee09370f" exitCode=0 Oct 10 18:45:57 crc kubenswrapper[4799]: I1010 18:45:57.895361 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r7xgq" event={"ID":"b28e71eb-09c2-46d4-a2ee-4cc2cf30c816","Type":"ContainerDied","Data":"c7918d4080372e2aed19144735f1d0b49c2a999d310416cae1996beeee09370f"} Oct 10 18:45:57 crc kubenswrapper[4799]: I1010 18:45:57.895398 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r7xgq" event={"ID":"b28e71eb-09c2-46d4-a2ee-4cc2cf30c816","Type":"ContainerDied","Data":"71b9ed3e32f0b7b54adac47f1feb44e13b1d8e71f991e316c365d78fed6f162d"} Oct 10 18:45:57 crc kubenswrapper[4799]: I1010 18:45:57.895418 4799 scope.go:117] "RemoveContainer" containerID="c7918d4080372e2aed19144735f1d0b49c2a999d310416cae1996beeee09370f" Oct 10 18:45:57 crc kubenswrapper[4799]: I1010 18:45:57.895432 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-r7xgq" Oct 10 18:45:57 crc kubenswrapper[4799]: I1010 18:45:57.931420 4799 scope.go:117] "RemoveContainer" containerID="12767744adfac5e0f465d6cebd29f787d1f8714b2e40deb01ee194e2c5f86de9" Oct 10 18:45:57 crc kubenswrapper[4799]: I1010 18:45:57.941601 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-r7xgq"] Oct 10 18:45:57 crc kubenswrapper[4799]: I1010 18:45:57.953868 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-r7xgq"] Oct 10 18:45:57 crc kubenswrapper[4799]: I1010 18:45:57.973814 4799 scope.go:117] "RemoveContainer" containerID="53b4f07d0c89daa1860175c1c6c0e194f4d591de99dbbfa14aaa39b2149580ad" Oct 10 18:45:58 crc kubenswrapper[4799]: I1010 18:45:58.032995 4799 scope.go:117] "RemoveContainer" containerID="c7918d4080372e2aed19144735f1d0b49c2a999d310416cae1996beeee09370f" Oct 10 18:45:58 crc kubenswrapper[4799]: E1010 18:45:58.049529 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c7918d4080372e2aed19144735f1d0b49c2a999d310416cae1996beeee09370f\": container with ID starting with c7918d4080372e2aed19144735f1d0b49c2a999d310416cae1996beeee09370f not found: ID does not exist" containerID="c7918d4080372e2aed19144735f1d0b49c2a999d310416cae1996beeee09370f" Oct 10 18:45:58 crc kubenswrapper[4799]: I1010 18:45:58.049595 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7918d4080372e2aed19144735f1d0b49c2a999d310416cae1996beeee09370f"} err="failed to get container status \"c7918d4080372e2aed19144735f1d0b49c2a999d310416cae1996beeee09370f\": rpc error: code = NotFound desc = could not find container \"c7918d4080372e2aed19144735f1d0b49c2a999d310416cae1996beeee09370f\": container with ID starting with c7918d4080372e2aed19144735f1d0b49c2a999d310416cae1996beeee09370f not found: ID does not exist" Oct 10 18:45:58 crc kubenswrapper[4799]: I1010 18:45:58.049625 4799 scope.go:117] "RemoveContainer" containerID="12767744adfac5e0f465d6cebd29f787d1f8714b2e40deb01ee194e2c5f86de9" Oct 10 18:45:58 crc kubenswrapper[4799]: E1010 18:45:58.052356 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"12767744adfac5e0f465d6cebd29f787d1f8714b2e40deb01ee194e2c5f86de9\": container with ID starting with 12767744adfac5e0f465d6cebd29f787d1f8714b2e40deb01ee194e2c5f86de9 not found: ID does not exist" containerID="12767744adfac5e0f465d6cebd29f787d1f8714b2e40deb01ee194e2c5f86de9" Oct 10 18:45:58 crc kubenswrapper[4799]: I1010 18:45:58.052471 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12767744adfac5e0f465d6cebd29f787d1f8714b2e40deb01ee194e2c5f86de9"} err="failed to get container status \"12767744adfac5e0f465d6cebd29f787d1f8714b2e40deb01ee194e2c5f86de9\": rpc error: code = NotFound desc = could not find container \"12767744adfac5e0f465d6cebd29f787d1f8714b2e40deb01ee194e2c5f86de9\": container with ID starting with 12767744adfac5e0f465d6cebd29f787d1f8714b2e40deb01ee194e2c5f86de9 not found: ID does not exist" Oct 10 18:45:58 crc kubenswrapper[4799]: I1010 18:45:58.052571 4799 scope.go:117] "RemoveContainer" containerID="53b4f07d0c89daa1860175c1c6c0e194f4d591de99dbbfa14aaa39b2149580ad" Oct 10 18:45:58 crc kubenswrapper[4799]: E1010 18:45:58.053022 4799 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"53b4f07d0c89daa1860175c1c6c0e194f4d591de99dbbfa14aaa39b2149580ad\": container with ID starting with 53b4f07d0c89daa1860175c1c6c0e194f4d591de99dbbfa14aaa39b2149580ad not found: ID does not exist" containerID="53b4f07d0c89daa1860175c1c6c0e194f4d591de99dbbfa14aaa39b2149580ad" Oct 10 18:45:58 crc kubenswrapper[4799]: I1010 18:45:58.053130 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53b4f07d0c89daa1860175c1c6c0e194f4d591de99dbbfa14aaa39b2149580ad"} err="failed to get container status \"53b4f07d0c89daa1860175c1c6c0e194f4d591de99dbbfa14aaa39b2149580ad\": rpc error: code = NotFound desc = could not find container \"53b4f07d0c89daa1860175c1c6c0e194f4d591de99dbbfa14aaa39b2149580ad\": container with ID starting with 53b4f07d0c89daa1860175c1c6c0e194f4d591de99dbbfa14aaa39b2149580ad not found: ID does not exist" Oct 10 18:45:59 crc kubenswrapper[4799]: I1010 18:45:59.425044 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b28e71eb-09c2-46d4-a2ee-4cc2cf30c816" path="/var/lib/kubelet/pods/b28e71eb-09c2-46d4-a2ee-4cc2cf30c816/volumes" Oct 10 18:45:59 crc kubenswrapper[4799]: I1010 18:45:59.705047 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-r4zkl" podUID="f61afcd4-3210-4dc9-9aff-d4b94d785079" containerName="registry-server" probeResult="failure" output=< Oct 10 18:45:59 crc kubenswrapper[4799]: timeout: failed to connect service ":50051" within 1s Oct 10 18:45:59 crc kubenswrapper[4799]: > Oct 10 18:46:08 crc kubenswrapper[4799]: I1010 18:46:08.724648 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-r4zkl" Oct 10 18:46:08 crc kubenswrapper[4799]: I1010 18:46:08.795011 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-r4zkl" Oct 10 18:46:09 crc kubenswrapper[4799]: I1010 18:46:09.524367 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-r4zkl"] Oct 10 18:46:10 crc kubenswrapper[4799]: I1010 18:46:10.076476 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-r4zkl" podUID="f61afcd4-3210-4dc9-9aff-d4b94d785079" containerName="registry-server" containerID="cri-o://3a2983f0adbd7bf57fa2dd9337510b7eda191e6c66a9698caa1002c45835f056" gracePeriod=2 Oct 10 18:46:10 crc kubenswrapper[4799]: I1010 18:46:10.645929 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-r4zkl" Oct 10 18:46:10 crc kubenswrapper[4799]: I1010 18:46:10.795467 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f61afcd4-3210-4dc9-9aff-d4b94d785079-utilities\") pod \"f61afcd4-3210-4dc9-9aff-d4b94d785079\" (UID: \"f61afcd4-3210-4dc9-9aff-d4b94d785079\") " Oct 10 18:46:10 crc kubenswrapper[4799]: I1010 18:46:10.795963 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f61afcd4-3210-4dc9-9aff-d4b94d785079-catalog-content\") pod \"f61afcd4-3210-4dc9-9aff-d4b94d785079\" (UID: \"f61afcd4-3210-4dc9-9aff-d4b94d785079\") " Oct 10 18:46:10 crc kubenswrapper[4799]: I1010 18:46:10.796293 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f61afcd4-3210-4dc9-9aff-d4b94d785079-utilities" (OuterVolumeSpecName: "utilities") pod "f61afcd4-3210-4dc9-9aff-d4b94d785079" (UID: "f61afcd4-3210-4dc9-9aff-d4b94d785079"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:46:10 crc kubenswrapper[4799]: I1010 18:46:10.796325 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4s87\" (UniqueName: \"kubernetes.io/projected/f61afcd4-3210-4dc9-9aff-d4b94d785079-kube-api-access-d4s87\") pod \"f61afcd4-3210-4dc9-9aff-d4b94d785079\" (UID: \"f61afcd4-3210-4dc9-9aff-d4b94d785079\") " Oct 10 18:46:10 crc kubenswrapper[4799]: I1010 18:46:10.797201 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f61afcd4-3210-4dc9-9aff-d4b94d785079-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 18:46:10 crc kubenswrapper[4799]: I1010 18:46:10.807111 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f61afcd4-3210-4dc9-9aff-d4b94d785079-kube-api-access-d4s87" (OuterVolumeSpecName: "kube-api-access-d4s87") pod "f61afcd4-3210-4dc9-9aff-d4b94d785079" (UID: "f61afcd4-3210-4dc9-9aff-d4b94d785079"). InnerVolumeSpecName "kube-api-access-d4s87". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:46:10 crc kubenswrapper[4799]: I1010 18:46:10.877434 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f61afcd4-3210-4dc9-9aff-d4b94d785079-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f61afcd4-3210-4dc9-9aff-d4b94d785079" (UID: "f61afcd4-3210-4dc9-9aff-d4b94d785079"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:46:10 crc kubenswrapper[4799]: I1010 18:46:10.899834 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4s87\" (UniqueName: \"kubernetes.io/projected/f61afcd4-3210-4dc9-9aff-d4b94d785079-kube-api-access-d4s87\") on node \"crc\" DevicePath \"\"" Oct 10 18:46:10 crc kubenswrapper[4799]: I1010 18:46:10.899894 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f61afcd4-3210-4dc9-9aff-d4b94d785079-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 18:46:11 crc kubenswrapper[4799]: I1010 18:46:11.092151 4799 generic.go:334] "Generic (PLEG): container finished" podID="f61afcd4-3210-4dc9-9aff-d4b94d785079" containerID="3a2983f0adbd7bf57fa2dd9337510b7eda191e6c66a9698caa1002c45835f056" exitCode=0 Oct 10 18:46:11 crc kubenswrapper[4799]: I1010 18:46:11.092205 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-r4zkl" Oct 10 18:46:11 crc kubenswrapper[4799]: I1010 18:46:11.092213 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r4zkl" event={"ID":"f61afcd4-3210-4dc9-9aff-d4b94d785079","Type":"ContainerDied","Data":"3a2983f0adbd7bf57fa2dd9337510b7eda191e6c66a9698caa1002c45835f056"} Oct 10 18:46:11 crc kubenswrapper[4799]: I1010 18:46:11.092266 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r4zkl" event={"ID":"f61afcd4-3210-4dc9-9aff-d4b94d785079","Type":"ContainerDied","Data":"8c98853ece136f1b374379199943b4b8b8553ab5c79de4bce55e4ebd735c1de7"} Oct 10 18:46:11 crc kubenswrapper[4799]: I1010 18:46:11.092297 4799 scope.go:117] "RemoveContainer" containerID="3a2983f0adbd7bf57fa2dd9337510b7eda191e6c66a9698caa1002c45835f056" Oct 10 18:46:11 crc kubenswrapper[4799]: I1010 18:46:11.154256 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-r4zkl"] Oct 10 18:46:11 crc kubenswrapper[4799]: I1010 18:46:11.155614 4799 scope.go:117] "RemoveContainer" containerID="dce53fcedfa1724ced5420203f920404e344b859218688c5cd42d2598d91c166" Oct 10 18:46:11 crc kubenswrapper[4799]: I1010 18:46:11.168541 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-r4zkl"] Oct 10 18:46:11 crc kubenswrapper[4799]: I1010 18:46:11.189694 4799 scope.go:117] "RemoveContainer" containerID="9286e73209ab361adc0b8c0b4bcd260cc17b2b17c9ebef39f6602381a6af8f03" Oct 10 18:46:11 crc kubenswrapper[4799]: I1010 18:46:11.255015 4799 scope.go:117] "RemoveContainer" containerID="3a2983f0adbd7bf57fa2dd9337510b7eda191e6c66a9698caa1002c45835f056" Oct 10 18:46:11 crc kubenswrapper[4799]: E1010 18:46:11.255544 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a2983f0adbd7bf57fa2dd9337510b7eda191e6c66a9698caa1002c45835f056\": container with ID starting with 3a2983f0adbd7bf57fa2dd9337510b7eda191e6c66a9698caa1002c45835f056 not found: ID does not exist" containerID="3a2983f0adbd7bf57fa2dd9337510b7eda191e6c66a9698caa1002c45835f056" Oct 10 18:46:11 crc kubenswrapper[4799]: I1010 18:46:11.255588 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a2983f0adbd7bf57fa2dd9337510b7eda191e6c66a9698caa1002c45835f056"} err="failed to get container status \"3a2983f0adbd7bf57fa2dd9337510b7eda191e6c66a9698caa1002c45835f056\": 
rpc error: code = NotFound desc = could not find container \"3a2983f0adbd7bf57fa2dd9337510b7eda191e6c66a9698caa1002c45835f056\": container with ID starting with 3a2983f0adbd7bf57fa2dd9337510b7eda191e6c66a9698caa1002c45835f056 not found: ID does not exist" Oct 10 18:46:11 crc kubenswrapper[4799]: I1010 18:46:11.255621 4799 scope.go:117] "RemoveContainer" containerID="dce53fcedfa1724ced5420203f920404e344b859218688c5cd42d2598d91c166" Oct 10 18:46:11 crc kubenswrapper[4799]: E1010 18:46:11.256104 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dce53fcedfa1724ced5420203f920404e344b859218688c5cd42d2598d91c166\": container with ID starting with dce53fcedfa1724ced5420203f920404e344b859218688c5cd42d2598d91c166 not found: ID does not exist" containerID="dce53fcedfa1724ced5420203f920404e344b859218688c5cd42d2598d91c166" Oct 10 18:46:11 crc kubenswrapper[4799]: I1010 18:46:11.256131 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dce53fcedfa1724ced5420203f920404e344b859218688c5cd42d2598d91c166"} err="failed to get container status \"dce53fcedfa1724ced5420203f920404e344b859218688c5cd42d2598d91c166\": rpc error: code = NotFound desc = could not find container \"dce53fcedfa1724ced5420203f920404e344b859218688c5cd42d2598d91c166\": container with ID starting with dce53fcedfa1724ced5420203f920404e344b859218688c5cd42d2598d91c166 not found: ID does not exist" Oct 10 18:46:11 crc kubenswrapper[4799]: I1010 18:46:11.256153 4799 scope.go:117] "RemoveContainer" containerID="9286e73209ab361adc0b8c0b4bcd260cc17b2b17c9ebef39f6602381a6af8f03" Oct 10 18:46:11 crc kubenswrapper[4799]: E1010 18:46:11.256454 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9286e73209ab361adc0b8c0b4bcd260cc17b2b17c9ebef39f6602381a6af8f03\": container with ID starting with 9286e73209ab361adc0b8c0b4bcd260cc17b2b17c9ebef39f6602381a6af8f03 not found: ID does not exist" containerID="9286e73209ab361adc0b8c0b4bcd260cc17b2b17c9ebef39f6602381a6af8f03" Oct 10 18:46:11 crc kubenswrapper[4799]: I1010 18:46:11.256479 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9286e73209ab361adc0b8c0b4bcd260cc17b2b17c9ebef39f6602381a6af8f03"} err="failed to get container status \"9286e73209ab361adc0b8c0b4bcd260cc17b2b17c9ebef39f6602381a6af8f03\": rpc error: code = NotFound desc = could not find container \"9286e73209ab361adc0b8c0b4bcd260cc17b2b17c9ebef39f6602381a6af8f03\": container with ID starting with 9286e73209ab361adc0b8c0b4bcd260cc17b2b17c9ebef39f6602381a6af8f03 not found: ID does not exist" Oct 10 18:46:11 crc kubenswrapper[4799]: I1010 18:46:11.428164 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f61afcd4-3210-4dc9-9aff-d4b94d785079" path="/var/lib/kubelet/pods/f61afcd4-3210-4dc9-9aff-d4b94d785079/volumes" Oct 10 18:46:45 crc kubenswrapper[4799]: I1010 18:46:45.248607 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 18:46:45 crc kubenswrapper[4799]: I1010 18:46:45.249308 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" 
podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 18:46:53 crc kubenswrapper[4799]: I1010 18:46:53.673606 4799 generic.go:334] "Generic (PLEG): container finished" podID="87e27a02-1f1f-4d72-be1d-4662e43bb0e3" containerID="5d316d24b40e6e7e2a8358df044103eaf6b1bf19a4ee0aa22dff891588fd0676" exitCode=0 Oct 10 18:46:53 crc kubenswrapper[4799]: I1010 18:46:53.673735 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-9l9vn" event={"ID":"87e27a02-1f1f-4d72-be1d-4662e43bb0e3","Type":"ContainerDied","Data":"5d316d24b40e6e7e2a8358df044103eaf6b1bf19a4ee0aa22dff891588fd0676"} Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.269616 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-9l9vn" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.453245 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-libvirt-combined-ca-bundle\") pod \"87e27a02-1f1f-4d72-be1d-4662e43bb0e3\" (UID: \"87e27a02-1f1f-4d72-be1d-4662e43bb0e3\") " Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.453496 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lqh6v\" (UniqueName: \"kubernetes.io/projected/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-kube-api-access-lqh6v\") pod \"87e27a02-1f1f-4d72-be1d-4662e43bb0e3\" (UID: \"87e27a02-1f1f-4d72-be1d-4662e43bb0e3\") " Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.453771 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-inventory\") pod \"87e27a02-1f1f-4d72-be1d-4662e43bb0e3\" (UID: \"87e27a02-1f1f-4d72-be1d-4662e43bb0e3\") " Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.453827 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-ssh-key\") pod \"87e27a02-1f1f-4d72-be1d-4662e43bb0e3\" (UID: \"87e27a02-1f1f-4d72-be1d-4662e43bb0e3\") " Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.453901 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-ceph\") pod \"87e27a02-1f1f-4d72-be1d-4662e43bb0e3\" (UID: \"87e27a02-1f1f-4d72-be1d-4662e43bb0e3\") " Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.454019 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-libvirt-secret-0\") pod \"87e27a02-1f1f-4d72-be1d-4662e43bb0e3\" (UID: \"87e27a02-1f1f-4d72-be1d-4662e43bb0e3\") " Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.459844 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-ceph" (OuterVolumeSpecName: "ceph") pod "87e27a02-1f1f-4d72-be1d-4662e43bb0e3" (UID: "87e27a02-1f1f-4d72-be1d-4662e43bb0e3"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.461228 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-kube-api-access-lqh6v" (OuterVolumeSpecName: "kube-api-access-lqh6v") pod "87e27a02-1f1f-4d72-be1d-4662e43bb0e3" (UID: "87e27a02-1f1f-4d72-be1d-4662e43bb0e3"). InnerVolumeSpecName "kube-api-access-lqh6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.464256 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "87e27a02-1f1f-4d72-be1d-4662e43bb0e3" (UID: "87e27a02-1f1f-4d72-be1d-4662e43bb0e3"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.486869 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-inventory" (OuterVolumeSpecName: "inventory") pod "87e27a02-1f1f-4d72-be1d-4662e43bb0e3" (UID: "87e27a02-1f1f-4d72-be1d-4662e43bb0e3"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.491448 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "87e27a02-1f1f-4d72-be1d-4662e43bb0e3" (UID: "87e27a02-1f1f-4d72-be1d-4662e43bb0e3"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.494190 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "87e27a02-1f1f-4d72-be1d-4662e43bb0e3" (UID: "87e27a02-1f1f-4d72-be1d-4662e43bb0e3"). InnerVolumeSpecName "libvirt-secret-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.556866 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lqh6v\" (UniqueName: \"kubernetes.io/projected/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-kube-api-access-lqh6v\") on node \"crc\" DevicePath \"\"" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.556904 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-inventory\") on node \"crc\" DevicePath \"\"" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.556917 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.557054 4799 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-ceph\") on node \"crc\" DevicePath \"\"" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.557434 4799 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.557783 4799 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87e27a02-1f1f-4d72-be1d-4662e43bb0e3-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.706148 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-9l9vn" event={"ID":"87e27a02-1f1f-4d72-be1d-4662e43bb0e3","Type":"ContainerDied","Data":"7952367432f067cb9aa51e4114e5006163587b9f90857a3c50e8d9ef2e784dcd"} Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.706200 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7952367432f067cb9aa51e4114e5006163587b9f90857a3c50e8d9ef2e784dcd" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.706257 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-9l9vn" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.822055 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-vnbnb"] Oct 10 18:46:55 crc kubenswrapper[4799]: E1010 18:46:55.823304 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f61afcd4-3210-4dc9-9aff-d4b94d785079" containerName="extract-utilities" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.823338 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f61afcd4-3210-4dc9-9aff-d4b94d785079" containerName="extract-utilities" Oct 10 18:46:55 crc kubenswrapper[4799]: E1010 18:46:55.823356 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b28e71eb-09c2-46d4-a2ee-4cc2cf30c816" containerName="extract-utilities" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.823367 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="b28e71eb-09c2-46d4-a2ee-4cc2cf30c816" containerName="extract-utilities" Oct 10 18:46:55 crc kubenswrapper[4799]: E1010 18:46:55.823382 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b28e71eb-09c2-46d4-a2ee-4cc2cf30c816" containerName="registry-server" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.823391 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="b28e71eb-09c2-46d4-a2ee-4cc2cf30c816" containerName="registry-server" Oct 10 18:46:55 crc kubenswrapper[4799]: E1010 18:46:55.823419 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b28e71eb-09c2-46d4-a2ee-4cc2cf30c816" containerName="extract-content" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.823427 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="b28e71eb-09c2-46d4-a2ee-4cc2cf30c816" containerName="extract-content" Oct 10 18:46:55 crc kubenswrapper[4799]: E1010 18:46:55.823464 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f61afcd4-3210-4dc9-9aff-d4b94d785079" containerName="extract-content" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.823473 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f61afcd4-3210-4dc9-9aff-d4b94d785079" containerName="extract-content" Oct 10 18:46:55 crc kubenswrapper[4799]: E1010 18:46:55.823486 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87e27a02-1f1f-4d72-be1d-4662e43bb0e3" containerName="libvirt-openstack-openstack-cell1" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.823495 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="87e27a02-1f1f-4d72-be1d-4662e43bb0e3" containerName="libvirt-openstack-openstack-cell1" Oct 10 18:46:55 crc kubenswrapper[4799]: E1010 18:46:55.823512 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f61afcd4-3210-4dc9-9aff-d4b94d785079" containerName="registry-server" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.823719 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f61afcd4-3210-4dc9-9aff-d4b94d785079" containerName="registry-server" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.824033 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="87e27a02-1f1f-4d72-be1d-4662e43bb0e3" containerName="libvirt-openstack-openstack-cell1" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.824064 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="b28e71eb-09c2-46d4-a2ee-4cc2cf30c816" containerName="registry-server" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 
18:46:55.824082 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f61afcd4-3210-4dc9-9aff-d4b94d785079" containerName="registry-server" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.825741 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.829889 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.833134 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-cells-global-config" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.833143 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.833150 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.833432 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.834445 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.837496 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-rdlhr" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.840689 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-vnbnb"] Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.966743 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-vnbnb\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.967021 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-ceph\") pod \"nova-cell1-openstack-openstack-cell1-vnbnb\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.967093 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-vnbnb\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.967220 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-inventory\") pod \"nova-cell1-openstack-openstack-cell1-vnbnb\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.967492 
4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-vnbnb\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.967600 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9jrm\" (UniqueName: \"kubernetes.io/projected/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-kube-api-access-x9jrm\") pod \"nova-cell1-openstack-openstack-cell1-vnbnb\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.967992 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-vnbnb\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.968038 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-vnbnb\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.968079 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-cells-global-config-1\") pod \"nova-cell1-openstack-openstack-cell1-vnbnb\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.968400 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-vnbnb\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:46:55 crc kubenswrapper[4799]: I1010 18:46:55.968776 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-ssh-key\") pod \"nova-cell1-openstack-openstack-cell1-vnbnb\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:46:56 crc kubenswrapper[4799]: I1010 18:46:56.071675 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-vnbnb\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:46:56 crc 
kubenswrapper[4799]: I1010 18:46:56.071886 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-inventory\") pod \"nova-cell1-openstack-openstack-cell1-vnbnb\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:46:56 crc kubenswrapper[4799]: I1010 18:46:56.072017 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-vnbnb\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:46:56 crc kubenswrapper[4799]: I1010 18:46:56.072090 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9jrm\" (UniqueName: \"kubernetes.io/projected/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-kube-api-access-x9jrm\") pod \"nova-cell1-openstack-openstack-cell1-vnbnb\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:46:56 crc kubenswrapper[4799]: I1010 18:46:56.072217 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-vnbnb\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:46:56 crc kubenswrapper[4799]: I1010 18:46:56.072268 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-vnbnb\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:46:56 crc kubenswrapper[4799]: I1010 18:46:56.072328 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-cells-global-config-1\") pod \"nova-cell1-openstack-openstack-cell1-vnbnb\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:46:56 crc kubenswrapper[4799]: I1010 18:46:56.072455 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-vnbnb\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:46:56 crc kubenswrapper[4799]: I1010 18:46:56.073180 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-ssh-key\") pod \"nova-cell1-openstack-openstack-cell1-vnbnb\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:46:56 crc kubenswrapper[4799]: I1010 18:46:56.074090 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-vnbnb\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:46:56 crc kubenswrapper[4799]: I1010 18:46:56.074119 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-vnbnb\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:46:56 crc kubenswrapper[4799]: I1010 18:46:56.074324 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-ceph\") pod \"nova-cell1-openstack-openstack-cell1-vnbnb\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:46:56 crc kubenswrapper[4799]: I1010 18:46:56.075061 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-cells-global-config-1\") pod \"nova-cell1-openstack-openstack-cell1-vnbnb\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:46:56 crc kubenswrapper[4799]: I1010 18:46:56.078158 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-vnbnb\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:46:56 crc kubenswrapper[4799]: I1010 18:46:56.078883 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-inventory\") pod \"nova-cell1-openstack-openstack-cell1-vnbnb\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:46:56 crc kubenswrapper[4799]: I1010 18:46:56.079488 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-ssh-key\") pod \"nova-cell1-openstack-openstack-cell1-vnbnb\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:46:56 crc kubenswrapper[4799]: I1010 18:46:56.079934 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-vnbnb\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:46:56 crc kubenswrapper[4799]: I1010 18:46:56.080924 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-vnbnb\" (UID: 
\"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:46:56 crc kubenswrapper[4799]: I1010 18:46:56.081719 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-ceph\") pod \"nova-cell1-openstack-openstack-cell1-vnbnb\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:46:56 crc kubenswrapper[4799]: I1010 18:46:56.084390 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-vnbnb\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:46:56 crc kubenswrapper[4799]: I1010 18:46:56.092477 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-vnbnb\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:46:56 crc kubenswrapper[4799]: I1010 18:46:56.097869 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9jrm\" (UniqueName: \"kubernetes.io/projected/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-kube-api-access-x9jrm\") pod \"nova-cell1-openstack-openstack-cell1-vnbnb\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:46:56 crc kubenswrapper[4799]: I1010 18:46:56.162853 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:46:56 crc kubenswrapper[4799]: I1010 18:46:56.794081 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-vnbnb"] Oct 10 18:46:56 crc kubenswrapper[4799]: I1010 18:46:56.806272 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 10 18:46:57 crc kubenswrapper[4799]: I1010 18:46:57.735547 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" event={"ID":"decb29f6-bfb5-4bce-bc6c-304a4a8b5964","Type":"ContainerStarted","Data":"a0f6c93a5013fdc8060cf87dca3265d311d2e02944fe8969e1820ce2eb470a1d"} Oct 10 18:46:58 crc kubenswrapper[4799]: I1010 18:46:58.750108 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" event={"ID":"decb29f6-bfb5-4bce-bc6c-304a4a8b5964","Type":"ContainerStarted","Data":"2749f154c4fc40a230265cd8fcc2b59f379f68c1be676a2fedc50ad2a7d1355f"} Oct 10 18:46:58 crc kubenswrapper[4799]: I1010 18:46:58.781924 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" podStartSLOduration=3.196525446 podStartE2EDuration="3.781898417s" podCreationTimestamp="2025-10-10 18:46:55 +0000 UTC" firstStartedPulling="2025-10-10 18:46:56.805978506 +0000 UTC m=+8110.314302621" lastFinishedPulling="2025-10-10 18:46:57.391351447 +0000 UTC m=+8110.899675592" observedRunningTime="2025-10-10 18:46:58.774993599 +0000 UTC m=+8112.283317724" watchObservedRunningTime="2025-10-10 18:46:58.781898417 +0000 UTC m=+8112.290222532" Oct 10 18:47:15 crc kubenswrapper[4799]: I1010 18:47:15.248315 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 18:47:15 crc kubenswrapper[4799]: I1010 18:47:15.248724 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 18:47:45 crc kubenswrapper[4799]: I1010 18:47:45.248678 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 18:47:45 crc kubenswrapper[4799]: I1010 18:47:45.249707 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 18:47:45 crc kubenswrapper[4799]: I1010 18:47:45.249823 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 18:47:45 crc kubenswrapper[4799]: I1010 18:47:45.257105 4799 kuberuntime_manager.go:1027] "Message for Container of pod" 
containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"909ef57d9736704d016e95ad80ccfb8bd5f48a4f8725478c200f579b545041c4"} pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 10 18:47:45 crc kubenswrapper[4799]: I1010 18:47:45.257331 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" containerID="cri-o://909ef57d9736704d016e95ad80ccfb8bd5f48a4f8725478c200f579b545041c4" gracePeriod=600 Oct 10 18:47:46 crc kubenswrapper[4799]: I1010 18:47:46.397355 4799 generic.go:334] "Generic (PLEG): container finished" podID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerID="909ef57d9736704d016e95ad80ccfb8bd5f48a4f8725478c200f579b545041c4" exitCode=0 Oct 10 18:47:46 crc kubenswrapper[4799]: I1010 18:47:46.397458 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerDied","Data":"909ef57d9736704d016e95ad80ccfb8bd5f48a4f8725478c200f579b545041c4"} Oct 10 18:47:46 crc kubenswrapper[4799]: I1010 18:47:46.398223 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"7b1000aefc8a60a452be056f6fd645ae7a95f088784af623ffab036bea5d3d0d"} Oct 10 18:47:46 crc kubenswrapper[4799]: I1010 18:47:46.398271 4799 scope.go:117] "RemoveContainer" containerID="5d726047dc458172d088ab3478c0245a0f320ffb1060865307a391a8a23b1065" Oct 10 18:49:45 crc kubenswrapper[4799]: I1010 18:49:45.249344 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 18:49:45 crc kubenswrapper[4799]: I1010 18:49:45.249852 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 18:50:15 crc kubenswrapper[4799]: I1010 18:50:15.248897 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 18:50:15 crc kubenswrapper[4799]: I1010 18:50:15.249811 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 18:50:45 crc kubenswrapper[4799]: I1010 18:50:45.249173 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 18:50:45 crc kubenswrapper[4799]: I1010 18:50:45.251029 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 18:50:45 crc kubenswrapper[4799]: I1010 18:50:45.251188 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 18:50:45 crc kubenswrapper[4799]: I1010 18:50:45.252429 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7b1000aefc8a60a452be056f6fd645ae7a95f088784af623ffab036bea5d3d0d"} pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 10 18:50:45 crc kubenswrapper[4799]: I1010 18:50:45.252614 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" containerID="cri-o://7b1000aefc8a60a452be056f6fd645ae7a95f088784af623ffab036bea5d3d0d" gracePeriod=600 Oct 10 18:50:45 crc kubenswrapper[4799]: E1010 18:50:45.393896 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:50:45 crc kubenswrapper[4799]: I1010 18:50:45.790808 4799 generic.go:334] "Generic (PLEG): container finished" podID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerID="7b1000aefc8a60a452be056f6fd645ae7a95f088784af623ffab036bea5d3d0d" exitCode=0 Oct 10 18:50:45 crc kubenswrapper[4799]: I1010 18:50:45.790870 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerDied","Data":"7b1000aefc8a60a452be056f6fd645ae7a95f088784af623ffab036bea5d3d0d"} Oct 10 18:50:45 crc kubenswrapper[4799]: I1010 18:50:45.790919 4799 scope.go:117] "RemoveContainer" containerID="909ef57d9736704d016e95ad80ccfb8bd5f48a4f8725478c200f579b545041c4" Oct 10 18:50:45 crc kubenswrapper[4799]: I1010 18:50:45.791980 4799 scope.go:117] "RemoveContainer" containerID="7b1000aefc8a60a452be056f6fd645ae7a95f088784af623ffab036bea5d3d0d" Oct 10 18:50:45 crc kubenswrapper[4799]: E1010 18:50:45.792516 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:50:57 crc kubenswrapper[4799]: I1010 18:50:57.415048 4799 scope.go:117] 
"RemoveContainer" containerID="7b1000aefc8a60a452be056f6fd645ae7a95f088784af623ffab036bea5d3d0d" Oct 10 18:50:57 crc kubenswrapper[4799]: E1010 18:50:57.416439 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:50:59 crc kubenswrapper[4799]: I1010 18:50:59.965340 4799 generic.go:334] "Generic (PLEG): container finished" podID="decb29f6-bfb5-4bce-bc6c-304a4a8b5964" containerID="2749f154c4fc40a230265cd8fcc2b59f379f68c1be676a2fedc50ad2a7d1355f" exitCode=0 Oct 10 18:50:59 crc kubenswrapper[4799]: I1010 18:50:59.965421 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" event={"ID":"decb29f6-bfb5-4bce-bc6c-304a4a8b5964","Type":"ContainerDied","Data":"2749f154c4fc40a230265cd8fcc2b59f379f68c1be676a2fedc50ad2a7d1355f"} Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.507183 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.630350 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-cells-global-config-0\") pod \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.630530 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-cell1-combined-ca-bundle\") pod \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.630579 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-migration-ssh-key-0\") pod \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.631832 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-cell1-compute-config-1\") pod \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.631988 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x9jrm\" (UniqueName: \"kubernetes.io/projected/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-kube-api-access-x9jrm\") pod \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.632817 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-cells-global-config-1\") pod 
\"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.632986 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-inventory\") pod \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.633147 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-migration-ssh-key-1\") pod \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.633220 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-ceph\") pod \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.633352 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-cell1-compute-config-0\") pod \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.633433 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-ssh-key\") pod \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\" (UID: \"decb29f6-bfb5-4bce-bc6c-304a4a8b5964\") " Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.636931 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-ceph" (OuterVolumeSpecName: "ceph") pod "decb29f6-bfb5-4bce-bc6c-304a4a8b5964" (UID: "decb29f6-bfb5-4bce-bc6c-304a4a8b5964"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.637621 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-cell1-combined-ca-bundle" (OuterVolumeSpecName: "nova-cell1-combined-ca-bundle") pod "decb29f6-bfb5-4bce-bc6c-304a4a8b5964" (UID: "decb29f6-bfb5-4bce-bc6c-304a4a8b5964"). InnerVolumeSpecName "nova-cell1-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.644634 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-kube-api-access-x9jrm" (OuterVolumeSpecName: "kube-api-access-x9jrm") pod "decb29f6-bfb5-4bce-bc6c-304a4a8b5964" (UID: "decb29f6-bfb5-4bce-bc6c-304a4a8b5964"). InnerVolumeSpecName "kube-api-access-x9jrm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.667912 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "decb29f6-bfb5-4bce-bc6c-304a4a8b5964" (UID: "decb29f6-bfb5-4bce-bc6c-304a4a8b5964"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.670008 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "decb29f6-bfb5-4bce-bc6c-304a4a8b5964" (UID: "decb29f6-bfb5-4bce-bc6c-304a4a8b5964"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.672709 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-inventory" (OuterVolumeSpecName: "inventory") pod "decb29f6-bfb5-4bce-bc6c-304a4a8b5964" (UID: "decb29f6-bfb5-4bce-bc6c-304a4a8b5964"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.677192 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "decb29f6-bfb5-4bce-bc6c-304a4a8b5964" (UID: "decb29f6-bfb5-4bce-bc6c-304a4a8b5964"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.688150 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-cells-global-config-1" (OuterVolumeSpecName: "nova-cells-global-config-1") pod "decb29f6-bfb5-4bce-bc6c-304a4a8b5964" (UID: "decb29f6-bfb5-4bce-bc6c-304a4a8b5964"). InnerVolumeSpecName "nova-cells-global-config-1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.693104 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "decb29f6-bfb5-4bce-bc6c-304a4a8b5964" (UID: "decb29f6-bfb5-4bce-bc6c-304a4a8b5964"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.696536 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-cells-global-config-0" (OuterVolumeSpecName: "nova-cells-global-config-0") pod "decb29f6-bfb5-4bce-bc6c-304a4a8b5964" (UID: "decb29f6-bfb5-4bce-bc6c-304a4a8b5964"). InnerVolumeSpecName "nova-cells-global-config-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.710285 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "decb29f6-bfb5-4bce-bc6c-304a4a8b5964" (UID: "decb29f6-bfb5-4bce-bc6c-304a4a8b5964"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.737353 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x9jrm\" (UniqueName: \"kubernetes.io/projected/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-kube-api-access-x9jrm\") on node \"crc\" DevicePath \"\"" Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.737782 4799 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-cells-global-config-1\") on node \"crc\" DevicePath \"\"" Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.737806 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-inventory\") on node \"crc\" DevicePath \"\"" Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.737824 4799 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.737843 4799 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-ceph\") on node \"crc\" DevicePath \"\"" Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.737860 4799 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.737879 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.737898 4799 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-cells-global-config-0\") on node \"crc\" DevicePath \"\"" Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.737915 4799 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-cell1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.737933 4799 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.737953 4799 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/decb29f6-bfb5-4bce-bc6c-304a4a8b5964-nova-cell1-compute-config-1\") 
on node \"crc\" DevicePath \"\"" Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.994284 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" event={"ID":"decb29f6-bfb5-4bce-bc6c-304a4a8b5964","Type":"ContainerDied","Data":"a0f6c93a5013fdc8060cf87dca3265d311d2e02944fe8969e1820ce2eb470a1d"} Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.994327 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a0f6c93a5013fdc8060cf87dca3265d311d2e02944fe8969e1820ce2eb470a1d" Oct 10 18:51:01 crc kubenswrapper[4799]: I1010 18:51:01.994345 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-vnbnb" Oct 10 18:51:02 crc kubenswrapper[4799]: I1010 18:51:02.099710 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-kwcwz"] Oct 10 18:51:02 crc kubenswrapper[4799]: E1010 18:51:02.100344 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="decb29f6-bfb5-4bce-bc6c-304a4a8b5964" containerName="nova-cell1-openstack-openstack-cell1" Oct 10 18:51:02 crc kubenswrapper[4799]: I1010 18:51:02.100381 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="decb29f6-bfb5-4bce-bc6c-304a4a8b5964" containerName="nova-cell1-openstack-openstack-cell1" Oct 10 18:51:02 crc kubenswrapper[4799]: I1010 18:51:02.100872 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="decb29f6-bfb5-4bce-bc6c-304a4a8b5964" containerName="nova-cell1-openstack-openstack-cell1" Oct 10 18:51:02 crc kubenswrapper[4799]: I1010 18:51:02.102358 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-kwcwz" Oct 10 18:51:02 crc kubenswrapper[4799]: I1010 18:51:02.105111 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 10 18:51:02 crc kubenswrapper[4799]: I1010 18:51:02.105172 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 10 18:51:02 crc kubenswrapper[4799]: I1010 18:51:02.106263 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 10 18:51:02 crc kubenswrapper[4799]: I1010 18:51:02.106623 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Oct 10 18:51:02 crc kubenswrapper[4799]: I1010 18:51:02.112429 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-rdlhr" Oct 10 18:51:02 crc kubenswrapper[4799]: I1010 18:51:02.114797 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-kwcwz"] Oct 10 18:51:02 crc kubenswrapper[4799]: I1010 18:51:02.256799 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-kwcwz\" (UID: \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\") " pod="openstack/telemetry-openstack-openstack-cell1-kwcwz" Oct 10 18:51:02 crc kubenswrapper[4799]: I1010 18:51:02.257035 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fmbh7\" (UniqueName: 
\"kubernetes.io/projected/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-kube-api-access-fmbh7\") pod \"telemetry-openstack-openstack-cell1-kwcwz\" (UID: \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\") " pod="openstack/telemetry-openstack-openstack-cell1-kwcwz" Oct 10 18:51:02 crc kubenswrapper[4799]: I1010 18:51:02.257126 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-inventory\") pod \"telemetry-openstack-openstack-cell1-kwcwz\" (UID: \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\") " pod="openstack/telemetry-openstack-openstack-cell1-kwcwz" Oct 10 18:51:02 crc kubenswrapper[4799]: I1010 18:51:02.257289 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-kwcwz\" (UID: \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\") " pod="openstack/telemetry-openstack-openstack-cell1-kwcwz" Oct 10 18:51:02 crc kubenswrapper[4799]: I1010 18:51:02.257352 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-kwcwz\" (UID: \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\") " pod="openstack/telemetry-openstack-openstack-cell1-kwcwz" Oct 10 18:51:02 crc kubenswrapper[4799]: I1010 18:51:02.257416 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-ceph\") pod \"telemetry-openstack-openstack-cell1-kwcwz\" (UID: \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\") " pod="openstack/telemetry-openstack-openstack-cell1-kwcwz" Oct 10 18:51:02 crc kubenswrapper[4799]: I1010 18:51:02.257468 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-ssh-key\") pod \"telemetry-openstack-openstack-cell1-kwcwz\" (UID: \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\") " pod="openstack/telemetry-openstack-openstack-cell1-kwcwz" Oct 10 18:51:02 crc kubenswrapper[4799]: I1010 18:51:02.257593 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-kwcwz\" (UID: \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\") " pod="openstack/telemetry-openstack-openstack-cell1-kwcwz" Oct 10 18:51:02 crc kubenswrapper[4799]: I1010 18:51:02.360167 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-kwcwz\" (UID: \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\") " pod="openstack/telemetry-openstack-openstack-cell1-kwcwz" Oct 10 18:51:02 crc kubenswrapper[4799]: I1010 18:51:02.360304 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fmbh7\" (UniqueName: 
\"kubernetes.io/projected/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-kube-api-access-fmbh7\") pod \"telemetry-openstack-openstack-cell1-kwcwz\" (UID: \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\") " pod="openstack/telemetry-openstack-openstack-cell1-kwcwz" Oct 10 18:51:02 crc kubenswrapper[4799]: I1010 18:51:02.360366 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-inventory\") pod \"telemetry-openstack-openstack-cell1-kwcwz\" (UID: \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\") " pod="openstack/telemetry-openstack-openstack-cell1-kwcwz" Oct 10 18:51:02 crc kubenswrapper[4799]: I1010 18:51:02.360448 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-kwcwz\" (UID: \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\") " pod="openstack/telemetry-openstack-openstack-cell1-kwcwz" Oct 10 18:51:02 crc kubenswrapper[4799]: I1010 18:51:02.360481 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-kwcwz\" (UID: \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\") " pod="openstack/telemetry-openstack-openstack-cell1-kwcwz" Oct 10 18:51:02 crc kubenswrapper[4799]: I1010 18:51:02.360530 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-ceph\") pod \"telemetry-openstack-openstack-cell1-kwcwz\" (UID: \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\") " pod="openstack/telemetry-openstack-openstack-cell1-kwcwz" Oct 10 18:51:02 crc kubenswrapper[4799]: I1010 18:51:02.361121 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-ssh-key\") pod \"telemetry-openstack-openstack-cell1-kwcwz\" (UID: \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\") " pod="openstack/telemetry-openstack-openstack-cell1-kwcwz" Oct 10 18:51:02 crc kubenswrapper[4799]: I1010 18:51:02.361186 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-kwcwz\" (UID: \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\") " pod="openstack/telemetry-openstack-openstack-cell1-kwcwz" Oct 10 18:51:02 crc kubenswrapper[4799]: I1010 18:51:02.365161 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-kwcwz\" (UID: \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\") " pod="openstack/telemetry-openstack-openstack-cell1-kwcwz" Oct 10 18:51:02 crc kubenswrapper[4799]: I1010 18:51:02.365175 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-ssh-key\") pod \"telemetry-openstack-openstack-cell1-kwcwz\" (UID: \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\") " 
pod="openstack/telemetry-openstack-openstack-cell1-kwcwz" Oct 10 18:51:02 crc kubenswrapper[4799]: I1010 18:51:02.366375 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-kwcwz\" (UID: \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\") " pod="openstack/telemetry-openstack-openstack-cell1-kwcwz" Oct 10 18:51:02 crc kubenswrapper[4799]: I1010 18:51:02.366468 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-ceph\") pod \"telemetry-openstack-openstack-cell1-kwcwz\" (UID: \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\") " pod="openstack/telemetry-openstack-openstack-cell1-kwcwz" Oct 10 18:51:02 crc kubenswrapper[4799]: I1010 18:51:02.366610 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-inventory\") pod \"telemetry-openstack-openstack-cell1-kwcwz\" (UID: \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\") " pod="openstack/telemetry-openstack-openstack-cell1-kwcwz" Oct 10 18:51:02 crc kubenswrapper[4799]: I1010 18:51:02.366646 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-kwcwz\" (UID: \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\") " pod="openstack/telemetry-openstack-openstack-cell1-kwcwz" Oct 10 18:51:02 crc kubenswrapper[4799]: I1010 18:51:02.366712 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-kwcwz\" (UID: \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\") " pod="openstack/telemetry-openstack-openstack-cell1-kwcwz" Oct 10 18:51:02 crc kubenswrapper[4799]: I1010 18:51:02.384006 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fmbh7\" (UniqueName: \"kubernetes.io/projected/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-kube-api-access-fmbh7\") pod \"telemetry-openstack-openstack-cell1-kwcwz\" (UID: \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\") " pod="openstack/telemetry-openstack-openstack-cell1-kwcwz" Oct 10 18:51:02 crc kubenswrapper[4799]: I1010 18:51:02.472260 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-kwcwz" Oct 10 18:51:03 crc kubenswrapper[4799]: I1010 18:51:03.039017 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-kwcwz"] Oct 10 18:51:03 crc kubenswrapper[4799]: W1010 18:51:03.054650 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod02816ea3_9fb7_46aa_ae23_d6ff431c08b4.slice/crio-7668df1af0d5c44af14ede5402f283b6f7da687674d1f292c46fd381662cbb83 WatchSource:0}: Error finding container 7668df1af0d5c44af14ede5402f283b6f7da687674d1f292c46fd381662cbb83: Status 404 returned error can't find the container with id 7668df1af0d5c44af14ede5402f283b6f7da687674d1f292c46fd381662cbb83 Oct 10 18:51:04 crc kubenswrapper[4799]: I1010 18:51:04.023501 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-kwcwz" event={"ID":"02816ea3-9fb7-46aa-ae23-d6ff431c08b4","Type":"ContainerStarted","Data":"c8905b9f872e02abda9c624ae56092c3e6bd71397e3f9f8c318425ec72d79126"} Oct 10 18:51:04 crc kubenswrapper[4799]: I1010 18:51:04.023796 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-kwcwz" event={"ID":"02816ea3-9fb7-46aa-ae23-d6ff431c08b4","Type":"ContainerStarted","Data":"7668df1af0d5c44af14ede5402f283b6f7da687674d1f292c46fd381662cbb83"} Oct 10 18:51:04 crc kubenswrapper[4799]: I1010 18:51:04.044017 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-openstack-openstack-cell1-kwcwz" podStartSLOduration=1.521964712 podStartE2EDuration="2.044001365s" podCreationTimestamp="2025-10-10 18:51:02 +0000 UTC" firstStartedPulling="2025-10-10 18:51:03.059809412 +0000 UTC m=+8356.568133527" lastFinishedPulling="2025-10-10 18:51:03.581846025 +0000 UTC m=+8357.090170180" observedRunningTime="2025-10-10 18:51:04.037545118 +0000 UTC m=+8357.545869243" watchObservedRunningTime="2025-10-10 18:51:04.044001365 +0000 UTC m=+8357.552325490" Oct 10 18:51:04 crc kubenswrapper[4799]: I1010 18:51:04.155517 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-h9mws"] Oct 10 18:51:04 crc kubenswrapper[4799]: I1010 18:51:04.158602 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-h9mws" Oct 10 18:51:04 crc kubenswrapper[4799]: I1010 18:51:04.192938 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-h9mws"] Oct 10 18:51:04 crc kubenswrapper[4799]: I1010 18:51:04.303774 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5101699-d9de-40a7-a569-72ff4711dca7-utilities\") pod \"community-operators-h9mws\" (UID: \"b5101699-d9de-40a7-a569-72ff4711dca7\") " pod="openshift-marketplace/community-operators-h9mws" Oct 10 18:51:04 crc kubenswrapper[4799]: I1010 18:51:04.303875 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ts6gv\" (UniqueName: \"kubernetes.io/projected/b5101699-d9de-40a7-a569-72ff4711dca7-kube-api-access-ts6gv\") pod \"community-operators-h9mws\" (UID: \"b5101699-d9de-40a7-a569-72ff4711dca7\") " pod="openshift-marketplace/community-operators-h9mws" Oct 10 18:51:04 crc kubenswrapper[4799]: I1010 18:51:04.303984 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5101699-d9de-40a7-a569-72ff4711dca7-catalog-content\") pod \"community-operators-h9mws\" (UID: \"b5101699-d9de-40a7-a569-72ff4711dca7\") " pod="openshift-marketplace/community-operators-h9mws" Oct 10 18:51:04 crc kubenswrapper[4799]: I1010 18:51:04.405956 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5101699-d9de-40a7-a569-72ff4711dca7-utilities\") pod \"community-operators-h9mws\" (UID: \"b5101699-d9de-40a7-a569-72ff4711dca7\") " pod="openshift-marketplace/community-operators-h9mws" Oct 10 18:51:04 crc kubenswrapper[4799]: I1010 18:51:04.406038 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ts6gv\" (UniqueName: \"kubernetes.io/projected/b5101699-d9de-40a7-a569-72ff4711dca7-kube-api-access-ts6gv\") pod \"community-operators-h9mws\" (UID: \"b5101699-d9de-40a7-a569-72ff4711dca7\") " pod="openshift-marketplace/community-operators-h9mws" Oct 10 18:51:04 crc kubenswrapper[4799]: I1010 18:51:04.406160 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5101699-d9de-40a7-a569-72ff4711dca7-catalog-content\") pod \"community-operators-h9mws\" (UID: \"b5101699-d9de-40a7-a569-72ff4711dca7\") " pod="openshift-marketplace/community-operators-h9mws" Oct 10 18:51:04 crc kubenswrapper[4799]: I1010 18:51:04.406545 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5101699-d9de-40a7-a569-72ff4711dca7-catalog-content\") pod \"community-operators-h9mws\" (UID: \"b5101699-d9de-40a7-a569-72ff4711dca7\") " pod="openshift-marketplace/community-operators-h9mws" Oct 10 18:51:04 crc kubenswrapper[4799]: I1010 18:51:04.407128 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5101699-d9de-40a7-a569-72ff4711dca7-utilities\") pod \"community-operators-h9mws\" (UID: \"b5101699-d9de-40a7-a569-72ff4711dca7\") " pod="openshift-marketplace/community-operators-h9mws" Oct 10 18:51:04 crc kubenswrapper[4799]: I1010 18:51:04.432986 4799 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-ts6gv\" (UniqueName: \"kubernetes.io/projected/b5101699-d9de-40a7-a569-72ff4711dca7-kube-api-access-ts6gv\") pod \"community-operators-h9mws\" (UID: \"b5101699-d9de-40a7-a569-72ff4711dca7\") " pod="openshift-marketplace/community-operators-h9mws" Oct 10 18:51:04 crc kubenswrapper[4799]: I1010 18:51:04.484155 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-h9mws" Oct 10 18:51:05 crc kubenswrapper[4799]: I1010 18:51:05.055251 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-h9mws"] Oct 10 18:51:05 crc kubenswrapper[4799]: W1010 18:51:05.069287 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb5101699_d9de_40a7_a569_72ff4711dca7.slice/crio-21ff2abbe8627b48479aa5cd1feb00936a42e9e9d0bbb1be43ac1b037d56bf10 WatchSource:0}: Error finding container 21ff2abbe8627b48479aa5cd1feb00936a42e9e9d0bbb1be43ac1b037d56bf10: Status 404 returned error can't find the container with id 21ff2abbe8627b48479aa5cd1feb00936a42e9e9d0bbb1be43ac1b037d56bf10 Oct 10 18:51:06 crc kubenswrapper[4799]: I1010 18:51:06.053512 4799 generic.go:334] "Generic (PLEG): container finished" podID="b5101699-d9de-40a7-a569-72ff4711dca7" containerID="e0eed570fffad830bf2e7f3d8b3923c7591eb583c5c2e35490bab43f301eab07" exitCode=0 Oct 10 18:51:06 crc kubenswrapper[4799]: I1010 18:51:06.053581 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h9mws" event={"ID":"b5101699-d9de-40a7-a569-72ff4711dca7","Type":"ContainerDied","Data":"e0eed570fffad830bf2e7f3d8b3923c7591eb583c5c2e35490bab43f301eab07"} Oct 10 18:51:06 crc kubenswrapper[4799]: I1010 18:51:06.053966 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h9mws" event={"ID":"b5101699-d9de-40a7-a569-72ff4711dca7","Type":"ContainerStarted","Data":"21ff2abbe8627b48479aa5cd1feb00936a42e9e9d0bbb1be43ac1b037d56bf10"} Oct 10 18:51:08 crc kubenswrapper[4799]: I1010 18:51:08.087597 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h9mws" event={"ID":"b5101699-d9de-40a7-a569-72ff4711dca7","Type":"ContainerStarted","Data":"a62b4c0e4a22fbb429a66d7af3067806ecaaa5009eff32e49cabb26333a32e69"} Oct 10 18:51:08 crc kubenswrapper[4799]: I1010 18:51:08.403012 4799 scope.go:117] "RemoveContainer" containerID="7b1000aefc8a60a452be056f6fd645ae7a95f088784af623ffab036bea5d3d0d" Oct 10 18:51:08 crc kubenswrapper[4799]: E1010 18:51:08.403625 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:51:09 crc kubenswrapper[4799]: I1010 18:51:09.099168 4799 generic.go:334] "Generic (PLEG): container finished" podID="b5101699-d9de-40a7-a569-72ff4711dca7" containerID="a62b4c0e4a22fbb429a66d7af3067806ecaaa5009eff32e49cabb26333a32e69" exitCode=0 Oct 10 18:51:09 crc kubenswrapper[4799]: I1010 18:51:09.099238 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h9mws" 
event={"ID":"b5101699-d9de-40a7-a569-72ff4711dca7","Type":"ContainerDied","Data":"a62b4c0e4a22fbb429a66d7af3067806ecaaa5009eff32e49cabb26333a32e69"} Oct 10 18:51:10 crc kubenswrapper[4799]: I1010 18:51:10.117746 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h9mws" event={"ID":"b5101699-d9de-40a7-a569-72ff4711dca7","Type":"ContainerStarted","Data":"6567052ba04866694284b8f25594cd38f2e931fe8b889be64ef6cd2ab3d41962"} Oct 10 18:51:10 crc kubenswrapper[4799]: I1010 18:51:10.161194 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-h9mws" podStartSLOduration=2.569324119 podStartE2EDuration="6.161166117s" podCreationTimestamp="2025-10-10 18:51:04 +0000 UTC" firstStartedPulling="2025-10-10 18:51:06.05649359 +0000 UTC m=+8359.564817705" lastFinishedPulling="2025-10-10 18:51:09.648335548 +0000 UTC m=+8363.156659703" observedRunningTime="2025-10-10 18:51:10.148244291 +0000 UTC m=+8363.656568506" watchObservedRunningTime="2025-10-10 18:51:10.161166117 +0000 UTC m=+8363.669490272" Oct 10 18:51:14 crc kubenswrapper[4799]: I1010 18:51:14.484873 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-h9mws" Oct 10 18:51:14 crc kubenswrapper[4799]: I1010 18:51:14.485425 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-h9mws" Oct 10 18:51:14 crc kubenswrapper[4799]: I1010 18:51:14.558254 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-h9mws" Oct 10 18:51:15 crc kubenswrapper[4799]: I1010 18:51:15.273178 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-h9mws" Oct 10 18:51:15 crc kubenswrapper[4799]: I1010 18:51:15.350826 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-h9mws"] Oct 10 18:51:17 crc kubenswrapper[4799]: I1010 18:51:17.208322 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-h9mws" podUID="b5101699-d9de-40a7-a569-72ff4711dca7" containerName="registry-server" containerID="cri-o://6567052ba04866694284b8f25594cd38f2e931fe8b889be64ef6cd2ab3d41962" gracePeriod=2 Oct 10 18:51:17 crc kubenswrapper[4799]: I1010 18:51:17.710434 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-h9mws" Oct 10 18:51:17 crc kubenswrapper[4799]: I1010 18:51:17.859483 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ts6gv\" (UniqueName: \"kubernetes.io/projected/b5101699-d9de-40a7-a569-72ff4711dca7-kube-api-access-ts6gv\") pod \"b5101699-d9de-40a7-a569-72ff4711dca7\" (UID: \"b5101699-d9de-40a7-a569-72ff4711dca7\") " Oct 10 18:51:17 crc kubenswrapper[4799]: I1010 18:51:17.859578 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5101699-d9de-40a7-a569-72ff4711dca7-catalog-content\") pod \"b5101699-d9de-40a7-a569-72ff4711dca7\" (UID: \"b5101699-d9de-40a7-a569-72ff4711dca7\") " Oct 10 18:51:17 crc kubenswrapper[4799]: I1010 18:51:17.859817 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5101699-d9de-40a7-a569-72ff4711dca7-utilities\") pod \"b5101699-d9de-40a7-a569-72ff4711dca7\" (UID: \"b5101699-d9de-40a7-a569-72ff4711dca7\") " Oct 10 18:51:17 crc kubenswrapper[4799]: I1010 18:51:17.861336 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5101699-d9de-40a7-a569-72ff4711dca7-utilities" (OuterVolumeSpecName: "utilities") pod "b5101699-d9de-40a7-a569-72ff4711dca7" (UID: "b5101699-d9de-40a7-a569-72ff4711dca7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:51:17 crc kubenswrapper[4799]: I1010 18:51:17.870092 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5101699-d9de-40a7-a569-72ff4711dca7-kube-api-access-ts6gv" (OuterVolumeSpecName: "kube-api-access-ts6gv") pod "b5101699-d9de-40a7-a569-72ff4711dca7" (UID: "b5101699-d9de-40a7-a569-72ff4711dca7"). InnerVolumeSpecName "kube-api-access-ts6gv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:51:17 crc kubenswrapper[4799]: I1010 18:51:17.918727 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5101699-d9de-40a7-a569-72ff4711dca7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b5101699-d9de-40a7-a569-72ff4711dca7" (UID: "b5101699-d9de-40a7-a569-72ff4711dca7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:51:17 crc kubenswrapper[4799]: I1010 18:51:17.963072 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ts6gv\" (UniqueName: \"kubernetes.io/projected/b5101699-d9de-40a7-a569-72ff4711dca7-kube-api-access-ts6gv\") on node \"crc\" DevicePath \"\"" Oct 10 18:51:17 crc kubenswrapper[4799]: I1010 18:51:17.963126 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5101699-d9de-40a7-a569-72ff4711dca7-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 18:51:17 crc kubenswrapper[4799]: I1010 18:51:17.963143 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5101699-d9de-40a7-a569-72ff4711dca7-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 18:51:18 crc kubenswrapper[4799]: I1010 18:51:18.228808 4799 generic.go:334] "Generic (PLEG): container finished" podID="b5101699-d9de-40a7-a569-72ff4711dca7" containerID="6567052ba04866694284b8f25594cd38f2e931fe8b889be64ef6cd2ab3d41962" exitCode=0 Oct 10 18:51:18 crc kubenswrapper[4799]: I1010 18:51:18.228877 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-h9mws" Oct 10 18:51:18 crc kubenswrapper[4799]: I1010 18:51:18.228884 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h9mws" event={"ID":"b5101699-d9de-40a7-a569-72ff4711dca7","Type":"ContainerDied","Data":"6567052ba04866694284b8f25594cd38f2e931fe8b889be64ef6cd2ab3d41962"} Oct 10 18:51:18 crc kubenswrapper[4799]: I1010 18:51:18.229016 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h9mws" event={"ID":"b5101699-d9de-40a7-a569-72ff4711dca7","Type":"ContainerDied","Data":"21ff2abbe8627b48479aa5cd1feb00936a42e9e9d0bbb1be43ac1b037d56bf10"} Oct 10 18:51:18 crc kubenswrapper[4799]: I1010 18:51:18.229050 4799 scope.go:117] "RemoveContainer" containerID="6567052ba04866694284b8f25594cd38f2e931fe8b889be64ef6cd2ab3d41962" Oct 10 18:51:18 crc kubenswrapper[4799]: I1010 18:51:18.267115 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-h9mws"] Oct 10 18:51:18 crc kubenswrapper[4799]: I1010 18:51:18.278144 4799 scope.go:117] "RemoveContainer" containerID="a62b4c0e4a22fbb429a66d7af3067806ecaaa5009eff32e49cabb26333a32e69" Oct 10 18:51:18 crc kubenswrapper[4799]: I1010 18:51:18.282693 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-h9mws"] Oct 10 18:51:18 crc kubenswrapper[4799]: I1010 18:51:18.309935 4799 scope.go:117] "RemoveContainer" containerID="e0eed570fffad830bf2e7f3d8b3923c7591eb583c5c2e35490bab43f301eab07" Oct 10 18:51:18 crc kubenswrapper[4799]: I1010 18:51:18.365728 4799 scope.go:117] "RemoveContainer" containerID="6567052ba04866694284b8f25594cd38f2e931fe8b889be64ef6cd2ab3d41962" Oct 10 18:51:18 crc kubenswrapper[4799]: E1010 18:51:18.366358 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6567052ba04866694284b8f25594cd38f2e931fe8b889be64ef6cd2ab3d41962\": container with ID starting with 6567052ba04866694284b8f25594cd38f2e931fe8b889be64ef6cd2ab3d41962 not found: ID does not exist" containerID="6567052ba04866694284b8f25594cd38f2e931fe8b889be64ef6cd2ab3d41962" Oct 10 18:51:18 crc kubenswrapper[4799]: I1010 18:51:18.366441 
4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6567052ba04866694284b8f25594cd38f2e931fe8b889be64ef6cd2ab3d41962"} err="failed to get container status \"6567052ba04866694284b8f25594cd38f2e931fe8b889be64ef6cd2ab3d41962\": rpc error: code = NotFound desc = could not find container \"6567052ba04866694284b8f25594cd38f2e931fe8b889be64ef6cd2ab3d41962\": container with ID starting with 6567052ba04866694284b8f25594cd38f2e931fe8b889be64ef6cd2ab3d41962 not found: ID does not exist" Oct 10 18:51:18 crc kubenswrapper[4799]: I1010 18:51:18.366492 4799 scope.go:117] "RemoveContainer" containerID="a62b4c0e4a22fbb429a66d7af3067806ecaaa5009eff32e49cabb26333a32e69" Oct 10 18:51:18 crc kubenswrapper[4799]: E1010 18:51:18.367921 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a62b4c0e4a22fbb429a66d7af3067806ecaaa5009eff32e49cabb26333a32e69\": container with ID starting with a62b4c0e4a22fbb429a66d7af3067806ecaaa5009eff32e49cabb26333a32e69 not found: ID does not exist" containerID="a62b4c0e4a22fbb429a66d7af3067806ecaaa5009eff32e49cabb26333a32e69" Oct 10 18:51:18 crc kubenswrapper[4799]: I1010 18:51:18.367949 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a62b4c0e4a22fbb429a66d7af3067806ecaaa5009eff32e49cabb26333a32e69"} err="failed to get container status \"a62b4c0e4a22fbb429a66d7af3067806ecaaa5009eff32e49cabb26333a32e69\": rpc error: code = NotFound desc = could not find container \"a62b4c0e4a22fbb429a66d7af3067806ecaaa5009eff32e49cabb26333a32e69\": container with ID starting with a62b4c0e4a22fbb429a66d7af3067806ecaaa5009eff32e49cabb26333a32e69 not found: ID does not exist" Oct 10 18:51:18 crc kubenswrapper[4799]: I1010 18:51:18.367971 4799 scope.go:117] "RemoveContainer" containerID="e0eed570fffad830bf2e7f3d8b3923c7591eb583c5c2e35490bab43f301eab07" Oct 10 18:51:18 crc kubenswrapper[4799]: E1010 18:51:18.368348 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0eed570fffad830bf2e7f3d8b3923c7591eb583c5c2e35490bab43f301eab07\": container with ID starting with e0eed570fffad830bf2e7f3d8b3923c7591eb583c5c2e35490bab43f301eab07 not found: ID does not exist" containerID="e0eed570fffad830bf2e7f3d8b3923c7591eb583c5c2e35490bab43f301eab07" Oct 10 18:51:18 crc kubenswrapper[4799]: I1010 18:51:18.368370 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0eed570fffad830bf2e7f3d8b3923c7591eb583c5c2e35490bab43f301eab07"} err="failed to get container status \"e0eed570fffad830bf2e7f3d8b3923c7591eb583c5c2e35490bab43f301eab07\": rpc error: code = NotFound desc = could not find container \"e0eed570fffad830bf2e7f3d8b3923c7591eb583c5c2e35490bab43f301eab07\": container with ID starting with e0eed570fffad830bf2e7f3d8b3923c7591eb583c5c2e35490bab43f301eab07 not found: ID does not exist" Oct 10 18:51:19 crc kubenswrapper[4799]: I1010 18:51:19.419428 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5101699-d9de-40a7-a569-72ff4711dca7" path="/var/lib/kubelet/pods/b5101699-d9de-40a7-a569-72ff4711dca7/volumes" Oct 10 18:51:21 crc kubenswrapper[4799]: I1010 18:51:21.403198 4799 scope.go:117] "RemoveContainer" containerID="7b1000aefc8a60a452be056f6fd645ae7a95f088784af623ffab036bea5d3d0d" Oct 10 18:51:21 crc kubenswrapper[4799]: E1010 18:51:21.403999 4799 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:51:34 crc kubenswrapper[4799]: I1010 18:51:34.402463 4799 scope.go:117] "RemoveContainer" containerID="7b1000aefc8a60a452be056f6fd645ae7a95f088784af623ffab036bea5d3d0d" Oct 10 18:51:34 crc kubenswrapper[4799]: E1010 18:51:34.403505 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:51:46 crc kubenswrapper[4799]: I1010 18:51:46.416614 4799 scope.go:117] "RemoveContainer" containerID="7b1000aefc8a60a452be056f6fd645ae7a95f088784af623ffab036bea5d3d0d" Oct 10 18:51:46 crc kubenswrapper[4799]: E1010 18:51:46.425484 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:52:00 crc kubenswrapper[4799]: I1010 18:52:00.402731 4799 scope.go:117] "RemoveContainer" containerID="7b1000aefc8a60a452be056f6fd645ae7a95f088784af623ffab036bea5d3d0d" Oct 10 18:52:00 crc kubenswrapper[4799]: E1010 18:52:00.403799 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:52:11 crc kubenswrapper[4799]: I1010 18:52:11.403953 4799 scope.go:117] "RemoveContainer" containerID="7b1000aefc8a60a452be056f6fd645ae7a95f088784af623ffab036bea5d3d0d" Oct 10 18:52:11 crc kubenswrapper[4799]: E1010 18:52:11.405032 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:52:22 crc kubenswrapper[4799]: I1010 18:52:22.402149 4799 scope.go:117] "RemoveContainer" containerID="7b1000aefc8a60a452be056f6fd645ae7a95f088784af623ffab036bea5d3d0d" Oct 10 18:52:22 crc kubenswrapper[4799]: E1010 18:52:22.403265 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:52:31 crc kubenswrapper[4799]: I1010 18:52:31.375908 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-fcsbc"] Oct 10 18:52:31 crc kubenswrapper[4799]: E1010 18:52:31.379023 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5101699-d9de-40a7-a569-72ff4711dca7" containerName="extract-utilities" Oct 10 18:52:31 crc kubenswrapper[4799]: I1010 18:52:31.379230 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5101699-d9de-40a7-a569-72ff4711dca7" containerName="extract-utilities" Oct 10 18:52:31 crc kubenswrapper[4799]: E1010 18:52:31.379391 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5101699-d9de-40a7-a569-72ff4711dca7" containerName="extract-content" Oct 10 18:52:31 crc kubenswrapper[4799]: I1010 18:52:31.379558 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5101699-d9de-40a7-a569-72ff4711dca7" containerName="extract-content" Oct 10 18:52:31 crc kubenswrapper[4799]: E1010 18:52:31.379751 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5101699-d9de-40a7-a569-72ff4711dca7" containerName="registry-server" Oct 10 18:52:31 crc kubenswrapper[4799]: I1010 18:52:31.379911 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5101699-d9de-40a7-a569-72ff4711dca7" containerName="registry-server" Oct 10 18:52:31 crc kubenswrapper[4799]: I1010 18:52:31.380452 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5101699-d9de-40a7-a569-72ff4711dca7" containerName="registry-server" Oct 10 18:52:31 crc kubenswrapper[4799]: I1010 18:52:31.383680 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fcsbc" Oct 10 18:52:31 crc kubenswrapper[4799]: I1010 18:52:31.393212 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fcsbc"] Oct 10 18:52:31 crc kubenswrapper[4799]: I1010 18:52:31.422451 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ccfcac9-5c82-4b34-b90e-a09864bea6b8-catalog-content\") pod \"redhat-marketplace-fcsbc\" (UID: \"6ccfcac9-5c82-4b34-b90e-a09864bea6b8\") " pod="openshift-marketplace/redhat-marketplace-fcsbc" Oct 10 18:52:31 crc kubenswrapper[4799]: I1010 18:52:31.422553 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ccfcac9-5c82-4b34-b90e-a09864bea6b8-utilities\") pod \"redhat-marketplace-fcsbc\" (UID: \"6ccfcac9-5c82-4b34-b90e-a09864bea6b8\") " pod="openshift-marketplace/redhat-marketplace-fcsbc" Oct 10 18:52:31 crc kubenswrapper[4799]: I1010 18:52:31.422671 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wtfzr\" (UniqueName: \"kubernetes.io/projected/6ccfcac9-5c82-4b34-b90e-a09864bea6b8-kube-api-access-wtfzr\") pod \"redhat-marketplace-fcsbc\" (UID: \"6ccfcac9-5c82-4b34-b90e-a09864bea6b8\") " pod="openshift-marketplace/redhat-marketplace-fcsbc" Oct 10 18:52:31 crc kubenswrapper[4799]: I1010 18:52:31.525117 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wtfzr\" (UniqueName: \"kubernetes.io/projected/6ccfcac9-5c82-4b34-b90e-a09864bea6b8-kube-api-access-wtfzr\") pod \"redhat-marketplace-fcsbc\" (UID: \"6ccfcac9-5c82-4b34-b90e-a09864bea6b8\") " pod="openshift-marketplace/redhat-marketplace-fcsbc" Oct 10 18:52:31 crc kubenswrapper[4799]: I1010 18:52:31.525376 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ccfcac9-5c82-4b34-b90e-a09864bea6b8-catalog-content\") pod \"redhat-marketplace-fcsbc\" (UID: \"6ccfcac9-5c82-4b34-b90e-a09864bea6b8\") " pod="openshift-marketplace/redhat-marketplace-fcsbc" Oct 10 18:52:31 crc kubenswrapper[4799]: I1010 18:52:31.525603 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ccfcac9-5c82-4b34-b90e-a09864bea6b8-utilities\") pod \"redhat-marketplace-fcsbc\" (UID: \"6ccfcac9-5c82-4b34-b90e-a09864bea6b8\") " pod="openshift-marketplace/redhat-marketplace-fcsbc" Oct 10 18:52:31 crc kubenswrapper[4799]: I1010 18:52:31.525992 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ccfcac9-5c82-4b34-b90e-a09864bea6b8-catalog-content\") pod \"redhat-marketplace-fcsbc\" (UID: \"6ccfcac9-5c82-4b34-b90e-a09864bea6b8\") " pod="openshift-marketplace/redhat-marketplace-fcsbc" Oct 10 18:52:31 crc kubenswrapper[4799]: I1010 18:52:31.526291 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ccfcac9-5c82-4b34-b90e-a09864bea6b8-utilities\") pod \"redhat-marketplace-fcsbc\" (UID: \"6ccfcac9-5c82-4b34-b90e-a09864bea6b8\") " pod="openshift-marketplace/redhat-marketplace-fcsbc" Oct 10 18:52:31 crc kubenswrapper[4799]: I1010 18:52:31.550312 4799 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-wtfzr\" (UniqueName: \"kubernetes.io/projected/6ccfcac9-5c82-4b34-b90e-a09864bea6b8-kube-api-access-wtfzr\") pod \"redhat-marketplace-fcsbc\" (UID: \"6ccfcac9-5c82-4b34-b90e-a09864bea6b8\") " pod="openshift-marketplace/redhat-marketplace-fcsbc" Oct 10 18:52:31 crc kubenswrapper[4799]: I1010 18:52:31.728724 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fcsbc" Oct 10 18:52:32 crc kubenswrapper[4799]: I1010 18:52:32.253935 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fcsbc"] Oct 10 18:52:33 crc kubenswrapper[4799]: I1010 18:52:33.192264 4799 generic.go:334] "Generic (PLEG): container finished" podID="6ccfcac9-5c82-4b34-b90e-a09864bea6b8" containerID="f0c4fb298e8ad1f6098850f1def9b3f30a6e13792ff2049c64e4d3b16ed8647d" exitCode=0 Oct 10 18:52:33 crc kubenswrapper[4799]: I1010 18:52:33.192879 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fcsbc" event={"ID":"6ccfcac9-5c82-4b34-b90e-a09864bea6b8","Type":"ContainerDied","Data":"f0c4fb298e8ad1f6098850f1def9b3f30a6e13792ff2049c64e4d3b16ed8647d"} Oct 10 18:52:33 crc kubenswrapper[4799]: I1010 18:52:33.192922 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fcsbc" event={"ID":"6ccfcac9-5c82-4b34-b90e-a09864bea6b8","Type":"ContainerStarted","Data":"94b43d908ce35be18d4053dac4b41bb29dba0352d09532e8519f9cd2ea3dd32a"} Oct 10 18:52:33 crc kubenswrapper[4799]: I1010 18:52:33.195812 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 10 18:52:34 crc kubenswrapper[4799]: I1010 18:52:34.207163 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fcsbc" event={"ID":"6ccfcac9-5c82-4b34-b90e-a09864bea6b8","Type":"ContainerStarted","Data":"55eb2988b500e8fdd834e0e067b7f861440add018b7c0e2e7f57ecf2cff74e01"} Oct 10 18:52:35 crc kubenswrapper[4799]: I1010 18:52:35.232029 4799 generic.go:334] "Generic (PLEG): container finished" podID="6ccfcac9-5c82-4b34-b90e-a09864bea6b8" containerID="55eb2988b500e8fdd834e0e067b7f861440add018b7c0e2e7f57ecf2cff74e01" exitCode=0 Oct 10 18:52:35 crc kubenswrapper[4799]: I1010 18:52:35.232091 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fcsbc" event={"ID":"6ccfcac9-5c82-4b34-b90e-a09864bea6b8","Type":"ContainerDied","Data":"55eb2988b500e8fdd834e0e067b7f861440add018b7c0e2e7f57ecf2cff74e01"} Oct 10 18:52:36 crc kubenswrapper[4799]: I1010 18:52:36.247635 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fcsbc" event={"ID":"6ccfcac9-5c82-4b34-b90e-a09864bea6b8","Type":"ContainerStarted","Data":"b3afab1c01cc0f5e262018a32f469177080fd5e1bab940d916f2897f6f56b006"} Oct 10 18:52:36 crc kubenswrapper[4799]: I1010 18:52:36.277242 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-fcsbc" podStartSLOduration=2.593260221 podStartE2EDuration="5.277209719s" podCreationTimestamp="2025-10-10 18:52:31 +0000 UTC" firstStartedPulling="2025-10-10 18:52:33.195529564 +0000 UTC m=+8446.703853679" lastFinishedPulling="2025-10-10 18:52:35.879479022 +0000 UTC m=+8449.387803177" observedRunningTime="2025-10-10 18:52:36.270154147 +0000 UTC m=+8449.778478302" watchObservedRunningTime="2025-10-10 18:52:36.277209719 +0000 UTC 
m=+8449.785533884" Oct 10 18:52:37 crc kubenswrapper[4799]: I1010 18:52:37.412360 4799 scope.go:117] "RemoveContainer" containerID="7b1000aefc8a60a452be056f6fd645ae7a95f088784af623ffab036bea5d3d0d" Oct 10 18:52:37 crc kubenswrapper[4799]: E1010 18:52:37.414157 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:52:41 crc kubenswrapper[4799]: I1010 18:52:41.731104 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-fcsbc" Oct 10 18:52:41 crc kubenswrapper[4799]: I1010 18:52:41.731592 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-fcsbc" Oct 10 18:52:41 crc kubenswrapper[4799]: I1010 18:52:41.822062 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-fcsbc" Oct 10 18:52:42 crc kubenswrapper[4799]: I1010 18:52:42.390469 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-fcsbc" Oct 10 18:52:42 crc kubenswrapper[4799]: I1010 18:52:42.459741 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fcsbc"] Oct 10 18:52:44 crc kubenswrapper[4799]: I1010 18:52:44.342142 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-fcsbc" podUID="6ccfcac9-5c82-4b34-b90e-a09864bea6b8" containerName="registry-server" containerID="cri-o://b3afab1c01cc0f5e262018a32f469177080fd5e1bab940d916f2897f6f56b006" gracePeriod=2 Oct 10 18:52:44 crc kubenswrapper[4799]: I1010 18:52:44.964122 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fcsbc" Oct 10 18:52:45 crc kubenswrapper[4799]: I1010 18:52:45.097472 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wtfzr\" (UniqueName: \"kubernetes.io/projected/6ccfcac9-5c82-4b34-b90e-a09864bea6b8-kube-api-access-wtfzr\") pod \"6ccfcac9-5c82-4b34-b90e-a09864bea6b8\" (UID: \"6ccfcac9-5c82-4b34-b90e-a09864bea6b8\") " Oct 10 18:52:45 crc kubenswrapper[4799]: I1010 18:52:45.097597 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ccfcac9-5c82-4b34-b90e-a09864bea6b8-utilities\") pod \"6ccfcac9-5c82-4b34-b90e-a09864bea6b8\" (UID: \"6ccfcac9-5c82-4b34-b90e-a09864bea6b8\") " Oct 10 18:52:45 crc kubenswrapper[4799]: I1010 18:52:45.097739 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ccfcac9-5c82-4b34-b90e-a09864bea6b8-catalog-content\") pod \"6ccfcac9-5c82-4b34-b90e-a09864bea6b8\" (UID: \"6ccfcac9-5c82-4b34-b90e-a09864bea6b8\") " Oct 10 18:52:45 crc kubenswrapper[4799]: I1010 18:52:45.098788 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ccfcac9-5c82-4b34-b90e-a09864bea6b8-utilities" (OuterVolumeSpecName: "utilities") pod "6ccfcac9-5c82-4b34-b90e-a09864bea6b8" (UID: "6ccfcac9-5c82-4b34-b90e-a09864bea6b8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:52:45 crc kubenswrapper[4799]: I1010 18:52:45.107059 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ccfcac9-5c82-4b34-b90e-a09864bea6b8-kube-api-access-wtfzr" (OuterVolumeSpecName: "kube-api-access-wtfzr") pod "6ccfcac9-5c82-4b34-b90e-a09864bea6b8" (UID: "6ccfcac9-5c82-4b34-b90e-a09864bea6b8"). InnerVolumeSpecName "kube-api-access-wtfzr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:52:45 crc kubenswrapper[4799]: I1010 18:52:45.112126 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ccfcac9-5c82-4b34-b90e-a09864bea6b8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6ccfcac9-5c82-4b34-b90e-a09864bea6b8" (UID: "6ccfcac9-5c82-4b34-b90e-a09864bea6b8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:52:45 crc kubenswrapper[4799]: I1010 18:52:45.200567 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ccfcac9-5c82-4b34-b90e-a09864bea6b8-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 18:52:45 crc kubenswrapper[4799]: I1010 18:52:45.200607 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wtfzr\" (UniqueName: \"kubernetes.io/projected/6ccfcac9-5c82-4b34-b90e-a09864bea6b8-kube-api-access-wtfzr\") on node \"crc\" DevicePath \"\"" Oct 10 18:52:45 crc kubenswrapper[4799]: I1010 18:52:45.200623 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ccfcac9-5c82-4b34-b90e-a09864bea6b8-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 18:52:45 crc kubenswrapper[4799]: I1010 18:52:45.358803 4799 generic.go:334] "Generic (PLEG): container finished" podID="6ccfcac9-5c82-4b34-b90e-a09864bea6b8" containerID="b3afab1c01cc0f5e262018a32f469177080fd5e1bab940d916f2897f6f56b006" exitCode=0 Oct 10 18:52:45 crc kubenswrapper[4799]: I1010 18:52:45.358908 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fcsbc" Oct 10 18:52:45 crc kubenswrapper[4799]: I1010 18:52:45.358888 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fcsbc" event={"ID":"6ccfcac9-5c82-4b34-b90e-a09864bea6b8","Type":"ContainerDied","Data":"b3afab1c01cc0f5e262018a32f469177080fd5e1bab940d916f2897f6f56b006"} Oct 10 18:52:45 crc kubenswrapper[4799]: I1010 18:52:45.358992 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fcsbc" event={"ID":"6ccfcac9-5c82-4b34-b90e-a09864bea6b8","Type":"ContainerDied","Data":"94b43d908ce35be18d4053dac4b41bb29dba0352d09532e8519f9cd2ea3dd32a"} Oct 10 18:52:45 crc kubenswrapper[4799]: I1010 18:52:45.359023 4799 scope.go:117] "RemoveContainer" containerID="b3afab1c01cc0f5e262018a32f469177080fd5e1bab940d916f2897f6f56b006" Oct 10 18:52:45 crc kubenswrapper[4799]: I1010 18:52:45.429033 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fcsbc"] Oct 10 18:52:45 crc kubenswrapper[4799]: I1010 18:52:45.431517 4799 scope.go:117] "RemoveContainer" containerID="55eb2988b500e8fdd834e0e067b7f861440add018b7c0e2e7f57ecf2cff74e01" Oct 10 18:52:45 crc kubenswrapper[4799]: I1010 18:52:45.440554 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-fcsbc"] Oct 10 18:52:45 crc kubenswrapper[4799]: I1010 18:52:45.467968 4799 scope.go:117] "RemoveContainer" containerID="f0c4fb298e8ad1f6098850f1def9b3f30a6e13792ff2049c64e4d3b16ed8647d" Oct 10 18:52:45 crc kubenswrapper[4799]: I1010 18:52:45.532469 4799 scope.go:117] "RemoveContainer" containerID="b3afab1c01cc0f5e262018a32f469177080fd5e1bab940d916f2897f6f56b006" Oct 10 18:52:45 crc kubenswrapper[4799]: E1010 18:52:45.533016 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3afab1c01cc0f5e262018a32f469177080fd5e1bab940d916f2897f6f56b006\": container with ID starting with b3afab1c01cc0f5e262018a32f469177080fd5e1bab940d916f2897f6f56b006 not found: ID does not exist" containerID="b3afab1c01cc0f5e262018a32f469177080fd5e1bab940d916f2897f6f56b006" Oct 10 18:52:45 crc kubenswrapper[4799]: I1010 18:52:45.533053 4799 
Oct 10 18:52:50 crc kubenswrapper[4799]: I1010 18:52:50.403368 4799 scope.go:117] "RemoveContainer" containerID="7b1000aefc8a60a452be056f6fd645ae7a95f088784af623ffab036bea5d3d0d"
Oct 10 18:52:50 crc kubenswrapper[4799]: E1010 18:52:50.405522 4799 pod_workers.go:1301] "Error syncing pod, skipping"
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:53:03 crc kubenswrapper[4799]: I1010 18:53:03.404532 4799 scope.go:117] "RemoveContainer" containerID="7b1000aefc8a60a452be056f6fd645ae7a95f088784af623ffab036bea5d3d0d" Oct 10 18:53:03 crc kubenswrapper[4799]: E1010 18:53:03.407468 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:53:14 crc kubenswrapper[4799]: I1010 18:53:14.402589 4799 scope.go:117] "RemoveContainer" containerID="7b1000aefc8a60a452be056f6fd645ae7a95f088784af623ffab036bea5d3d0d" Oct 10 18:53:14 crc kubenswrapper[4799]: E1010 18:53:14.403347 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:53:26 crc kubenswrapper[4799]: I1010 18:53:26.404833 4799 scope.go:117] "RemoveContainer" containerID="7b1000aefc8a60a452be056f6fd645ae7a95f088784af623ffab036bea5d3d0d" Oct 10 18:53:26 crc kubenswrapper[4799]: E1010 18:53:26.405907 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:53:41 crc kubenswrapper[4799]: I1010 18:53:41.404182 4799 scope.go:117] "RemoveContainer" containerID="7b1000aefc8a60a452be056f6fd645ae7a95f088784af623ffab036bea5d3d0d" Oct 10 18:53:41 crc kubenswrapper[4799]: E1010 18:53:41.405382 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:53:55 crc kubenswrapper[4799]: I1010 18:53:55.403985 4799 scope.go:117] "RemoveContainer" containerID="7b1000aefc8a60a452be056f6fd645ae7a95f088784af623ffab036bea5d3d0d" Oct 10 18:53:55 crc kubenswrapper[4799]: E1010 18:53:55.405470 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:54:06 crc kubenswrapper[4799]: I1010 18:54:06.403673 4799 scope.go:117] "RemoveContainer" containerID="7b1000aefc8a60a452be056f6fd645ae7a95f088784af623ffab036bea5d3d0d" Oct 10 18:54:06 crc kubenswrapper[4799]: E1010 18:54:06.404609 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:54:19 crc kubenswrapper[4799]: I1010 18:54:19.403350 4799 scope.go:117] "RemoveContainer" containerID="7b1000aefc8a60a452be056f6fd645ae7a95f088784af623ffab036bea5d3d0d" Oct 10 18:54:19 crc kubenswrapper[4799]: E1010 18:54:19.404291 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:54:30 crc kubenswrapper[4799]: I1010 18:54:30.402612 4799 scope.go:117] "RemoveContainer" containerID="7b1000aefc8a60a452be056f6fd645ae7a95f088784af623ffab036bea5d3d0d" Oct 10 18:54:30 crc kubenswrapper[4799]: E1010 18:54:30.405370 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:54:41 crc kubenswrapper[4799]: I1010 18:54:41.402903 4799 scope.go:117] "RemoveContainer" containerID="7b1000aefc8a60a452be056f6fd645ae7a95f088784af623ffab036bea5d3d0d" Oct 10 18:54:41 crc kubenswrapper[4799]: E1010 18:54:41.404121 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:54:55 crc kubenswrapper[4799]: I1010 18:54:55.403513 4799 scope.go:117] "RemoveContainer" containerID="7b1000aefc8a60a452be056f6fd645ae7a95f088784af623ffab036bea5d3d0d" Oct 10 18:54:55 crc kubenswrapper[4799]: E1010 18:54:55.404645 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" 
podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:55:08 crc kubenswrapper[4799]: I1010 18:55:08.403974 4799 scope.go:117] "RemoveContainer" containerID="7b1000aefc8a60a452be056f6fd645ae7a95f088784af623ffab036bea5d3d0d" Oct 10 18:55:08 crc kubenswrapper[4799]: E1010 18:55:08.405424 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:55:19 crc kubenswrapper[4799]: I1010 18:55:19.403642 4799 scope.go:117] "RemoveContainer" containerID="7b1000aefc8a60a452be056f6fd645ae7a95f088784af623ffab036bea5d3d0d" Oct 10 18:55:19 crc kubenswrapper[4799]: E1010 18:55:19.405326 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:55:30 crc kubenswrapper[4799]: I1010 18:55:30.403126 4799 scope.go:117] "RemoveContainer" containerID="7b1000aefc8a60a452be056f6fd645ae7a95f088784af623ffab036bea5d3d0d" Oct 10 18:55:30 crc kubenswrapper[4799]: E1010 18:55:30.404242 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:55:39 crc kubenswrapper[4799]: I1010 18:55:39.624374 4799 generic.go:334] "Generic (PLEG): container finished" podID="02816ea3-9fb7-46aa-ae23-d6ff431c08b4" containerID="c8905b9f872e02abda9c624ae56092c3e6bd71397e3f9f8c318425ec72d79126" exitCode=0 Oct 10 18:55:39 crc kubenswrapper[4799]: I1010 18:55:39.624423 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-kwcwz" event={"ID":"02816ea3-9fb7-46aa-ae23-d6ff431c08b4","Type":"ContainerDied","Data":"c8905b9f872e02abda9c624ae56092c3e6bd71397e3f9f8c318425ec72d79126"} Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.244737 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-kwcwz" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.350199 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-ceilometer-compute-config-data-1\") pod \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\" (UID: \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\") " Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.350654 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-ssh-key\") pod \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\" (UID: \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\") " Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.350897 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-ceph\") pod \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\" (UID: \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\") " Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.351697 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-ceilometer-compute-config-data-2\") pod \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\" (UID: \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\") " Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.351848 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-telemetry-combined-ca-bundle\") pod \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\" (UID: \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\") " Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.351983 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-inventory\") pod \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\" (UID: \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\") " Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.352114 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fmbh7\" (UniqueName: \"kubernetes.io/projected/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-kube-api-access-fmbh7\") pod \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\" (UID: \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\") " Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.352282 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-ceilometer-compute-config-data-0\") pod \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\" (UID: \"02816ea3-9fb7-46aa-ae23-d6ff431c08b4\") " Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.358934 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-kube-api-access-fmbh7" (OuterVolumeSpecName: "kube-api-access-fmbh7") pod "02816ea3-9fb7-46aa-ae23-d6ff431c08b4" (UID: "02816ea3-9fb7-46aa-ae23-d6ff431c08b4"). InnerVolumeSpecName "kube-api-access-fmbh7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.360452 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-ceph" (OuterVolumeSpecName: "ceph") pod "02816ea3-9fb7-46aa-ae23-d6ff431c08b4" (UID: "02816ea3-9fb7-46aa-ae23-d6ff431c08b4"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.364447 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "02816ea3-9fb7-46aa-ae23-d6ff431c08b4" (UID: "02816ea3-9fb7-46aa-ae23-d6ff431c08b4"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.381366 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "02816ea3-9fb7-46aa-ae23-d6ff431c08b4" (UID: "02816ea3-9fb7-46aa-ae23-d6ff431c08b4"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.381399 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "02816ea3-9fb7-46aa-ae23-d6ff431c08b4" (UID: "02816ea3-9fb7-46aa-ae23-d6ff431c08b4"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.402838 4799 scope.go:117] "RemoveContainer" containerID="7b1000aefc8a60a452be056f6fd645ae7a95f088784af623ffab036bea5d3d0d" Oct 10 18:55:41 crc kubenswrapper[4799]: E1010 18:55:41.403626 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.405729 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "02816ea3-9fb7-46aa-ae23-d6ff431c08b4" (UID: "02816ea3-9fb7-46aa-ae23-d6ff431c08b4"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.411541 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-inventory" (OuterVolumeSpecName: "inventory") pod "02816ea3-9fb7-46aa-ae23-d6ff431c08b4" (UID: "02816ea3-9fb7-46aa-ae23-d6ff431c08b4"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.412978 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "02816ea3-9fb7-46aa-ae23-d6ff431c08b4" (UID: "02816ea3-9fb7-46aa-ae23-d6ff431c08b4"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.456370 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.456612 4799 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-ceph\") on node \"crc\" DevicePath \"\"" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.456703 4799 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.456824 4799 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.456906 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-inventory\") on node \"crc\" DevicePath \"\"" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.456990 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fmbh7\" (UniqueName: \"kubernetes.io/projected/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-kube-api-access-fmbh7\") on node \"crc\" DevicePath \"\"" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.457074 4799 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.457150 4799 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/02816ea3-9fb7-46aa-ae23-d6ff431c08b4-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.656906 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-kwcwz" event={"ID":"02816ea3-9fb7-46aa-ae23-d6ff431c08b4","Type":"ContainerDied","Data":"7668df1af0d5c44af14ede5402f283b6f7da687674d1f292c46fd381662cbb83"} Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.656950 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7668df1af0d5c44af14ede5402f283b6f7da687674d1f292c46fd381662cbb83" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.657318 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-kwcwz" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.786926 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-8nm8r"] Oct 10 18:55:41 crc kubenswrapper[4799]: E1010 18:55:41.789723 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ccfcac9-5c82-4b34-b90e-a09864bea6b8" containerName="extract-utilities" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.789763 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ccfcac9-5c82-4b34-b90e-a09864bea6b8" containerName="extract-utilities" Oct 10 18:55:41 crc kubenswrapper[4799]: E1010 18:55:41.789856 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ccfcac9-5c82-4b34-b90e-a09864bea6b8" containerName="registry-server" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.789870 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ccfcac9-5c82-4b34-b90e-a09864bea6b8" containerName="registry-server" Oct 10 18:55:41 crc kubenswrapper[4799]: E1010 18:55:41.789900 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02816ea3-9fb7-46aa-ae23-d6ff431c08b4" containerName="telemetry-openstack-openstack-cell1" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.789915 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="02816ea3-9fb7-46aa-ae23-d6ff431c08b4" containerName="telemetry-openstack-openstack-cell1" Oct 10 18:55:41 crc kubenswrapper[4799]: E1010 18:55:41.789950 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ccfcac9-5c82-4b34-b90e-a09864bea6b8" containerName="extract-content" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.789962 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ccfcac9-5c82-4b34-b90e-a09864bea6b8" containerName="extract-content" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.790398 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="02816ea3-9fb7-46aa-ae23-d6ff431c08b4" containerName="telemetry-openstack-openstack-cell1" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.790464 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ccfcac9-5c82-4b34-b90e-a09864bea6b8" containerName="registry-server" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.791804 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-8nm8r" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.793946 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-sriov-agent-neutron-config" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.794629 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.794843 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-rdlhr" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.807211 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.807809 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.818376 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-8nm8r"] Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.864072 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2ec587b-71a4-4612-8079-4c32275f871d-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-8nm8r\" (UID: \"c2ec587b-71a4-4612-8079-4c32275f871d\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8nm8r" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.864135 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c2ec587b-71a4-4612-8079-4c32275f871d-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-8nm8r\" (UID: \"c2ec587b-71a4-4612-8079-4c32275f871d\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8nm8r" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.864191 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c2ec587b-71a4-4612-8079-4c32275f871d-ssh-key\") pod \"neutron-sriov-openstack-openstack-cell1-8nm8r\" (UID: \"c2ec587b-71a4-4612-8079-4c32275f871d\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8nm8r" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.864321 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4t27f\" (UniqueName: \"kubernetes.io/projected/c2ec587b-71a4-4612-8079-4c32275f871d-kube-api-access-4t27f\") pod \"neutron-sriov-openstack-openstack-cell1-8nm8r\" (UID: \"c2ec587b-71a4-4612-8079-4c32275f871d\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8nm8r" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.864349 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c2ec587b-71a4-4612-8079-4c32275f871d-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-8nm8r\" (UID: \"c2ec587b-71a4-4612-8079-4c32275f871d\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8nm8r" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.864458 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-agent-neutron-config-0\" 
(UniqueName: \"kubernetes.io/secret/c2ec587b-71a4-4612-8079-4c32275f871d-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-8nm8r\" (UID: \"c2ec587b-71a4-4612-8079-4c32275f871d\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8nm8r" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.965806 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4t27f\" (UniqueName: \"kubernetes.io/projected/c2ec587b-71a4-4612-8079-4c32275f871d-kube-api-access-4t27f\") pod \"neutron-sriov-openstack-openstack-cell1-8nm8r\" (UID: \"c2ec587b-71a4-4612-8079-4c32275f871d\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8nm8r" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.965853 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c2ec587b-71a4-4612-8079-4c32275f871d-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-8nm8r\" (UID: \"c2ec587b-71a4-4612-8079-4c32275f871d\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8nm8r" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.965900 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c2ec587b-71a4-4612-8079-4c32275f871d-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-8nm8r\" (UID: \"c2ec587b-71a4-4612-8079-4c32275f871d\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8nm8r" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.966076 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2ec587b-71a4-4612-8079-4c32275f871d-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-8nm8r\" (UID: \"c2ec587b-71a4-4612-8079-4c32275f871d\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8nm8r" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.966221 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c2ec587b-71a4-4612-8079-4c32275f871d-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-8nm8r\" (UID: \"c2ec587b-71a4-4612-8079-4c32275f871d\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8nm8r" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.967801 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c2ec587b-71a4-4612-8079-4c32275f871d-ssh-key\") pod \"neutron-sriov-openstack-openstack-cell1-8nm8r\" (UID: \"c2ec587b-71a4-4612-8079-4c32275f871d\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8nm8r" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.970431 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c2ec587b-71a4-4612-8079-4c32275f871d-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-8nm8r\" (UID: \"c2ec587b-71a4-4612-8079-4c32275f871d\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8nm8r" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.970608 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c2ec587b-71a4-4612-8079-4c32275f871d-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-8nm8r\" (UID: 
\"c2ec587b-71a4-4612-8079-4c32275f871d\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8nm8r" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.972096 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2ec587b-71a4-4612-8079-4c32275f871d-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-8nm8r\" (UID: \"c2ec587b-71a4-4612-8079-4c32275f871d\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8nm8r" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.972751 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c2ec587b-71a4-4612-8079-4c32275f871d-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-8nm8r\" (UID: \"c2ec587b-71a4-4612-8079-4c32275f871d\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8nm8r" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.973200 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c2ec587b-71a4-4612-8079-4c32275f871d-ssh-key\") pod \"neutron-sriov-openstack-openstack-cell1-8nm8r\" (UID: \"c2ec587b-71a4-4612-8079-4c32275f871d\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8nm8r" Oct 10 18:55:41 crc kubenswrapper[4799]: I1010 18:55:41.994944 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4t27f\" (UniqueName: \"kubernetes.io/projected/c2ec587b-71a4-4612-8079-4c32275f871d-kube-api-access-4t27f\") pod \"neutron-sriov-openstack-openstack-cell1-8nm8r\" (UID: \"c2ec587b-71a4-4612-8079-4c32275f871d\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8nm8r" Oct 10 18:55:42 crc kubenswrapper[4799]: I1010 18:55:42.119559 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-8nm8r" Oct 10 18:55:42 crc kubenswrapper[4799]: I1010 18:55:42.573673 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-8nm8r"] Oct 10 18:55:42 crc kubenswrapper[4799]: I1010 18:55:42.667781 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-8nm8r" event={"ID":"c2ec587b-71a4-4612-8079-4c32275f871d","Type":"ContainerStarted","Data":"9961aa8b5410a064c1077fb8a3c7a460b39c3e36335e5357c3825489bcc3b53f"} Oct 10 18:55:43 crc kubenswrapper[4799]: I1010 18:55:43.677277 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-8nm8r" event={"ID":"c2ec587b-71a4-4612-8079-4c32275f871d","Type":"ContainerStarted","Data":"674777ba9e3a7c95d493c25999b8bd75910fee49bbbb0c00182ee34642812aa7"} Oct 10 18:55:43 crc kubenswrapper[4799]: I1010 18:55:43.701267 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-sriov-openstack-openstack-cell1-8nm8r" podStartSLOduration=2.15475171 podStartE2EDuration="2.701250131s" podCreationTimestamp="2025-10-10 18:55:41 +0000 UTC" firstStartedPulling="2025-10-10 18:55:42.593115229 +0000 UTC m=+8636.101439384" lastFinishedPulling="2025-10-10 18:55:43.13961368 +0000 UTC m=+8636.647937805" observedRunningTime="2025-10-10 18:55:43.693460261 +0000 UTC m=+8637.201784386" watchObservedRunningTime="2025-10-10 18:55:43.701250131 +0000 UTC m=+8637.209574246" Oct 10 18:55:53 crc kubenswrapper[4799]: I1010 18:55:53.403246 4799 scope.go:117] "RemoveContainer" containerID="7b1000aefc8a60a452be056f6fd645ae7a95f088784af623ffab036bea5d3d0d" Oct 10 18:55:53 crc kubenswrapper[4799]: I1010 18:55:53.812644 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"5ff5a69863067c2e020a02278a8b3d2cd11069a30ff86a06260c11a3866bc104"} Oct 10 18:56:44 crc kubenswrapper[4799]: I1010 18:56:44.459450 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-nnc4b"] Oct 10 18:56:44 crc kubenswrapper[4799]: I1010 18:56:44.464415 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nnc4b" Oct 10 18:56:44 crc kubenswrapper[4799]: I1010 18:56:44.504026 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nnc4b"] Oct 10 18:56:44 crc kubenswrapper[4799]: I1010 18:56:44.569558 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f81227ff-44b2-4f5d-8bb3-ab41c37093fd-utilities\") pod \"redhat-operators-nnc4b\" (UID: \"f81227ff-44b2-4f5d-8bb3-ab41c37093fd\") " pod="openshift-marketplace/redhat-operators-nnc4b" Oct 10 18:56:44 crc kubenswrapper[4799]: I1010 18:56:44.569692 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sw9nj\" (UniqueName: \"kubernetes.io/projected/f81227ff-44b2-4f5d-8bb3-ab41c37093fd-kube-api-access-sw9nj\") pod \"redhat-operators-nnc4b\" (UID: \"f81227ff-44b2-4f5d-8bb3-ab41c37093fd\") " pod="openshift-marketplace/redhat-operators-nnc4b" Oct 10 18:56:44 crc kubenswrapper[4799]: I1010 18:56:44.569820 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f81227ff-44b2-4f5d-8bb3-ab41c37093fd-catalog-content\") pod \"redhat-operators-nnc4b\" (UID: \"f81227ff-44b2-4f5d-8bb3-ab41c37093fd\") " pod="openshift-marketplace/redhat-operators-nnc4b" Oct 10 18:56:44 crc kubenswrapper[4799]: I1010 18:56:44.672032 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f81227ff-44b2-4f5d-8bb3-ab41c37093fd-utilities\") pod \"redhat-operators-nnc4b\" (UID: \"f81227ff-44b2-4f5d-8bb3-ab41c37093fd\") " pod="openshift-marketplace/redhat-operators-nnc4b" Oct 10 18:56:44 crc kubenswrapper[4799]: I1010 18:56:44.672193 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sw9nj\" (UniqueName: \"kubernetes.io/projected/f81227ff-44b2-4f5d-8bb3-ab41c37093fd-kube-api-access-sw9nj\") pod \"redhat-operators-nnc4b\" (UID: \"f81227ff-44b2-4f5d-8bb3-ab41c37093fd\") " pod="openshift-marketplace/redhat-operators-nnc4b" Oct 10 18:56:44 crc kubenswrapper[4799]: I1010 18:56:44.672319 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f81227ff-44b2-4f5d-8bb3-ab41c37093fd-catalog-content\") pod \"redhat-operators-nnc4b\" (UID: \"f81227ff-44b2-4f5d-8bb3-ab41c37093fd\") " pod="openshift-marketplace/redhat-operators-nnc4b" Oct 10 18:56:44 crc kubenswrapper[4799]: I1010 18:56:44.672866 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f81227ff-44b2-4f5d-8bb3-ab41c37093fd-utilities\") pod \"redhat-operators-nnc4b\" (UID: \"f81227ff-44b2-4f5d-8bb3-ab41c37093fd\") " pod="openshift-marketplace/redhat-operators-nnc4b" Oct 10 18:56:44 crc kubenswrapper[4799]: I1010 18:56:44.672902 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f81227ff-44b2-4f5d-8bb3-ab41c37093fd-catalog-content\") pod \"redhat-operators-nnc4b\" (UID: \"f81227ff-44b2-4f5d-8bb3-ab41c37093fd\") " pod="openshift-marketplace/redhat-operators-nnc4b" Oct 10 18:56:44 crc kubenswrapper[4799]: I1010 18:56:44.696457 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-sw9nj\" (UniqueName: \"kubernetes.io/projected/f81227ff-44b2-4f5d-8bb3-ab41c37093fd-kube-api-access-sw9nj\") pod \"redhat-operators-nnc4b\" (UID: \"f81227ff-44b2-4f5d-8bb3-ab41c37093fd\") " pod="openshift-marketplace/redhat-operators-nnc4b" Oct 10 18:56:44 crc kubenswrapper[4799]: I1010 18:56:44.806821 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nnc4b" Oct 10 18:56:45 crc kubenswrapper[4799]: I1010 18:56:45.260602 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nnc4b"] Oct 10 18:56:45 crc kubenswrapper[4799]: I1010 18:56:45.547607 4799 generic.go:334] "Generic (PLEG): container finished" podID="f81227ff-44b2-4f5d-8bb3-ab41c37093fd" containerID="910e660c4b83152524e88cb306170a954ac5f67ae82204f0eeaab9cd39e96483" exitCode=0 Oct 10 18:56:45 crc kubenswrapper[4799]: I1010 18:56:45.547665 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nnc4b" event={"ID":"f81227ff-44b2-4f5d-8bb3-ab41c37093fd","Type":"ContainerDied","Data":"910e660c4b83152524e88cb306170a954ac5f67ae82204f0eeaab9cd39e96483"} Oct 10 18:56:45 crc kubenswrapper[4799]: I1010 18:56:45.548155 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nnc4b" event={"ID":"f81227ff-44b2-4f5d-8bb3-ab41c37093fd","Type":"ContainerStarted","Data":"c3a9c4c33bf4e2eeff8b99c767dea842df4165391a203d6bd2894698ef3b43c3"} Oct 10 18:56:46 crc kubenswrapper[4799]: I1010 18:56:46.564537 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nnc4b" event={"ID":"f81227ff-44b2-4f5d-8bb3-ab41c37093fd","Type":"ContainerStarted","Data":"a23f106bdb00419a05ffc6abeff67daebd97f6627915fc64fd1729c608df0806"} Oct 10 18:56:49 crc kubenswrapper[4799]: I1010 18:56:49.610679 4799 generic.go:334] "Generic (PLEG): container finished" podID="f81227ff-44b2-4f5d-8bb3-ab41c37093fd" containerID="a23f106bdb00419a05ffc6abeff67daebd97f6627915fc64fd1729c608df0806" exitCode=0 Oct 10 18:56:49 crc kubenswrapper[4799]: I1010 18:56:49.610896 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nnc4b" event={"ID":"f81227ff-44b2-4f5d-8bb3-ab41c37093fd","Type":"ContainerDied","Data":"a23f106bdb00419a05ffc6abeff67daebd97f6627915fc64fd1729c608df0806"} Oct 10 18:56:50 crc kubenswrapper[4799]: I1010 18:56:50.635572 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nnc4b" event={"ID":"f81227ff-44b2-4f5d-8bb3-ab41c37093fd","Type":"ContainerStarted","Data":"ec5adbc63e9e28730551fd5d5bb415ee33032766303a1ae95d4bc1d5a83c42fd"} Oct 10 18:56:50 crc kubenswrapper[4799]: I1010 18:56:50.666643 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-nnc4b" podStartSLOduration=2.185152934 podStartE2EDuration="6.666625626s" podCreationTimestamp="2025-10-10 18:56:44 +0000 UTC" firstStartedPulling="2025-10-10 18:56:45.549399022 +0000 UTC m=+8699.057723137" lastFinishedPulling="2025-10-10 18:56:50.030871674 +0000 UTC m=+8703.539195829" observedRunningTime="2025-10-10 18:56:50.664935735 +0000 UTC m=+8704.173259860" watchObservedRunningTime="2025-10-10 18:56:50.666625626 +0000 UTC m=+8704.174949741" Oct 10 18:56:54 crc kubenswrapper[4799]: I1010 18:56:54.807926 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-nnc4b" Oct 10 
Oct 10 18:57:04 crc kubenswrapper[4799]: I1010 18:57:04.956396 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-nnc4b"
Oct 10 18:57:05 crc kubenswrapper[4799]: I1010 18:57:05.015170 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-nnc4b"
Oct 10 18:57:05 crc kubenswrapper[4799]: I1010 18:57:05.213664 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nnc4b"]
Oct 10 18:57:06 crc kubenswrapper[4799]: I1010 18:57:06.854418 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-nnc4b" podUID="f81227ff-44b2-4f5d-8bb3-ab41c37093fd" containerName="registry-server" containerID="cri-o://ec5adbc63e9e28730551fd5d5bb415ee33032766303a1ae95d4bc1d5a83c42fd" gracePeriod=2
Oct 10 18:57:07 crc kubenswrapper[4799]: I1010 18:57:07.396482 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nnc4b"
Oct 10 18:57:07 crc kubenswrapper[4799]: I1010 18:57:07.557138 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sw9nj\" (UniqueName: \"kubernetes.io/projected/f81227ff-44b2-4f5d-8bb3-ab41c37093fd-kube-api-access-sw9nj\") pod \"f81227ff-44b2-4f5d-8bb3-ab41c37093fd\" (UID: \"f81227ff-44b2-4f5d-8bb3-ab41c37093fd\") "
Oct 10 18:57:07 crc kubenswrapper[4799]: I1010 18:57:07.557512 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f81227ff-44b2-4f5d-8bb3-ab41c37093fd-catalog-content\") pod \"f81227ff-44b2-4f5d-8bb3-ab41c37093fd\" (UID: \"f81227ff-44b2-4f5d-8bb3-ab41c37093fd\") "
Oct 10 18:57:07 crc kubenswrapper[4799]: I1010 18:57:07.557652 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f81227ff-44b2-4f5d-8bb3-ab41c37093fd-utilities\") pod \"f81227ff-44b2-4f5d-8bb3-ab41c37093fd\" (UID: \"f81227ff-44b2-4f5d-8bb3-ab41c37093fd\") "
Oct 10 18:57:07 crc kubenswrapper[4799]: I1010 18:57:07.559873 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f81227ff-44b2-4f5d-8bb3-ab41c37093fd-utilities" (OuterVolumeSpecName: "utilities") pod "f81227ff-44b2-4f5d-8bb3-ab41c37093fd" (UID: "f81227ff-44b2-4f5d-8bb3-ab41c37093fd"). InnerVolumeSpecName "utilities".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:57:07 crc kubenswrapper[4799]: I1010 18:57:07.568706 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f81227ff-44b2-4f5d-8bb3-ab41c37093fd-kube-api-access-sw9nj" (OuterVolumeSpecName: "kube-api-access-sw9nj") pod "f81227ff-44b2-4f5d-8bb3-ab41c37093fd" (UID: "f81227ff-44b2-4f5d-8bb3-ab41c37093fd"). InnerVolumeSpecName "kube-api-access-sw9nj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:57:07 crc kubenswrapper[4799]: I1010 18:57:07.631837 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f81227ff-44b2-4f5d-8bb3-ab41c37093fd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f81227ff-44b2-4f5d-8bb3-ab41c37093fd" (UID: "f81227ff-44b2-4f5d-8bb3-ab41c37093fd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:57:07 crc kubenswrapper[4799]: I1010 18:57:07.660492 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f81227ff-44b2-4f5d-8bb3-ab41c37093fd-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 18:57:07 crc kubenswrapper[4799]: I1010 18:57:07.660526 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sw9nj\" (UniqueName: \"kubernetes.io/projected/f81227ff-44b2-4f5d-8bb3-ab41c37093fd-kube-api-access-sw9nj\") on node \"crc\" DevicePath \"\"" Oct 10 18:57:07 crc kubenswrapper[4799]: I1010 18:57:07.660540 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f81227ff-44b2-4f5d-8bb3-ab41c37093fd-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 18:57:07 crc kubenswrapper[4799]: I1010 18:57:07.876112 4799 generic.go:334] "Generic (PLEG): container finished" podID="f81227ff-44b2-4f5d-8bb3-ab41c37093fd" containerID="ec5adbc63e9e28730551fd5d5bb415ee33032766303a1ae95d4bc1d5a83c42fd" exitCode=0 Oct 10 18:57:07 crc kubenswrapper[4799]: I1010 18:57:07.876180 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nnc4b" event={"ID":"f81227ff-44b2-4f5d-8bb3-ab41c37093fd","Type":"ContainerDied","Data":"ec5adbc63e9e28730551fd5d5bb415ee33032766303a1ae95d4bc1d5a83c42fd"} Oct 10 18:57:07 crc kubenswrapper[4799]: I1010 18:57:07.876225 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nnc4b" event={"ID":"f81227ff-44b2-4f5d-8bb3-ab41c37093fd","Type":"ContainerDied","Data":"c3a9c4c33bf4e2eeff8b99c767dea842df4165391a203d6bd2894698ef3b43c3"} Oct 10 18:57:07 crc kubenswrapper[4799]: I1010 18:57:07.876254 4799 scope.go:117] "RemoveContainer" containerID="ec5adbc63e9e28730551fd5d5bb415ee33032766303a1ae95d4bc1d5a83c42fd" Oct 10 18:57:07 crc kubenswrapper[4799]: I1010 18:57:07.876258 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nnc4b" Oct 10 18:57:07 crc kubenswrapper[4799]: I1010 18:57:07.927304 4799 scope.go:117] "RemoveContainer" containerID="a23f106bdb00419a05ffc6abeff67daebd97f6627915fc64fd1729c608df0806" Oct 10 18:57:07 crc kubenswrapper[4799]: I1010 18:57:07.938748 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nnc4b"] Oct 10 18:57:07 crc kubenswrapper[4799]: I1010 18:57:07.958111 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-nnc4b"] Oct 10 18:57:07 crc kubenswrapper[4799]: I1010 18:57:07.985801 4799 scope.go:117] "RemoveContainer" containerID="910e660c4b83152524e88cb306170a954ac5f67ae82204f0eeaab9cd39e96483" Oct 10 18:57:08 crc kubenswrapper[4799]: I1010 18:57:08.045077 4799 scope.go:117] "RemoveContainer" containerID="ec5adbc63e9e28730551fd5d5bb415ee33032766303a1ae95d4bc1d5a83c42fd" Oct 10 18:57:08 crc kubenswrapper[4799]: E1010 18:57:08.045813 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec5adbc63e9e28730551fd5d5bb415ee33032766303a1ae95d4bc1d5a83c42fd\": container with ID starting with ec5adbc63e9e28730551fd5d5bb415ee33032766303a1ae95d4bc1d5a83c42fd not found: ID does not exist" containerID="ec5adbc63e9e28730551fd5d5bb415ee33032766303a1ae95d4bc1d5a83c42fd" Oct 10 18:57:08 crc kubenswrapper[4799]: I1010 18:57:08.045863 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec5adbc63e9e28730551fd5d5bb415ee33032766303a1ae95d4bc1d5a83c42fd"} err="failed to get container status \"ec5adbc63e9e28730551fd5d5bb415ee33032766303a1ae95d4bc1d5a83c42fd\": rpc error: code = NotFound desc = could not find container \"ec5adbc63e9e28730551fd5d5bb415ee33032766303a1ae95d4bc1d5a83c42fd\": container with ID starting with ec5adbc63e9e28730551fd5d5bb415ee33032766303a1ae95d4bc1d5a83c42fd not found: ID does not exist" Oct 10 18:57:08 crc kubenswrapper[4799]: I1010 18:57:08.045897 4799 scope.go:117] "RemoveContainer" containerID="a23f106bdb00419a05ffc6abeff67daebd97f6627915fc64fd1729c608df0806" Oct 10 18:57:08 crc kubenswrapper[4799]: E1010 18:57:08.046781 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a23f106bdb00419a05ffc6abeff67daebd97f6627915fc64fd1729c608df0806\": container with ID starting with a23f106bdb00419a05ffc6abeff67daebd97f6627915fc64fd1729c608df0806 not found: ID does not exist" containerID="a23f106bdb00419a05ffc6abeff67daebd97f6627915fc64fd1729c608df0806" Oct 10 18:57:08 crc kubenswrapper[4799]: I1010 18:57:08.046805 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a23f106bdb00419a05ffc6abeff67daebd97f6627915fc64fd1729c608df0806"} err="failed to get container status \"a23f106bdb00419a05ffc6abeff67daebd97f6627915fc64fd1729c608df0806\": rpc error: code = NotFound desc = could not find container \"a23f106bdb00419a05ffc6abeff67daebd97f6627915fc64fd1729c608df0806\": container with ID starting with a23f106bdb00419a05ffc6abeff67daebd97f6627915fc64fd1729c608df0806 not found: ID does not exist" Oct 10 18:57:08 crc kubenswrapper[4799]: I1010 18:57:08.046817 4799 scope.go:117] "RemoveContainer" containerID="910e660c4b83152524e88cb306170a954ac5f67ae82204f0eeaab9cd39e96483" Oct 10 18:57:08 crc kubenswrapper[4799]: E1010 18:57:08.047245 4799 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"910e660c4b83152524e88cb306170a954ac5f67ae82204f0eeaab9cd39e96483\": container with ID starting with 910e660c4b83152524e88cb306170a954ac5f67ae82204f0eeaab9cd39e96483 not found: ID does not exist" containerID="910e660c4b83152524e88cb306170a954ac5f67ae82204f0eeaab9cd39e96483" Oct 10 18:57:08 crc kubenswrapper[4799]: I1010 18:57:08.047287 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"910e660c4b83152524e88cb306170a954ac5f67ae82204f0eeaab9cd39e96483"} err="failed to get container status \"910e660c4b83152524e88cb306170a954ac5f67ae82204f0eeaab9cd39e96483\": rpc error: code = NotFound desc = could not find container \"910e660c4b83152524e88cb306170a954ac5f67ae82204f0eeaab9cd39e96483\": container with ID starting with 910e660c4b83152524e88cb306170a954ac5f67ae82204f0eeaab9cd39e96483 not found: ID does not exist" Oct 10 18:57:09 crc kubenswrapper[4799]: I1010 18:57:09.427520 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f81227ff-44b2-4f5d-8bb3-ab41c37093fd" path="/var/lib/kubelet/pods/f81227ff-44b2-4f5d-8bb3-ab41c37093fd/volumes" Oct 10 18:58:15 crc kubenswrapper[4799]: I1010 18:58:15.249304 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 18:58:15 crc kubenswrapper[4799]: I1010 18:58:15.250019 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 18:58:45 crc kubenswrapper[4799]: I1010 18:58:45.249247 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 18:58:45 crc kubenswrapper[4799]: I1010 18:58:45.250008 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 18:58:48 crc kubenswrapper[4799]: I1010 18:58:48.325995 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-sk6jx"] Oct 10 18:58:48 crc kubenswrapper[4799]: E1010 18:58:48.328073 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f81227ff-44b2-4f5d-8bb3-ab41c37093fd" containerName="extract-content" Oct 10 18:58:48 crc kubenswrapper[4799]: I1010 18:58:48.328103 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f81227ff-44b2-4f5d-8bb3-ab41c37093fd" containerName="extract-content" Oct 10 18:58:48 crc kubenswrapper[4799]: E1010 18:58:48.328171 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f81227ff-44b2-4f5d-8bb3-ab41c37093fd" containerName="extract-utilities" Oct 10 18:58:48 crc kubenswrapper[4799]: I1010 18:58:48.328187 4799 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="f81227ff-44b2-4f5d-8bb3-ab41c37093fd" containerName="extract-utilities" Oct 10 18:58:48 crc kubenswrapper[4799]: E1010 18:58:48.328247 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f81227ff-44b2-4f5d-8bb3-ab41c37093fd" containerName="registry-server" Oct 10 18:58:48 crc kubenswrapper[4799]: I1010 18:58:48.328264 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f81227ff-44b2-4f5d-8bb3-ab41c37093fd" containerName="registry-server" Oct 10 18:58:48 crc kubenswrapper[4799]: I1010 18:58:48.328804 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f81227ff-44b2-4f5d-8bb3-ab41c37093fd" containerName="registry-server" Oct 10 18:58:48 crc kubenswrapper[4799]: I1010 18:58:48.332035 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sk6jx" Oct 10 18:58:48 crc kubenswrapper[4799]: I1010 18:58:48.345236 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-sk6jx"] Oct 10 18:58:48 crc kubenswrapper[4799]: I1010 18:58:48.345978 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skswg\" (UniqueName: \"kubernetes.io/projected/b07da810-72ef-4a53-b3c1-bfa3734a2219-kube-api-access-skswg\") pod \"certified-operators-sk6jx\" (UID: \"b07da810-72ef-4a53-b3c1-bfa3734a2219\") " pod="openshift-marketplace/certified-operators-sk6jx" Oct 10 18:58:48 crc kubenswrapper[4799]: I1010 18:58:48.346137 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b07da810-72ef-4a53-b3c1-bfa3734a2219-utilities\") pod \"certified-operators-sk6jx\" (UID: \"b07da810-72ef-4a53-b3c1-bfa3734a2219\") " pod="openshift-marketplace/certified-operators-sk6jx" Oct 10 18:58:48 crc kubenswrapper[4799]: I1010 18:58:48.346239 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b07da810-72ef-4a53-b3c1-bfa3734a2219-catalog-content\") pod \"certified-operators-sk6jx\" (UID: \"b07da810-72ef-4a53-b3c1-bfa3734a2219\") " pod="openshift-marketplace/certified-operators-sk6jx" Oct 10 18:58:48 crc kubenswrapper[4799]: I1010 18:58:48.448473 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-skswg\" (UniqueName: \"kubernetes.io/projected/b07da810-72ef-4a53-b3c1-bfa3734a2219-kube-api-access-skswg\") pod \"certified-operators-sk6jx\" (UID: \"b07da810-72ef-4a53-b3c1-bfa3734a2219\") " pod="openshift-marketplace/certified-operators-sk6jx" Oct 10 18:58:48 crc kubenswrapper[4799]: I1010 18:58:48.448803 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b07da810-72ef-4a53-b3c1-bfa3734a2219-utilities\") pod \"certified-operators-sk6jx\" (UID: \"b07da810-72ef-4a53-b3c1-bfa3734a2219\") " pod="openshift-marketplace/certified-operators-sk6jx" Oct 10 18:58:48 crc kubenswrapper[4799]: I1010 18:58:48.448872 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b07da810-72ef-4a53-b3c1-bfa3734a2219-catalog-content\") pod \"certified-operators-sk6jx\" (UID: \"b07da810-72ef-4a53-b3c1-bfa3734a2219\") " pod="openshift-marketplace/certified-operators-sk6jx" Oct 10 18:58:48 crc kubenswrapper[4799]: I1010 
18:58:48.449469 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b07da810-72ef-4a53-b3c1-bfa3734a2219-catalog-content\") pod \"certified-operators-sk6jx\" (UID: \"b07da810-72ef-4a53-b3c1-bfa3734a2219\") " pod="openshift-marketplace/certified-operators-sk6jx" Oct 10 18:58:48 crc kubenswrapper[4799]: I1010 18:58:48.450123 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b07da810-72ef-4a53-b3c1-bfa3734a2219-utilities\") pod \"certified-operators-sk6jx\" (UID: \"b07da810-72ef-4a53-b3c1-bfa3734a2219\") " pod="openshift-marketplace/certified-operators-sk6jx" Oct 10 18:58:48 crc kubenswrapper[4799]: I1010 18:58:48.476064 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-skswg\" (UniqueName: \"kubernetes.io/projected/b07da810-72ef-4a53-b3c1-bfa3734a2219-kube-api-access-skswg\") pod \"certified-operators-sk6jx\" (UID: \"b07da810-72ef-4a53-b3c1-bfa3734a2219\") " pod="openshift-marketplace/certified-operators-sk6jx" Oct 10 18:58:48 crc kubenswrapper[4799]: I1010 18:58:48.664831 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sk6jx" Oct 10 18:58:49 crc kubenswrapper[4799]: I1010 18:58:49.268111 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-sk6jx"] Oct 10 18:58:49 crc kubenswrapper[4799]: W1010 18:58:49.276800 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb07da810_72ef_4a53_b3c1_bfa3734a2219.slice/crio-de2b76300d52aaebc612217aae34d7fff8444f32444974497080ffb151ec3991 WatchSource:0}: Error finding container de2b76300d52aaebc612217aae34d7fff8444f32444974497080ffb151ec3991: Status 404 returned error can't find the container with id de2b76300d52aaebc612217aae34d7fff8444f32444974497080ffb151ec3991 Oct 10 18:58:50 crc kubenswrapper[4799]: I1010 18:58:50.275727 4799 generic.go:334] "Generic (PLEG): container finished" podID="b07da810-72ef-4a53-b3c1-bfa3734a2219" containerID="d809ea274085f5a4c26e7818b67956483d62e5e9e32196d8a3b3b4df8a178a19" exitCode=0 Oct 10 18:58:50 crc kubenswrapper[4799]: I1010 18:58:50.276075 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sk6jx" event={"ID":"b07da810-72ef-4a53-b3c1-bfa3734a2219","Type":"ContainerDied","Data":"d809ea274085f5a4c26e7818b67956483d62e5e9e32196d8a3b3b4df8a178a19"} Oct 10 18:58:50 crc kubenswrapper[4799]: I1010 18:58:50.276145 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sk6jx" event={"ID":"b07da810-72ef-4a53-b3c1-bfa3734a2219","Type":"ContainerStarted","Data":"de2b76300d52aaebc612217aae34d7fff8444f32444974497080ffb151ec3991"} Oct 10 18:58:50 crc kubenswrapper[4799]: I1010 18:58:50.280028 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 10 18:58:52 crc kubenswrapper[4799]: I1010 18:58:52.304559 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sk6jx" event={"ID":"b07da810-72ef-4a53-b3c1-bfa3734a2219","Type":"ContainerStarted","Data":"3eb5927f985836e94701ab95db44db0b87fe6a3d4d5aaeeab4898a5014d4d965"} Oct 10 18:58:53 crc kubenswrapper[4799]: I1010 18:58:53.321378 4799 generic.go:334] "Generic (PLEG): container finished" 
podID="b07da810-72ef-4a53-b3c1-bfa3734a2219" containerID="3eb5927f985836e94701ab95db44db0b87fe6a3d4d5aaeeab4898a5014d4d965" exitCode=0 Oct 10 18:58:53 crc kubenswrapper[4799]: I1010 18:58:53.321467 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sk6jx" event={"ID":"b07da810-72ef-4a53-b3c1-bfa3734a2219","Type":"ContainerDied","Data":"3eb5927f985836e94701ab95db44db0b87fe6a3d4d5aaeeab4898a5014d4d965"} Oct 10 18:58:53 crc kubenswrapper[4799]: I1010 18:58:53.324424 4799 generic.go:334] "Generic (PLEG): container finished" podID="c2ec587b-71a4-4612-8079-4c32275f871d" containerID="674777ba9e3a7c95d493c25999b8bd75910fee49bbbb0c00182ee34642812aa7" exitCode=0 Oct 10 18:58:53 crc kubenswrapper[4799]: I1010 18:58:53.324496 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-8nm8r" event={"ID":"c2ec587b-71a4-4612-8079-4c32275f871d","Type":"ContainerDied","Data":"674777ba9e3a7c95d493c25999b8bd75910fee49bbbb0c00182ee34642812aa7"} Oct 10 18:58:54 crc kubenswrapper[4799]: I1010 18:58:54.339954 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sk6jx" event={"ID":"b07da810-72ef-4a53-b3c1-bfa3734a2219","Type":"ContainerStarted","Data":"785ab96c1bd464977b188f45a712b35ce686103cd06b32f552d09d57f1d47589"} Oct 10 18:58:54 crc kubenswrapper[4799]: I1010 18:58:54.367376 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-sk6jx" podStartSLOduration=2.905033102 podStartE2EDuration="6.367358916s" podCreationTimestamp="2025-10-10 18:58:48 +0000 UTC" firstStartedPulling="2025-10-10 18:58:50.279622974 +0000 UTC m=+8823.787947099" lastFinishedPulling="2025-10-10 18:58:53.741948788 +0000 UTC m=+8827.250272913" observedRunningTime="2025-10-10 18:58:54.361978535 +0000 UTC m=+8827.870302660" watchObservedRunningTime="2025-10-10 18:58:54.367358916 +0000 UTC m=+8827.875683031" Oct 10 18:58:54 crc kubenswrapper[4799]: I1010 18:58:54.839436 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-8nm8r" Oct 10 18:58:54 crc kubenswrapper[4799]: I1010 18:58:54.997219 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c2ec587b-71a4-4612-8079-4c32275f871d-inventory\") pod \"c2ec587b-71a4-4612-8079-4c32275f871d\" (UID: \"c2ec587b-71a4-4612-8079-4c32275f871d\") " Oct 10 18:58:54 crc kubenswrapper[4799]: I1010 18:58:54.997347 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c2ec587b-71a4-4612-8079-4c32275f871d-ssh-key\") pod \"c2ec587b-71a4-4612-8079-4c32275f871d\" (UID: \"c2ec587b-71a4-4612-8079-4c32275f871d\") " Oct 10 18:58:54 crc kubenswrapper[4799]: I1010 18:58:54.997486 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4t27f\" (UniqueName: \"kubernetes.io/projected/c2ec587b-71a4-4612-8079-4c32275f871d-kube-api-access-4t27f\") pod \"c2ec587b-71a4-4612-8079-4c32275f871d\" (UID: \"c2ec587b-71a4-4612-8079-4c32275f871d\") " Oct 10 18:58:54 crc kubenswrapper[4799]: I1010 18:58:54.997995 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2ec587b-71a4-4612-8079-4c32275f871d-neutron-sriov-combined-ca-bundle\") pod \"c2ec587b-71a4-4612-8079-4c32275f871d\" (UID: \"c2ec587b-71a4-4612-8079-4c32275f871d\") " Oct 10 18:58:54 crc kubenswrapper[4799]: I1010 18:58:54.998025 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c2ec587b-71a4-4612-8079-4c32275f871d-ceph\") pod \"c2ec587b-71a4-4612-8079-4c32275f871d\" (UID: \"c2ec587b-71a4-4612-8079-4c32275f871d\") " Oct 10 18:58:54 crc kubenswrapper[4799]: I1010 18:58:54.998052 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c2ec587b-71a4-4612-8079-4c32275f871d-neutron-sriov-agent-neutron-config-0\") pod \"c2ec587b-71a4-4612-8079-4c32275f871d\" (UID: \"c2ec587b-71a4-4612-8079-4c32275f871d\") " Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.002986 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2ec587b-71a4-4612-8079-4c32275f871d-ceph" (OuterVolumeSpecName: "ceph") pod "c2ec587b-71a4-4612-8079-4c32275f871d" (UID: "c2ec587b-71a4-4612-8079-4c32275f871d"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.003029 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2ec587b-71a4-4612-8079-4c32275f871d-neutron-sriov-combined-ca-bundle" (OuterVolumeSpecName: "neutron-sriov-combined-ca-bundle") pod "c2ec587b-71a4-4612-8079-4c32275f871d" (UID: "c2ec587b-71a4-4612-8079-4c32275f871d"). InnerVolumeSpecName "neutron-sriov-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.010907 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2ec587b-71a4-4612-8079-4c32275f871d-kube-api-access-4t27f" (OuterVolumeSpecName: "kube-api-access-4t27f") pod "c2ec587b-71a4-4612-8079-4c32275f871d" (UID: "c2ec587b-71a4-4612-8079-4c32275f871d"). InnerVolumeSpecName "kube-api-access-4t27f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.028925 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2ec587b-71a4-4612-8079-4c32275f871d-neutron-sriov-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-sriov-agent-neutron-config-0") pod "c2ec587b-71a4-4612-8079-4c32275f871d" (UID: "c2ec587b-71a4-4612-8079-4c32275f871d"). InnerVolumeSpecName "neutron-sriov-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.030891 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2ec587b-71a4-4612-8079-4c32275f871d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c2ec587b-71a4-4612-8079-4c32275f871d" (UID: "c2ec587b-71a4-4612-8079-4c32275f871d"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.064343 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2ec587b-71a4-4612-8079-4c32275f871d-inventory" (OuterVolumeSpecName: "inventory") pod "c2ec587b-71a4-4612-8079-4c32275f871d" (UID: "c2ec587b-71a4-4612-8079-4c32275f871d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.100428 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c2ec587b-71a4-4612-8079-4c32275f871d-inventory\") on node \"crc\" DevicePath \"\"" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.100643 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c2ec587b-71a4-4612-8079-4c32275f871d-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.100735 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4t27f\" (UniqueName: \"kubernetes.io/projected/c2ec587b-71a4-4612-8079-4c32275f871d-kube-api-access-4t27f\") on node \"crc\" DevicePath \"\"" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.100824 4799 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2ec587b-71a4-4612-8079-4c32275f871d-neutron-sriov-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.100901 4799 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c2ec587b-71a4-4612-8079-4c32275f871d-ceph\") on node \"crc\" DevicePath \"\"" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.100965 4799 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c2ec587b-71a4-4612-8079-4c32275f871d-neutron-sriov-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.353258 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-8nm8r" event={"ID":"c2ec587b-71a4-4612-8079-4c32275f871d","Type":"ContainerDied","Data":"9961aa8b5410a064c1077fb8a3c7a460b39c3e36335e5357c3825489bcc3b53f"} Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.353314 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-8nm8r" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.353342 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9961aa8b5410a064c1077fb8a3c7a460b39c3e36335e5357c3825489bcc3b53f" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.498662 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-9snz2"] Oct 10 18:58:55 crc kubenswrapper[4799]: E1010 18:58:55.499054 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2ec587b-71a4-4612-8079-4c32275f871d" containerName="neutron-sriov-openstack-openstack-cell1" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.499069 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2ec587b-71a4-4612-8079-4c32275f871d" containerName="neutron-sriov-openstack-openstack-cell1" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.499289 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2ec587b-71a4-4612-8079-4c32275f871d" containerName="neutron-sriov-openstack-openstack-cell1" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.499973 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-9snz2" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.506170 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-rdlhr" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.506197 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.506381 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-dhcp-agent-neutron-config" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.506451 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.506492 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.525362 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-9snz2"] Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.612909 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-ssh-key\") pod \"neutron-dhcp-openstack-openstack-cell1-9snz2\" (UID: \"3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9snz2" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.612960 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-9snz2\" (UID: \"3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9snz2" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.613053 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r697v\" (UniqueName: 
\"kubernetes.io/projected/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-kube-api-access-r697v\") pod \"neutron-dhcp-openstack-openstack-cell1-9snz2\" (UID: \"3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9snz2" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.613093 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-9snz2\" (UID: \"3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9snz2" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.613113 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-9snz2\" (UID: \"3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9snz2" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.613132 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-9snz2\" (UID: \"3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9snz2" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.714379 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-ssh-key\") pod \"neutron-dhcp-openstack-openstack-cell1-9snz2\" (UID: \"3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9snz2" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.714423 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-9snz2\" (UID: \"3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9snz2" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.714532 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r697v\" (UniqueName: \"kubernetes.io/projected/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-kube-api-access-r697v\") pod \"neutron-dhcp-openstack-openstack-cell1-9snz2\" (UID: \"3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9snz2" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.714570 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-9snz2\" (UID: \"3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9snz2" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.714592 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-inventory\") pod 
\"neutron-dhcp-openstack-openstack-cell1-9snz2\" (UID: \"3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9snz2" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.714610 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-9snz2\" (UID: \"3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9snz2" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.718830 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-ssh-key\") pod \"neutron-dhcp-openstack-openstack-cell1-9snz2\" (UID: \"3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9snz2" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.718987 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-9snz2\" (UID: \"3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9snz2" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.719118 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-9snz2\" (UID: \"3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9snz2" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.721109 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-9snz2\" (UID: \"3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9snz2" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.724527 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-9snz2\" (UID: \"3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9snz2" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.731846 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r697v\" (UniqueName: \"kubernetes.io/projected/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-kube-api-access-r697v\") pod \"neutron-dhcp-openstack-openstack-cell1-9snz2\" (UID: \"3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9snz2" Oct 10 18:58:55 crc kubenswrapper[4799]: I1010 18:58:55.820639 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-9snz2" Oct 10 18:58:56 crc kubenswrapper[4799]: W1010 18:58:56.488709 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3b3bb6d0_ba03_42fb_aa81_ddc9a1b95d28.slice/crio-c18e923cd109dd0dae9671ec2a10f4364a24b0f39ad22bc3bb41a19c0947d2bb WatchSource:0}: Error finding container c18e923cd109dd0dae9671ec2a10f4364a24b0f39ad22bc3bb41a19c0947d2bb: Status 404 returned error can't find the container with id c18e923cd109dd0dae9671ec2a10f4364a24b0f39ad22bc3bb41a19c0947d2bb Oct 10 18:58:56 crc kubenswrapper[4799]: I1010 18:58:56.495028 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-9snz2"] Oct 10 18:58:57 crc kubenswrapper[4799]: I1010 18:58:57.375312 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-9snz2" event={"ID":"3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28","Type":"ContainerStarted","Data":"3064e8484ec762eb6f9afa88147792b67e500b0ccb34a3a607dc4d40a4abf5b4"} Oct 10 18:58:57 crc kubenswrapper[4799]: I1010 18:58:57.375686 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-9snz2" event={"ID":"3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28","Type":"ContainerStarted","Data":"c18e923cd109dd0dae9671ec2a10f4364a24b0f39ad22bc3bb41a19c0947d2bb"} Oct 10 18:58:57 crc kubenswrapper[4799]: I1010 18:58:57.395799 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-dhcp-openstack-openstack-cell1-9snz2" podStartSLOduration=1.8819594670000002 podStartE2EDuration="2.39577929s" podCreationTimestamp="2025-10-10 18:58:55 +0000 UTC" firstStartedPulling="2025-10-10 18:58:56.49236494 +0000 UTC m=+8830.000689055" lastFinishedPulling="2025-10-10 18:58:57.006184763 +0000 UTC m=+8830.514508878" observedRunningTime="2025-10-10 18:58:57.392621723 +0000 UTC m=+8830.900945858" watchObservedRunningTime="2025-10-10 18:58:57.39577929 +0000 UTC m=+8830.904103425" Oct 10 18:58:58 crc kubenswrapper[4799]: I1010 18:58:58.665870 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-sk6jx" Oct 10 18:58:58 crc kubenswrapper[4799]: I1010 18:58:58.666259 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-sk6jx" Oct 10 18:58:58 crc kubenswrapper[4799]: I1010 18:58:58.758148 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-sk6jx" Oct 10 18:58:59 crc kubenswrapper[4799]: I1010 18:58:59.477889 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-sk6jx" Oct 10 18:59:00 crc kubenswrapper[4799]: I1010 18:59:00.705627 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-sk6jx"] Oct 10 18:59:01 crc kubenswrapper[4799]: I1010 18:59:01.422987 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-sk6jx" podUID="b07da810-72ef-4a53-b3c1-bfa3734a2219" containerName="registry-server" containerID="cri-o://785ab96c1bd464977b188f45a712b35ce686103cd06b32f552d09d57f1d47589" gracePeriod=2 Oct 10 18:59:02 crc kubenswrapper[4799]: I1010 18:59:02.078664 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-sk6jx" Oct 10 18:59:02 crc kubenswrapper[4799]: I1010 18:59:02.202730 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-skswg\" (UniqueName: \"kubernetes.io/projected/b07da810-72ef-4a53-b3c1-bfa3734a2219-kube-api-access-skswg\") pod \"b07da810-72ef-4a53-b3c1-bfa3734a2219\" (UID: \"b07da810-72ef-4a53-b3c1-bfa3734a2219\") " Oct 10 18:59:02 crc kubenswrapper[4799]: I1010 18:59:02.203083 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b07da810-72ef-4a53-b3c1-bfa3734a2219-catalog-content\") pod \"b07da810-72ef-4a53-b3c1-bfa3734a2219\" (UID: \"b07da810-72ef-4a53-b3c1-bfa3734a2219\") " Oct 10 18:59:02 crc kubenswrapper[4799]: I1010 18:59:02.203191 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b07da810-72ef-4a53-b3c1-bfa3734a2219-utilities\") pod \"b07da810-72ef-4a53-b3c1-bfa3734a2219\" (UID: \"b07da810-72ef-4a53-b3c1-bfa3734a2219\") " Oct 10 18:59:02 crc kubenswrapper[4799]: I1010 18:59:02.204261 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b07da810-72ef-4a53-b3c1-bfa3734a2219-utilities" (OuterVolumeSpecName: "utilities") pod "b07da810-72ef-4a53-b3c1-bfa3734a2219" (UID: "b07da810-72ef-4a53-b3c1-bfa3734a2219"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:59:02 crc kubenswrapper[4799]: I1010 18:59:02.208581 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b07da810-72ef-4a53-b3c1-bfa3734a2219-kube-api-access-skswg" (OuterVolumeSpecName: "kube-api-access-skswg") pod "b07da810-72ef-4a53-b3c1-bfa3734a2219" (UID: "b07da810-72ef-4a53-b3c1-bfa3734a2219"). InnerVolumeSpecName "kube-api-access-skswg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 18:59:02 crc kubenswrapper[4799]: I1010 18:59:02.253027 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b07da810-72ef-4a53-b3c1-bfa3734a2219-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b07da810-72ef-4a53-b3c1-bfa3734a2219" (UID: "b07da810-72ef-4a53-b3c1-bfa3734a2219"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 18:59:02 crc kubenswrapper[4799]: I1010 18:59:02.306528 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-skswg\" (UniqueName: \"kubernetes.io/projected/b07da810-72ef-4a53-b3c1-bfa3734a2219-kube-api-access-skswg\") on node \"crc\" DevicePath \"\"" Oct 10 18:59:02 crc kubenswrapper[4799]: I1010 18:59:02.306574 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b07da810-72ef-4a53-b3c1-bfa3734a2219-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 18:59:02 crc kubenswrapper[4799]: I1010 18:59:02.306594 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b07da810-72ef-4a53-b3c1-bfa3734a2219-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 18:59:02 crc kubenswrapper[4799]: I1010 18:59:02.439123 4799 generic.go:334] "Generic (PLEG): container finished" podID="b07da810-72ef-4a53-b3c1-bfa3734a2219" containerID="785ab96c1bd464977b188f45a712b35ce686103cd06b32f552d09d57f1d47589" exitCode=0 Oct 10 18:59:02 crc kubenswrapper[4799]: I1010 18:59:02.439193 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sk6jx" event={"ID":"b07da810-72ef-4a53-b3c1-bfa3734a2219","Type":"ContainerDied","Data":"785ab96c1bd464977b188f45a712b35ce686103cd06b32f552d09d57f1d47589"} Oct 10 18:59:02 crc kubenswrapper[4799]: I1010 18:59:02.439234 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sk6jx" Oct 10 18:59:02 crc kubenswrapper[4799]: I1010 18:59:02.439285 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sk6jx" event={"ID":"b07da810-72ef-4a53-b3c1-bfa3734a2219","Type":"ContainerDied","Data":"de2b76300d52aaebc612217aae34d7fff8444f32444974497080ffb151ec3991"} Oct 10 18:59:02 crc kubenswrapper[4799]: I1010 18:59:02.439326 4799 scope.go:117] "RemoveContainer" containerID="785ab96c1bd464977b188f45a712b35ce686103cd06b32f552d09d57f1d47589" Oct 10 18:59:02 crc kubenswrapper[4799]: I1010 18:59:02.474590 4799 scope.go:117] "RemoveContainer" containerID="3eb5927f985836e94701ab95db44db0b87fe6a3d4d5aaeeab4898a5014d4d965" Oct 10 18:59:02 crc kubenswrapper[4799]: I1010 18:59:02.505021 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-sk6jx"] Oct 10 18:59:02 crc kubenswrapper[4799]: I1010 18:59:02.534253 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-sk6jx"] Oct 10 18:59:02 crc kubenswrapper[4799]: I1010 18:59:02.534522 4799 scope.go:117] "RemoveContainer" containerID="d809ea274085f5a4c26e7818b67956483d62e5e9e32196d8a3b3b4df8a178a19" Oct 10 18:59:02 crc kubenswrapper[4799]: I1010 18:59:02.600809 4799 scope.go:117] "RemoveContainer" containerID="785ab96c1bd464977b188f45a712b35ce686103cd06b32f552d09d57f1d47589" Oct 10 18:59:02 crc kubenswrapper[4799]: E1010 18:59:02.601243 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"785ab96c1bd464977b188f45a712b35ce686103cd06b32f552d09d57f1d47589\": container with ID starting with 785ab96c1bd464977b188f45a712b35ce686103cd06b32f552d09d57f1d47589 not found: ID does not exist" containerID="785ab96c1bd464977b188f45a712b35ce686103cd06b32f552d09d57f1d47589" Oct 10 18:59:02 crc kubenswrapper[4799]: I1010 18:59:02.601282 
4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"785ab96c1bd464977b188f45a712b35ce686103cd06b32f552d09d57f1d47589"} err="failed to get container status \"785ab96c1bd464977b188f45a712b35ce686103cd06b32f552d09d57f1d47589\": rpc error: code = NotFound desc = could not find container \"785ab96c1bd464977b188f45a712b35ce686103cd06b32f552d09d57f1d47589\": container with ID starting with 785ab96c1bd464977b188f45a712b35ce686103cd06b32f552d09d57f1d47589 not found: ID does not exist" Oct 10 18:59:02 crc kubenswrapper[4799]: I1010 18:59:02.601305 4799 scope.go:117] "RemoveContainer" containerID="3eb5927f985836e94701ab95db44db0b87fe6a3d4d5aaeeab4898a5014d4d965" Oct 10 18:59:02 crc kubenswrapper[4799]: E1010 18:59:02.601927 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3eb5927f985836e94701ab95db44db0b87fe6a3d4d5aaeeab4898a5014d4d965\": container with ID starting with 3eb5927f985836e94701ab95db44db0b87fe6a3d4d5aaeeab4898a5014d4d965 not found: ID does not exist" containerID="3eb5927f985836e94701ab95db44db0b87fe6a3d4d5aaeeab4898a5014d4d965" Oct 10 18:59:02 crc kubenswrapper[4799]: I1010 18:59:02.601956 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3eb5927f985836e94701ab95db44db0b87fe6a3d4d5aaeeab4898a5014d4d965"} err="failed to get container status \"3eb5927f985836e94701ab95db44db0b87fe6a3d4d5aaeeab4898a5014d4d965\": rpc error: code = NotFound desc = could not find container \"3eb5927f985836e94701ab95db44db0b87fe6a3d4d5aaeeab4898a5014d4d965\": container with ID starting with 3eb5927f985836e94701ab95db44db0b87fe6a3d4d5aaeeab4898a5014d4d965 not found: ID does not exist" Oct 10 18:59:02 crc kubenswrapper[4799]: I1010 18:59:02.601979 4799 scope.go:117] "RemoveContainer" containerID="d809ea274085f5a4c26e7818b67956483d62e5e9e32196d8a3b3b4df8a178a19" Oct 10 18:59:02 crc kubenswrapper[4799]: E1010 18:59:02.602417 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d809ea274085f5a4c26e7818b67956483d62e5e9e32196d8a3b3b4df8a178a19\": container with ID starting with d809ea274085f5a4c26e7818b67956483d62e5e9e32196d8a3b3b4df8a178a19 not found: ID does not exist" containerID="d809ea274085f5a4c26e7818b67956483d62e5e9e32196d8a3b3b4df8a178a19" Oct 10 18:59:02 crc kubenswrapper[4799]: I1010 18:59:02.602441 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d809ea274085f5a4c26e7818b67956483d62e5e9e32196d8a3b3b4df8a178a19"} err="failed to get container status \"d809ea274085f5a4c26e7818b67956483d62e5e9e32196d8a3b3b4df8a178a19\": rpc error: code = NotFound desc = could not find container \"d809ea274085f5a4c26e7818b67956483d62e5e9e32196d8a3b3b4df8a178a19\": container with ID starting with d809ea274085f5a4c26e7818b67956483d62e5e9e32196d8a3b3b4df8a178a19 not found: ID does not exist" Oct 10 18:59:03 crc kubenswrapper[4799]: I1010 18:59:03.415973 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b07da810-72ef-4a53-b3c1-bfa3734a2219" path="/var/lib/kubelet/pods/b07da810-72ef-4a53-b3c1-bfa3734a2219/volumes" Oct 10 18:59:15 crc kubenswrapper[4799]: I1010 18:59:15.249305 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 18:59:15 crc kubenswrapper[4799]: I1010 18:59:15.249961 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 18:59:15 crc kubenswrapper[4799]: I1010 18:59:15.250008 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 18:59:15 crc kubenswrapper[4799]: I1010 18:59:15.250824 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5ff5a69863067c2e020a02278a8b3d2cd11069a30ff86a06260c11a3866bc104"} pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 10 18:59:15 crc kubenswrapper[4799]: I1010 18:59:15.250882 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" containerID="cri-o://5ff5a69863067c2e020a02278a8b3d2cd11069a30ff86a06260c11a3866bc104" gracePeriod=600 Oct 10 18:59:15 crc kubenswrapper[4799]: E1010 18:59:15.390378 4799 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6cebefda_e31d_4be2_9bf4_8e1f8ec002cb.slice/crio-5ff5a69863067c2e020a02278a8b3d2cd11069a30ff86a06260c11a3866bc104.scope\": RecentStats: unable to find data in memory cache]" Oct 10 18:59:15 crc kubenswrapper[4799]: I1010 18:59:15.614525 4799 generic.go:334] "Generic (PLEG): container finished" podID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerID="5ff5a69863067c2e020a02278a8b3d2cd11069a30ff86a06260c11a3866bc104" exitCode=0 Oct 10 18:59:15 crc kubenswrapper[4799]: I1010 18:59:15.614585 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerDied","Data":"5ff5a69863067c2e020a02278a8b3d2cd11069a30ff86a06260c11a3866bc104"} Oct 10 18:59:15 crc kubenswrapper[4799]: I1010 18:59:15.614641 4799 scope.go:117] "RemoveContainer" containerID="7b1000aefc8a60a452be056f6fd645ae7a95f088784af623ffab036bea5d3d0d" Oct 10 18:59:16 crc kubenswrapper[4799]: I1010 18:59:16.631829 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"464cc628232a84253f4a698dbac51619b612599e327b0d8fc8447f771bb6664a"} Oct 10 19:00:00 crc kubenswrapper[4799]: I1010 19:00:00.164444 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335380-vghld"] Oct 10 19:00:00 crc kubenswrapper[4799]: E1010 19:00:00.166029 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b07da810-72ef-4a53-b3c1-bfa3734a2219" containerName="extract-utilities" Oct 10 19:00:00 crc kubenswrapper[4799]: I1010 19:00:00.166062 4799 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="b07da810-72ef-4a53-b3c1-bfa3734a2219" containerName="extract-utilities" Oct 10 19:00:00 crc kubenswrapper[4799]: E1010 19:00:00.166107 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b07da810-72ef-4a53-b3c1-bfa3734a2219" containerName="extract-content" Oct 10 19:00:00 crc kubenswrapper[4799]: I1010 19:00:00.166156 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="b07da810-72ef-4a53-b3c1-bfa3734a2219" containerName="extract-content" Oct 10 19:00:00 crc kubenswrapper[4799]: E1010 19:00:00.166208 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b07da810-72ef-4a53-b3c1-bfa3734a2219" containerName="registry-server" Oct 10 19:00:00 crc kubenswrapper[4799]: I1010 19:00:00.166230 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="b07da810-72ef-4a53-b3c1-bfa3734a2219" containerName="registry-server" Oct 10 19:00:00 crc kubenswrapper[4799]: I1010 19:00:00.166876 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="b07da810-72ef-4a53-b3c1-bfa3734a2219" containerName="registry-server" Oct 10 19:00:00 crc kubenswrapper[4799]: I1010 19:00:00.168666 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335380-vghld" Oct 10 19:00:00 crc kubenswrapper[4799]: I1010 19:00:00.170880 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 10 19:00:00 crc kubenswrapper[4799]: I1010 19:00:00.172088 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 10 19:00:00 crc kubenswrapper[4799]: I1010 19:00:00.176346 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335380-vghld"] Oct 10 19:00:00 crc kubenswrapper[4799]: I1010 19:00:00.214679 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vdlh\" (UniqueName: \"kubernetes.io/projected/c5b52041-b67b-41ab-a74f-cb9938559999-kube-api-access-4vdlh\") pod \"collect-profiles-29335380-vghld\" (UID: \"c5b52041-b67b-41ab-a74f-cb9938559999\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335380-vghld" Oct 10 19:00:00 crc kubenswrapper[4799]: I1010 19:00:00.214746 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c5b52041-b67b-41ab-a74f-cb9938559999-secret-volume\") pod \"collect-profiles-29335380-vghld\" (UID: \"c5b52041-b67b-41ab-a74f-cb9938559999\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335380-vghld" Oct 10 19:00:00 crc kubenswrapper[4799]: I1010 19:00:00.214798 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c5b52041-b67b-41ab-a74f-cb9938559999-config-volume\") pod \"collect-profiles-29335380-vghld\" (UID: \"c5b52041-b67b-41ab-a74f-cb9938559999\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335380-vghld" Oct 10 19:00:00 crc kubenswrapper[4799]: I1010 19:00:00.317371 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vdlh\" (UniqueName: \"kubernetes.io/projected/c5b52041-b67b-41ab-a74f-cb9938559999-kube-api-access-4vdlh\") pod \"collect-profiles-29335380-vghld\" (UID: 
\"c5b52041-b67b-41ab-a74f-cb9938559999\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335380-vghld" Oct 10 19:00:00 crc kubenswrapper[4799]: I1010 19:00:00.317685 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c5b52041-b67b-41ab-a74f-cb9938559999-secret-volume\") pod \"collect-profiles-29335380-vghld\" (UID: \"c5b52041-b67b-41ab-a74f-cb9938559999\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335380-vghld" Oct 10 19:00:00 crc kubenswrapper[4799]: I1010 19:00:00.317836 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c5b52041-b67b-41ab-a74f-cb9938559999-config-volume\") pod \"collect-profiles-29335380-vghld\" (UID: \"c5b52041-b67b-41ab-a74f-cb9938559999\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335380-vghld" Oct 10 19:00:00 crc kubenswrapper[4799]: I1010 19:00:00.319783 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c5b52041-b67b-41ab-a74f-cb9938559999-config-volume\") pod \"collect-profiles-29335380-vghld\" (UID: \"c5b52041-b67b-41ab-a74f-cb9938559999\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335380-vghld" Oct 10 19:00:00 crc kubenswrapper[4799]: I1010 19:00:00.329932 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c5b52041-b67b-41ab-a74f-cb9938559999-secret-volume\") pod \"collect-profiles-29335380-vghld\" (UID: \"c5b52041-b67b-41ab-a74f-cb9938559999\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335380-vghld" Oct 10 19:00:00 crc kubenswrapper[4799]: I1010 19:00:00.339088 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vdlh\" (UniqueName: \"kubernetes.io/projected/c5b52041-b67b-41ab-a74f-cb9938559999-kube-api-access-4vdlh\") pod \"collect-profiles-29335380-vghld\" (UID: \"c5b52041-b67b-41ab-a74f-cb9938559999\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335380-vghld" Oct 10 19:00:00 crc kubenswrapper[4799]: I1010 19:00:00.517321 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335380-vghld" Oct 10 19:00:01 crc kubenswrapper[4799]: I1010 19:00:01.085685 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335380-vghld"] Oct 10 19:00:01 crc kubenswrapper[4799]: I1010 19:00:01.232653 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335380-vghld" event={"ID":"c5b52041-b67b-41ab-a74f-cb9938559999","Type":"ContainerStarted","Data":"ea966bb969ba8f31edfd08bf74f5468049ead5fb12a93df11935247d0f6ef930"} Oct 10 19:00:02 crc kubenswrapper[4799]: I1010 19:00:02.247658 4799 generic.go:334] "Generic (PLEG): container finished" podID="c5b52041-b67b-41ab-a74f-cb9938559999" containerID="82de4124ccfc14480787662dc119319450744b0399c5cfac956f5679dc2119e3" exitCode=0 Oct 10 19:00:02 crc kubenswrapper[4799]: I1010 19:00:02.248111 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335380-vghld" event={"ID":"c5b52041-b67b-41ab-a74f-cb9938559999","Type":"ContainerDied","Data":"82de4124ccfc14480787662dc119319450744b0399c5cfac956f5679dc2119e3"} Oct 10 19:00:03 crc kubenswrapper[4799]: I1010 19:00:03.736600 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335380-vghld" Oct 10 19:00:03 crc kubenswrapper[4799]: I1010 19:00:03.837198 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c5b52041-b67b-41ab-a74f-cb9938559999-config-volume\") pod \"c5b52041-b67b-41ab-a74f-cb9938559999\" (UID: \"c5b52041-b67b-41ab-a74f-cb9938559999\") " Oct 10 19:00:03 crc kubenswrapper[4799]: I1010 19:00:03.837298 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4vdlh\" (UniqueName: \"kubernetes.io/projected/c5b52041-b67b-41ab-a74f-cb9938559999-kube-api-access-4vdlh\") pod \"c5b52041-b67b-41ab-a74f-cb9938559999\" (UID: \"c5b52041-b67b-41ab-a74f-cb9938559999\") " Oct 10 19:00:03 crc kubenswrapper[4799]: I1010 19:00:03.837399 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c5b52041-b67b-41ab-a74f-cb9938559999-secret-volume\") pod \"c5b52041-b67b-41ab-a74f-cb9938559999\" (UID: \"c5b52041-b67b-41ab-a74f-cb9938559999\") " Oct 10 19:00:03 crc kubenswrapper[4799]: I1010 19:00:03.838594 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5b52041-b67b-41ab-a74f-cb9938559999-config-volume" (OuterVolumeSpecName: "config-volume") pod "c5b52041-b67b-41ab-a74f-cb9938559999" (UID: "c5b52041-b67b-41ab-a74f-cb9938559999"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 19:00:03 crc kubenswrapper[4799]: I1010 19:00:03.843936 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5b52041-b67b-41ab-a74f-cb9938559999-kube-api-access-4vdlh" (OuterVolumeSpecName: "kube-api-access-4vdlh") pod "c5b52041-b67b-41ab-a74f-cb9938559999" (UID: "c5b52041-b67b-41ab-a74f-cb9938559999"). InnerVolumeSpecName "kube-api-access-4vdlh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 19:00:03 crc kubenswrapper[4799]: I1010 19:00:03.844399 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5b52041-b67b-41ab-a74f-cb9938559999-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c5b52041-b67b-41ab-a74f-cb9938559999" (UID: "c5b52041-b67b-41ab-a74f-cb9938559999"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 19:00:03 crc kubenswrapper[4799]: I1010 19:00:03.940094 4799 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c5b52041-b67b-41ab-a74f-cb9938559999-config-volume\") on node \"crc\" DevicePath \"\"" Oct 10 19:00:03 crc kubenswrapper[4799]: I1010 19:00:03.940125 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4vdlh\" (UniqueName: \"kubernetes.io/projected/c5b52041-b67b-41ab-a74f-cb9938559999-kube-api-access-4vdlh\") on node \"crc\" DevicePath \"\"" Oct 10 19:00:03 crc kubenswrapper[4799]: I1010 19:00:03.940142 4799 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c5b52041-b67b-41ab-a74f-cb9938559999-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 10 19:00:04 crc kubenswrapper[4799]: I1010 19:00:04.281721 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335380-vghld" event={"ID":"c5b52041-b67b-41ab-a74f-cb9938559999","Type":"ContainerDied","Data":"ea966bb969ba8f31edfd08bf74f5468049ead5fb12a93df11935247d0f6ef930"} Oct 10 19:00:04 crc kubenswrapper[4799]: I1010 19:00:04.281814 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335380-vghld" Oct 10 19:00:04 crc kubenswrapper[4799]: I1010 19:00:04.281840 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ea966bb969ba8f31edfd08bf74f5468049ead5fb12a93df11935247d0f6ef930" Oct 10 19:00:04 crc kubenswrapper[4799]: I1010 19:00:04.852526 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335335-9xp6n"] Oct 10 19:00:04 crc kubenswrapper[4799]: I1010 19:00:04.867701 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335335-9xp6n"] Oct 10 19:00:05 crc kubenswrapper[4799]: I1010 19:00:05.421630 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0d00f09-0bc5-40df-bdc8-3b973e57c60b" path="/var/lib/kubelet/pods/a0d00f09-0bc5-40df-bdc8-3b973e57c60b/volumes" Oct 10 19:00:17 crc kubenswrapper[4799]: I1010 19:00:17.259156 4799 scope.go:117] "RemoveContainer" containerID="a750c77142ebc474f2a876dd5a9c832bc8db4cdf377e1de81edde855501204c5" Oct 10 19:01:00 crc kubenswrapper[4799]: I1010 19:01:00.177626 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29335381-9s7wn"] Oct 10 19:01:00 crc kubenswrapper[4799]: E1010 19:01:00.179437 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5b52041-b67b-41ab-a74f-cb9938559999" containerName="collect-profiles" Oct 10 19:01:00 crc kubenswrapper[4799]: I1010 19:01:00.179470 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5b52041-b67b-41ab-a74f-cb9938559999" containerName="collect-profiles" Oct 10 19:01:00 crc kubenswrapper[4799]: I1010 19:01:00.179982 4799 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="c5b52041-b67b-41ab-a74f-cb9938559999" containerName="collect-profiles" Oct 10 19:01:00 crc kubenswrapper[4799]: I1010 19:01:00.181380 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29335381-9s7wn" Oct 10 19:01:00 crc kubenswrapper[4799]: I1010 19:01:00.225491 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29335381-9s7wn"] Oct 10 19:01:00 crc kubenswrapper[4799]: I1010 19:01:00.359066 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32b9508e-11a6-4fe8-970f-2d4c5f8f12c6-config-data\") pod \"keystone-cron-29335381-9s7wn\" (UID: \"32b9508e-11a6-4fe8-970f-2d4c5f8f12c6\") " pod="openstack/keystone-cron-29335381-9s7wn" Oct 10 19:01:00 crc kubenswrapper[4799]: I1010 19:01:00.359158 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32b9508e-11a6-4fe8-970f-2d4c5f8f12c6-combined-ca-bundle\") pod \"keystone-cron-29335381-9s7wn\" (UID: \"32b9508e-11a6-4fe8-970f-2d4c5f8f12c6\") " pod="openstack/keystone-cron-29335381-9s7wn" Oct 10 19:01:00 crc kubenswrapper[4799]: I1010 19:01:00.359302 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nf4kv\" (UniqueName: \"kubernetes.io/projected/32b9508e-11a6-4fe8-970f-2d4c5f8f12c6-kube-api-access-nf4kv\") pod \"keystone-cron-29335381-9s7wn\" (UID: \"32b9508e-11a6-4fe8-970f-2d4c5f8f12c6\") " pod="openstack/keystone-cron-29335381-9s7wn" Oct 10 19:01:00 crc kubenswrapper[4799]: I1010 19:01:00.359544 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/32b9508e-11a6-4fe8-970f-2d4c5f8f12c6-fernet-keys\") pod \"keystone-cron-29335381-9s7wn\" (UID: \"32b9508e-11a6-4fe8-970f-2d4c5f8f12c6\") " pod="openstack/keystone-cron-29335381-9s7wn" Oct 10 19:01:00 crc kubenswrapper[4799]: I1010 19:01:00.462129 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/32b9508e-11a6-4fe8-970f-2d4c5f8f12c6-fernet-keys\") pod \"keystone-cron-29335381-9s7wn\" (UID: \"32b9508e-11a6-4fe8-970f-2d4c5f8f12c6\") " pod="openstack/keystone-cron-29335381-9s7wn" Oct 10 19:01:00 crc kubenswrapper[4799]: I1010 19:01:00.462339 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32b9508e-11a6-4fe8-970f-2d4c5f8f12c6-config-data\") pod \"keystone-cron-29335381-9s7wn\" (UID: \"32b9508e-11a6-4fe8-970f-2d4c5f8f12c6\") " pod="openstack/keystone-cron-29335381-9s7wn" Oct 10 19:01:00 crc kubenswrapper[4799]: I1010 19:01:00.462403 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32b9508e-11a6-4fe8-970f-2d4c5f8f12c6-combined-ca-bundle\") pod \"keystone-cron-29335381-9s7wn\" (UID: \"32b9508e-11a6-4fe8-970f-2d4c5f8f12c6\") " pod="openstack/keystone-cron-29335381-9s7wn" Oct 10 19:01:00 crc kubenswrapper[4799]: I1010 19:01:00.462569 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nf4kv\" (UniqueName: \"kubernetes.io/projected/32b9508e-11a6-4fe8-970f-2d4c5f8f12c6-kube-api-access-nf4kv\") pod 
\"keystone-cron-29335381-9s7wn\" (UID: \"32b9508e-11a6-4fe8-970f-2d4c5f8f12c6\") " pod="openstack/keystone-cron-29335381-9s7wn" Oct 10 19:01:00 crc kubenswrapper[4799]: I1010 19:01:00.471566 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32b9508e-11a6-4fe8-970f-2d4c5f8f12c6-combined-ca-bundle\") pod \"keystone-cron-29335381-9s7wn\" (UID: \"32b9508e-11a6-4fe8-970f-2d4c5f8f12c6\") " pod="openstack/keystone-cron-29335381-9s7wn" Oct 10 19:01:00 crc kubenswrapper[4799]: I1010 19:01:00.473283 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32b9508e-11a6-4fe8-970f-2d4c5f8f12c6-config-data\") pod \"keystone-cron-29335381-9s7wn\" (UID: \"32b9508e-11a6-4fe8-970f-2d4c5f8f12c6\") " pod="openstack/keystone-cron-29335381-9s7wn" Oct 10 19:01:00 crc kubenswrapper[4799]: I1010 19:01:00.495363 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nf4kv\" (UniqueName: \"kubernetes.io/projected/32b9508e-11a6-4fe8-970f-2d4c5f8f12c6-kube-api-access-nf4kv\") pod \"keystone-cron-29335381-9s7wn\" (UID: \"32b9508e-11a6-4fe8-970f-2d4c5f8f12c6\") " pod="openstack/keystone-cron-29335381-9s7wn" Oct 10 19:01:00 crc kubenswrapper[4799]: I1010 19:01:00.496161 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/32b9508e-11a6-4fe8-970f-2d4c5f8f12c6-fernet-keys\") pod \"keystone-cron-29335381-9s7wn\" (UID: \"32b9508e-11a6-4fe8-970f-2d4c5f8f12c6\") " pod="openstack/keystone-cron-29335381-9s7wn" Oct 10 19:01:00 crc kubenswrapper[4799]: I1010 19:01:00.555092 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29335381-9s7wn" Oct 10 19:01:01 crc kubenswrapper[4799]: I1010 19:01:01.130002 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29335381-9s7wn"] Oct 10 19:01:01 crc kubenswrapper[4799]: W1010 19:01:01.134753 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod32b9508e_11a6_4fe8_970f_2d4c5f8f12c6.slice/crio-c002de7411693417a9cf6fca0283729152ec696c6b5b10b1f4c67d03e34484fb WatchSource:0}: Error finding container c002de7411693417a9cf6fca0283729152ec696c6b5b10b1f4c67d03e34484fb: Status 404 returned error can't find the container with id c002de7411693417a9cf6fca0283729152ec696c6b5b10b1f4c67d03e34484fb Oct 10 19:01:01 crc kubenswrapper[4799]: I1010 19:01:01.156682 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29335381-9s7wn" event={"ID":"32b9508e-11a6-4fe8-970f-2d4c5f8f12c6","Type":"ContainerStarted","Data":"c002de7411693417a9cf6fca0283729152ec696c6b5b10b1f4c67d03e34484fb"} Oct 10 19:01:02 crc kubenswrapper[4799]: I1010 19:01:02.173302 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29335381-9s7wn" event={"ID":"32b9508e-11a6-4fe8-970f-2d4c5f8f12c6","Type":"ContainerStarted","Data":"324d265d3a75d3f02bf005c2d56ffd761fc83265f6795480b51b8139d40b66a4"} Oct 10 19:01:02 crc kubenswrapper[4799]: I1010 19:01:02.244196 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29335381-9s7wn" podStartSLOduration=2.244174059 podStartE2EDuration="2.244174059s" podCreationTimestamp="2025-10-10 19:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 19:01:02.24134399 +0000 UTC m=+8955.749668135" watchObservedRunningTime="2025-10-10 19:01:02.244174059 +0000 UTC m=+8955.752498194" Oct 10 19:01:04 crc kubenswrapper[4799]: I1010 19:01:04.203870 4799 generic.go:334] "Generic (PLEG): container finished" podID="32b9508e-11a6-4fe8-970f-2d4c5f8f12c6" containerID="324d265d3a75d3f02bf005c2d56ffd761fc83265f6795480b51b8139d40b66a4" exitCode=0 Oct 10 19:01:04 crc kubenswrapper[4799]: I1010 19:01:04.203967 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29335381-9s7wn" event={"ID":"32b9508e-11a6-4fe8-970f-2d4c5f8f12c6","Type":"ContainerDied","Data":"324d265d3a75d3f02bf005c2d56ffd761fc83265f6795480b51b8139d40b66a4"} Oct 10 19:01:05 crc kubenswrapper[4799]: I1010 19:01:05.535728 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-bjdtt"] Oct 10 19:01:05 crc kubenswrapper[4799]: I1010 19:01:05.538883 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bjdtt" Oct 10 19:01:05 crc kubenswrapper[4799]: I1010 19:01:05.545656 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bjdtt"] Oct 10 19:01:05 crc kubenswrapper[4799]: I1010 19:01:05.696041 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29335381-9s7wn" Oct 10 19:01:05 crc kubenswrapper[4799]: I1010 19:01:05.702294 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5x58\" (UniqueName: \"kubernetes.io/projected/248f7bea-9636-46a5-8aa5-001da4d8c5e4-kube-api-access-c5x58\") pod \"community-operators-bjdtt\" (UID: \"248f7bea-9636-46a5-8aa5-001da4d8c5e4\") " pod="openshift-marketplace/community-operators-bjdtt" Oct 10 19:01:05 crc kubenswrapper[4799]: I1010 19:01:05.702340 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/248f7bea-9636-46a5-8aa5-001da4d8c5e4-catalog-content\") pod \"community-operators-bjdtt\" (UID: \"248f7bea-9636-46a5-8aa5-001da4d8c5e4\") " pod="openshift-marketplace/community-operators-bjdtt" Oct 10 19:01:05 crc kubenswrapper[4799]: I1010 19:01:05.702413 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/248f7bea-9636-46a5-8aa5-001da4d8c5e4-utilities\") pod \"community-operators-bjdtt\" (UID: \"248f7bea-9636-46a5-8aa5-001da4d8c5e4\") " pod="openshift-marketplace/community-operators-bjdtt" Oct 10 19:01:05 crc kubenswrapper[4799]: I1010 19:01:05.804030 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nf4kv\" (UniqueName: \"kubernetes.io/projected/32b9508e-11a6-4fe8-970f-2d4c5f8f12c6-kube-api-access-nf4kv\") pod \"32b9508e-11a6-4fe8-970f-2d4c5f8f12c6\" (UID: \"32b9508e-11a6-4fe8-970f-2d4c5f8f12c6\") " Oct 10 19:01:05 crc kubenswrapper[4799]: I1010 19:01:05.804087 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32b9508e-11a6-4fe8-970f-2d4c5f8f12c6-combined-ca-bundle\") pod \"32b9508e-11a6-4fe8-970f-2d4c5f8f12c6\" (UID: \"32b9508e-11a6-4fe8-970f-2d4c5f8f12c6\") " Oct 10 19:01:05 crc kubenswrapper[4799]: I1010 19:01:05.804135 4799 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/32b9508e-11a6-4fe8-970f-2d4c5f8f12c6-fernet-keys\") pod \"32b9508e-11a6-4fe8-970f-2d4c5f8f12c6\" (UID: \"32b9508e-11a6-4fe8-970f-2d4c5f8f12c6\") " Oct 10 19:01:05 crc kubenswrapper[4799]: I1010 19:01:05.804277 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32b9508e-11a6-4fe8-970f-2d4c5f8f12c6-config-data\") pod \"32b9508e-11a6-4fe8-970f-2d4c5f8f12c6\" (UID: \"32b9508e-11a6-4fe8-970f-2d4c5f8f12c6\") " Oct 10 19:01:05 crc kubenswrapper[4799]: I1010 19:01:05.805327 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5x58\" (UniqueName: \"kubernetes.io/projected/248f7bea-9636-46a5-8aa5-001da4d8c5e4-kube-api-access-c5x58\") pod \"community-operators-bjdtt\" (UID: \"248f7bea-9636-46a5-8aa5-001da4d8c5e4\") " pod="openshift-marketplace/community-operators-bjdtt" Oct 10 19:01:05 crc kubenswrapper[4799]: I1010 19:01:05.805655 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/248f7bea-9636-46a5-8aa5-001da4d8c5e4-catalog-content\") pod \"community-operators-bjdtt\" (UID: \"248f7bea-9636-46a5-8aa5-001da4d8c5e4\") " pod="openshift-marketplace/community-operators-bjdtt" Oct 10 19:01:05 crc kubenswrapper[4799]: I1010 19:01:05.805793 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/248f7bea-9636-46a5-8aa5-001da4d8c5e4-utilities\") pod \"community-operators-bjdtt\" (UID: \"248f7bea-9636-46a5-8aa5-001da4d8c5e4\") " pod="openshift-marketplace/community-operators-bjdtt" Oct 10 19:01:05 crc kubenswrapper[4799]: I1010 19:01:05.806178 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/248f7bea-9636-46a5-8aa5-001da4d8c5e4-catalog-content\") pod \"community-operators-bjdtt\" (UID: \"248f7bea-9636-46a5-8aa5-001da4d8c5e4\") " pod="openshift-marketplace/community-operators-bjdtt" Oct 10 19:01:05 crc kubenswrapper[4799]: I1010 19:01:05.806244 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/248f7bea-9636-46a5-8aa5-001da4d8c5e4-utilities\") pod \"community-operators-bjdtt\" (UID: \"248f7bea-9636-46a5-8aa5-001da4d8c5e4\") " pod="openshift-marketplace/community-operators-bjdtt" Oct 10 19:01:05 crc kubenswrapper[4799]: I1010 19:01:05.814285 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32b9508e-11a6-4fe8-970f-2d4c5f8f12c6-kube-api-access-nf4kv" (OuterVolumeSpecName: "kube-api-access-nf4kv") pod "32b9508e-11a6-4fe8-970f-2d4c5f8f12c6" (UID: "32b9508e-11a6-4fe8-970f-2d4c5f8f12c6"). InnerVolumeSpecName "kube-api-access-nf4kv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 19:01:05 crc kubenswrapper[4799]: I1010 19:01:05.818978 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32b9508e-11a6-4fe8-970f-2d4c5f8f12c6-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "32b9508e-11a6-4fe8-970f-2d4c5f8f12c6" (UID: "32b9508e-11a6-4fe8-970f-2d4c5f8f12c6"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 19:01:05 crc kubenswrapper[4799]: I1010 19:01:05.839954 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5x58\" (UniqueName: \"kubernetes.io/projected/248f7bea-9636-46a5-8aa5-001da4d8c5e4-kube-api-access-c5x58\") pod \"community-operators-bjdtt\" (UID: \"248f7bea-9636-46a5-8aa5-001da4d8c5e4\") " pod="openshift-marketplace/community-operators-bjdtt" Oct 10 19:01:05 crc kubenswrapper[4799]: I1010 19:01:05.856928 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32b9508e-11a6-4fe8-970f-2d4c5f8f12c6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "32b9508e-11a6-4fe8-970f-2d4c5f8f12c6" (UID: "32b9508e-11a6-4fe8-970f-2d4c5f8f12c6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 19:01:05 crc kubenswrapper[4799]: I1010 19:01:05.863702 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bjdtt" Oct 10 19:01:05 crc kubenswrapper[4799]: I1010 19:01:05.934233 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nf4kv\" (UniqueName: \"kubernetes.io/projected/32b9508e-11a6-4fe8-970f-2d4c5f8f12c6-kube-api-access-nf4kv\") on node \"crc\" DevicePath \"\"" Oct 10 19:01:05 crc kubenswrapper[4799]: I1010 19:01:05.934272 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32b9508e-11a6-4fe8-970f-2d4c5f8f12c6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 19:01:05 crc kubenswrapper[4799]: I1010 19:01:05.934287 4799 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/32b9508e-11a6-4fe8-970f-2d4c5f8f12c6-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 10 19:01:05 crc kubenswrapper[4799]: I1010 19:01:05.945259 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32b9508e-11a6-4fe8-970f-2d4c5f8f12c6-config-data" (OuterVolumeSpecName: "config-data") pod "32b9508e-11a6-4fe8-970f-2d4c5f8f12c6" (UID: "32b9508e-11a6-4fe8-970f-2d4c5f8f12c6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 19:01:06 crc kubenswrapper[4799]: I1010 19:01:06.035271 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32b9508e-11a6-4fe8-970f-2d4c5f8f12c6-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 19:01:06 crc kubenswrapper[4799]: I1010 19:01:06.237520 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29335381-9s7wn" event={"ID":"32b9508e-11a6-4fe8-970f-2d4c5f8f12c6","Type":"ContainerDied","Data":"c002de7411693417a9cf6fca0283729152ec696c6b5b10b1f4c67d03e34484fb"} Oct 10 19:01:06 crc kubenswrapper[4799]: I1010 19:01:06.237772 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c002de7411693417a9cf6fca0283729152ec696c6b5b10b1f4c67d03e34484fb" Oct 10 19:01:06 crc kubenswrapper[4799]: I1010 19:01:06.237825 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29335381-9s7wn" Oct 10 19:01:06 crc kubenswrapper[4799]: I1010 19:01:06.443691 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bjdtt"] Oct 10 19:01:07 crc kubenswrapper[4799]: I1010 19:01:07.253927 4799 generic.go:334] "Generic (PLEG): container finished" podID="248f7bea-9636-46a5-8aa5-001da4d8c5e4" containerID="568c0a23c3632cca249837a09032a6bcd3de915c3b0943ddc17f52583e65abc2" exitCode=0 Oct 10 19:01:07 crc kubenswrapper[4799]: I1010 19:01:07.253980 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bjdtt" event={"ID":"248f7bea-9636-46a5-8aa5-001da4d8c5e4","Type":"ContainerDied","Data":"568c0a23c3632cca249837a09032a6bcd3de915c3b0943ddc17f52583e65abc2"} Oct 10 19:01:07 crc kubenswrapper[4799]: I1010 19:01:07.254380 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bjdtt" event={"ID":"248f7bea-9636-46a5-8aa5-001da4d8c5e4","Type":"ContainerStarted","Data":"e831003733828c222cab2409d70eb82960580d916fa9fddd16a48a72cb63e835"} Oct 10 19:01:09 crc kubenswrapper[4799]: I1010 19:01:09.279041 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bjdtt" event={"ID":"248f7bea-9636-46a5-8aa5-001da4d8c5e4","Type":"ContainerStarted","Data":"63816fea31d17c4f558120721d07266c55881963bf5ff3d9609eeef86aa49e2c"} Oct 10 19:01:10 crc kubenswrapper[4799]: I1010 19:01:10.297168 4799 generic.go:334] "Generic (PLEG): container finished" podID="248f7bea-9636-46a5-8aa5-001da4d8c5e4" containerID="63816fea31d17c4f558120721d07266c55881963bf5ff3d9609eeef86aa49e2c" exitCode=0 Oct 10 19:01:10 crc kubenswrapper[4799]: I1010 19:01:10.297243 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bjdtt" event={"ID":"248f7bea-9636-46a5-8aa5-001da4d8c5e4","Type":"ContainerDied","Data":"63816fea31d17c4f558120721d07266c55881963bf5ff3d9609eeef86aa49e2c"} Oct 10 19:01:11 crc kubenswrapper[4799]: I1010 19:01:11.311482 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bjdtt" event={"ID":"248f7bea-9636-46a5-8aa5-001da4d8c5e4","Type":"ContainerStarted","Data":"7305dc7872fc90ee3581c4d551ff37e5e951e1c1805a312aabcfe36524d30ec0"} Oct 10 19:01:11 crc kubenswrapper[4799]: I1010 19:01:11.338961 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-bjdtt" podStartSLOduration=2.809665623 podStartE2EDuration="6.338933503s" podCreationTimestamp="2025-10-10 19:01:05 +0000 UTC" firstStartedPulling="2025-10-10 19:01:07.25938246 +0000 UTC m=+8960.767706575" lastFinishedPulling="2025-10-10 19:01:10.7886503 +0000 UTC m=+8964.296974455" observedRunningTime="2025-10-10 19:01:11.336494334 +0000 UTC m=+8964.844818449" watchObservedRunningTime="2025-10-10 19:01:11.338933503 +0000 UTC m=+8964.847257668" Oct 10 19:01:15 crc kubenswrapper[4799]: I1010 19:01:15.248729 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 19:01:15 crc kubenswrapper[4799]: I1010 19:01:15.249674 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" 
podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 19:01:15 crc kubenswrapper[4799]: I1010 19:01:15.864629 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-bjdtt" Oct 10 19:01:15 crc kubenswrapper[4799]: I1010 19:01:15.865103 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-bjdtt" Oct 10 19:01:15 crc kubenswrapper[4799]: I1010 19:01:15.947076 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-bjdtt" Oct 10 19:01:16 crc kubenswrapper[4799]: I1010 19:01:16.441875 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-bjdtt" Oct 10 19:01:16 crc kubenswrapper[4799]: I1010 19:01:16.498776 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bjdtt"] Oct 10 19:01:18 crc kubenswrapper[4799]: I1010 19:01:18.398964 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-bjdtt" podUID="248f7bea-9636-46a5-8aa5-001da4d8c5e4" containerName="registry-server" containerID="cri-o://7305dc7872fc90ee3581c4d551ff37e5e951e1c1805a312aabcfe36524d30ec0" gracePeriod=2 Oct 10 19:01:19 crc kubenswrapper[4799]: I1010 19:01:19.018937 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bjdtt" Oct 10 19:01:19 crc kubenswrapper[4799]: I1010 19:01:19.193549 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/248f7bea-9636-46a5-8aa5-001da4d8c5e4-catalog-content\") pod \"248f7bea-9636-46a5-8aa5-001da4d8c5e4\" (UID: \"248f7bea-9636-46a5-8aa5-001da4d8c5e4\") " Oct 10 19:01:19 crc kubenswrapper[4799]: I1010 19:01:19.194881 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/248f7bea-9636-46a5-8aa5-001da4d8c5e4-utilities\") pod \"248f7bea-9636-46a5-8aa5-001da4d8c5e4\" (UID: \"248f7bea-9636-46a5-8aa5-001da4d8c5e4\") " Oct 10 19:01:19 crc kubenswrapper[4799]: I1010 19:01:19.195819 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5x58\" (UniqueName: \"kubernetes.io/projected/248f7bea-9636-46a5-8aa5-001da4d8c5e4-kube-api-access-c5x58\") pod \"248f7bea-9636-46a5-8aa5-001da4d8c5e4\" (UID: \"248f7bea-9636-46a5-8aa5-001da4d8c5e4\") " Oct 10 19:01:19 crc kubenswrapper[4799]: I1010 19:01:19.195991 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/248f7bea-9636-46a5-8aa5-001da4d8c5e4-utilities" (OuterVolumeSpecName: "utilities") pod "248f7bea-9636-46a5-8aa5-001da4d8c5e4" (UID: "248f7bea-9636-46a5-8aa5-001da4d8c5e4"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 19:01:19 crc kubenswrapper[4799]: I1010 19:01:19.198008 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/248f7bea-9636-46a5-8aa5-001da4d8c5e4-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 19:01:19 crc kubenswrapper[4799]: I1010 19:01:19.207342 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/248f7bea-9636-46a5-8aa5-001da4d8c5e4-kube-api-access-c5x58" (OuterVolumeSpecName: "kube-api-access-c5x58") pod "248f7bea-9636-46a5-8aa5-001da4d8c5e4" (UID: "248f7bea-9636-46a5-8aa5-001da4d8c5e4"). InnerVolumeSpecName "kube-api-access-c5x58". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 19:01:19 crc kubenswrapper[4799]: I1010 19:01:19.252191 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/248f7bea-9636-46a5-8aa5-001da4d8c5e4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "248f7bea-9636-46a5-8aa5-001da4d8c5e4" (UID: "248f7bea-9636-46a5-8aa5-001da4d8c5e4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 19:01:19 crc kubenswrapper[4799]: I1010 19:01:19.300378 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5x58\" (UniqueName: \"kubernetes.io/projected/248f7bea-9636-46a5-8aa5-001da4d8c5e4-kube-api-access-c5x58\") on node \"crc\" DevicePath \"\"" Oct 10 19:01:19 crc kubenswrapper[4799]: I1010 19:01:19.300416 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/248f7bea-9636-46a5-8aa5-001da4d8c5e4-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 19:01:19 crc kubenswrapper[4799]: I1010 19:01:19.416355 4799 generic.go:334] "Generic (PLEG): container finished" podID="248f7bea-9636-46a5-8aa5-001da4d8c5e4" containerID="7305dc7872fc90ee3581c4d551ff37e5e951e1c1805a312aabcfe36524d30ec0" exitCode=0 Oct 10 19:01:19 crc kubenswrapper[4799]: I1010 19:01:19.417164 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bjdtt" Oct 10 19:01:19 crc kubenswrapper[4799]: I1010 19:01:19.418442 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bjdtt" event={"ID":"248f7bea-9636-46a5-8aa5-001da4d8c5e4","Type":"ContainerDied","Data":"7305dc7872fc90ee3581c4d551ff37e5e951e1c1805a312aabcfe36524d30ec0"} Oct 10 19:01:19 crc kubenswrapper[4799]: I1010 19:01:19.418526 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bjdtt" event={"ID":"248f7bea-9636-46a5-8aa5-001da4d8c5e4","Type":"ContainerDied","Data":"e831003733828c222cab2409d70eb82960580d916fa9fddd16a48a72cb63e835"} Oct 10 19:01:19 crc kubenswrapper[4799]: I1010 19:01:19.418570 4799 scope.go:117] "RemoveContainer" containerID="7305dc7872fc90ee3581c4d551ff37e5e951e1c1805a312aabcfe36524d30ec0" Oct 10 19:01:19 crc kubenswrapper[4799]: I1010 19:01:19.459451 4799 scope.go:117] "RemoveContainer" containerID="63816fea31d17c4f558120721d07266c55881963bf5ff3d9609eeef86aa49e2c" Oct 10 19:01:19 crc kubenswrapper[4799]: I1010 19:01:19.490190 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bjdtt"] Oct 10 19:01:19 crc kubenswrapper[4799]: I1010 19:01:19.503166 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-bjdtt"] Oct 10 19:01:19 crc kubenswrapper[4799]: I1010 19:01:19.513007 4799 scope.go:117] "RemoveContainer" containerID="568c0a23c3632cca249837a09032a6bcd3de915c3b0943ddc17f52583e65abc2" Oct 10 19:01:19 crc kubenswrapper[4799]: I1010 19:01:19.593277 4799 scope.go:117] "RemoveContainer" containerID="7305dc7872fc90ee3581c4d551ff37e5e951e1c1805a312aabcfe36524d30ec0" Oct 10 19:01:19 crc kubenswrapper[4799]: E1010 19:01:19.593729 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7305dc7872fc90ee3581c4d551ff37e5e951e1c1805a312aabcfe36524d30ec0\": container with ID starting with 7305dc7872fc90ee3581c4d551ff37e5e951e1c1805a312aabcfe36524d30ec0 not found: ID does not exist" containerID="7305dc7872fc90ee3581c4d551ff37e5e951e1c1805a312aabcfe36524d30ec0" Oct 10 19:01:19 crc kubenswrapper[4799]: I1010 19:01:19.593783 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7305dc7872fc90ee3581c4d551ff37e5e951e1c1805a312aabcfe36524d30ec0"} err="failed to get container status \"7305dc7872fc90ee3581c4d551ff37e5e951e1c1805a312aabcfe36524d30ec0\": rpc error: code = NotFound desc = could not find container \"7305dc7872fc90ee3581c4d551ff37e5e951e1c1805a312aabcfe36524d30ec0\": container with ID starting with 7305dc7872fc90ee3581c4d551ff37e5e951e1c1805a312aabcfe36524d30ec0 not found: ID does not exist" Oct 10 19:01:19 crc kubenswrapper[4799]: I1010 19:01:19.593810 4799 scope.go:117] "RemoveContainer" containerID="63816fea31d17c4f558120721d07266c55881963bf5ff3d9609eeef86aa49e2c" Oct 10 19:01:19 crc kubenswrapper[4799]: E1010 19:01:19.594542 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"63816fea31d17c4f558120721d07266c55881963bf5ff3d9609eeef86aa49e2c\": container with ID starting with 63816fea31d17c4f558120721d07266c55881963bf5ff3d9609eeef86aa49e2c not found: ID does not exist" containerID="63816fea31d17c4f558120721d07266c55881963bf5ff3d9609eeef86aa49e2c" Oct 10 19:01:19 crc kubenswrapper[4799]: I1010 19:01:19.594571 4799 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63816fea31d17c4f558120721d07266c55881963bf5ff3d9609eeef86aa49e2c"} err="failed to get container status \"63816fea31d17c4f558120721d07266c55881963bf5ff3d9609eeef86aa49e2c\": rpc error: code = NotFound desc = could not find container \"63816fea31d17c4f558120721d07266c55881963bf5ff3d9609eeef86aa49e2c\": container with ID starting with 63816fea31d17c4f558120721d07266c55881963bf5ff3d9609eeef86aa49e2c not found: ID does not exist" Oct 10 19:01:19 crc kubenswrapper[4799]: I1010 19:01:19.594590 4799 scope.go:117] "RemoveContainer" containerID="568c0a23c3632cca249837a09032a6bcd3de915c3b0943ddc17f52583e65abc2" Oct 10 19:01:19 crc kubenswrapper[4799]: E1010 19:01:19.595100 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"568c0a23c3632cca249837a09032a6bcd3de915c3b0943ddc17f52583e65abc2\": container with ID starting with 568c0a23c3632cca249837a09032a6bcd3de915c3b0943ddc17f52583e65abc2 not found: ID does not exist" containerID="568c0a23c3632cca249837a09032a6bcd3de915c3b0943ddc17f52583e65abc2" Oct 10 19:01:19 crc kubenswrapper[4799]: I1010 19:01:19.595155 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"568c0a23c3632cca249837a09032a6bcd3de915c3b0943ddc17f52583e65abc2"} err="failed to get container status \"568c0a23c3632cca249837a09032a6bcd3de915c3b0943ddc17f52583e65abc2\": rpc error: code = NotFound desc = could not find container \"568c0a23c3632cca249837a09032a6bcd3de915c3b0943ddc17f52583e65abc2\": container with ID starting with 568c0a23c3632cca249837a09032a6bcd3de915c3b0943ddc17f52583e65abc2 not found: ID does not exist" Oct 10 19:01:21 crc kubenswrapper[4799]: I1010 19:01:21.424433 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="248f7bea-9636-46a5-8aa5-001da4d8c5e4" path="/var/lib/kubelet/pods/248f7bea-9636-46a5-8aa5-001da4d8c5e4/volumes" Oct 10 19:01:45 crc kubenswrapper[4799]: I1010 19:01:45.249456 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 19:01:45 crc kubenswrapper[4799]: I1010 19:01:45.250140 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 19:02:15 crc kubenswrapper[4799]: I1010 19:02:15.248813 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 19:02:15 crc kubenswrapper[4799]: I1010 19:02:15.249471 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 19:02:15 crc kubenswrapper[4799]: I1010 
19:02:15.249536 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 19:02:15 crc kubenswrapper[4799]: I1010 19:02:15.250804 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"464cc628232a84253f4a698dbac51619b612599e327b0d8fc8447f771bb6664a"} pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 10 19:02:15 crc kubenswrapper[4799]: I1010 19:02:15.250906 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" containerID="cri-o://464cc628232a84253f4a698dbac51619b612599e327b0d8fc8447f771bb6664a" gracePeriod=600 Oct 10 19:02:15 crc kubenswrapper[4799]: E1010 19:02:15.381568 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:02:16 crc kubenswrapper[4799]: I1010 19:02:16.197393 4799 generic.go:334] "Generic (PLEG): container finished" podID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerID="464cc628232a84253f4a698dbac51619b612599e327b0d8fc8447f771bb6664a" exitCode=0 Oct 10 19:02:16 crc kubenswrapper[4799]: I1010 19:02:16.197498 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerDied","Data":"464cc628232a84253f4a698dbac51619b612599e327b0d8fc8447f771bb6664a"} Oct 10 19:02:16 crc kubenswrapper[4799]: I1010 19:02:16.197830 4799 scope.go:117] "RemoveContainer" containerID="5ff5a69863067c2e020a02278a8b3d2cd11069a30ff86a06260c11a3866bc104" Oct 10 19:02:16 crc kubenswrapper[4799]: I1010 19:02:16.198892 4799 scope.go:117] "RemoveContainer" containerID="464cc628232a84253f4a698dbac51619b612599e327b0d8fc8447f771bb6664a" Oct 10 19:02:16 crc kubenswrapper[4799]: E1010 19:02:16.199739 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:02:27 crc kubenswrapper[4799]: I1010 19:02:27.418951 4799 scope.go:117] "RemoveContainer" containerID="464cc628232a84253f4a698dbac51619b612599e327b0d8fc8447f771bb6664a" Oct 10 19:02:27 crc kubenswrapper[4799]: E1010 19:02:27.421349 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" 
podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:02:40 crc kubenswrapper[4799]: I1010 19:02:40.404253 4799 scope.go:117] "RemoveContainer" containerID="464cc628232a84253f4a698dbac51619b612599e327b0d8fc8447f771bb6664a" Oct 10 19:02:40 crc kubenswrapper[4799]: E1010 19:02:40.405454 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:02:53 crc kubenswrapper[4799]: I1010 19:02:53.402928 4799 scope.go:117] "RemoveContainer" containerID="464cc628232a84253f4a698dbac51619b612599e327b0d8fc8447f771bb6664a" Oct 10 19:02:53 crc kubenswrapper[4799]: E1010 19:02:53.404257 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:03:06 crc kubenswrapper[4799]: I1010 19:03:06.402446 4799 scope.go:117] "RemoveContainer" containerID="464cc628232a84253f4a698dbac51619b612599e327b0d8fc8447f771bb6664a" Oct 10 19:03:06 crc kubenswrapper[4799]: E1010 19:03:06.403644 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:03:18 crc kubenswrapper[4799]: I1010 19:03:18.402342 4799 scope.go:117] "RemoveContainer" containerID="464cc628232a84253f4a698dbac51619b612599e327b0d8fc8447f771bb6664a" Oct 10 19:03:18 crc kubenswrapper[4799]: E1010 19:03:18.403063 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:03:32 crc kubenswrapper[4799]: I1010 19:03:32.405214 4799 scope.go:117] "RemoveContainer" containerID="464cc628232a84253f4a698dbac51619b612599e327b0d8fc8447f771bb6664a" Oct 10 19:03:32 crc kubenswrapper[4799]: E1010 19:03:32.406696 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:03:46 crc kubenswrapper[4799]: I1010 19:03:46.402871 4799 scope.go:117] "RemoveContainer" 
containerID="464cc628232a84253f4a698dbac51619b612599e327b0d8fc8447f771bb6664a" Oct 10 19:03:46 crc kubenswrapper[4799]: E1010 19:03:46.404003 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:04:01 crc kubenswrapper[4799]: I1010 19:04:01.403952 4799 scope.go:117] "RemoveContainer" containerID="464cc628232a84253f4a698dbac51619b612599e327b0d8fc8447f771bb6664a" Oct 10 19:04:01 crc kubenswrapper[4799]: E1010 19:04:01.405095 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:04:13 crc kubenswrapper[4799]: I1010 19:04:13.402798 4799 scope.go:117] "RemoveContainer" containerID="464cc628232a84253f4a698dbac51619b612599e327b0d8fc8447f771bb6664a" Oct 10 19:04:13 crc kubenswrapper[4799]: E1010 19:04:13.403798 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:04:28 crc kubenswrapper[4799]: I1010 19:04:28.403188 4799 scope.go:117] "RemoveContainer" containerID="464cc628232a84253f4a698dbac51619b612599e327b0d8fc8447f771bb6664a" Oct 10 19:04:28 crc kubenswrapper[4799]: E1010 19:04:28.404838 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:04:40 crc kubenswrapper[4799]: I1010 19:04:40.403481 4799 scope.go:117] "RemoveContainer" containerID="464cc628232a84253f4a698dbac51619b612599e327b0d8fc8447f771bb6664a" Oct 10 19:04:40 crc kubenswrapper[4799]: E1010 19:04:40.404600 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:04:54 crc kubenswrapper[4799]: I1010 19:04:54.402486 4799 scope.go:117] "RemoveContainer" containerID="464cc628232a84253f4a698dbac51619b612599e327b0d8fc8447f771bb6664a" Oct 10 19:04:54 crc kubenswrapper[4799]: E1010 19:04:54.403847 4799 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:05:06 crc kubenswrapper[4799]: I1010 19:05:06.403660 4799 scope.go:117] "RemoveContainer" containerID="464cc628232a84253f4a698dbac51619b612599e327b0d8fc8447f771bb6664a" Oct 10 19:05:06 crc kubenswrapper[4799]: E1010 19:05:06.404868 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:05:17 crc kubenswrapper[4799]: I1010 19:05:17.410626 4799 scope.go:117] "RemoveContainer" containerID="464cc628232a84253f4a698dbac51619b612599e327b0d8fc8447f771bb6664a" Oct 10 19:05:17 crc kubenswrapper[4799]: E1010 19:05:17.411690 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:05:30 crc kubenswrapper[4799]: I1010 19:05:30.402403 4799 scope.go:117] "RemoveContainer" containerID="464cc628232a84253f4a698dbac51619b612599e327b0d8fc8447f771bb6664a" Oct 10 19:05:30 crc kubenswrapper[4799]: E1010 19:05:30.407238 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:05:42 crc kubenswrapper[4799]: I1010 19:05:42.643376 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wbjd5"] Oct 10 19:05:42 crc kubenswrapper[4799]: E1010 19:05:42.644672 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="248f7bea-9636-46a5-8aa5-001da4d8c5e4" containerName="registry-server" Oct 10 19:05:42 crc kubenswrapper[4799]: I1010 19:05:42.644696 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="248f7bea-9636-46a5-8aa5-001da4d8c5e4" containerName="registry-server" Oct 10 19:05:42 crc kubenswrapper[4799]: E1010 19:05:42.644786 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32b9508e-11a6-4fe8-970f-2d4c5f8f12c6" containerName="keystone-cron" Oct 10 19:05:42 crc kubenswrapper[4799]: I1010 19:05:42.644801 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="32b9508e-11a6-4fe8-970f-2d4c5f8f12c6" containerName="keystone-cron" Oct 10 19:05:42 crc kubenswrapper[4799]: E1010 19:05:42.644829 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="248f7bea-9636-46a5-8aa5-001da4d8c5e4" 
containerName="extract-utilities" Oct 10 19:05:42 crc kubenswrapper[4799]: I1010 19:05:42.644844 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="248f7bea-9636-46a5-8aa5-001da4d8c5e4" containerName="extract-utilities" Oct 10 19:05:42 crc kubenswrapper[4799]: E1010 19:05:42.644894 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="248f7bea-9636-46a5-8aa5-001da4d8c5e4" containerName="extract-content" Oct 10 19:05:42 crc kubenswrapper[4799]: I1010 19:05:42.644908 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="248f7bea-9636-46a5-8aa5-001da4d8c5e4" containerName="extract-content" Oct 10 19:05:42 crc kubenswrapper[4799]: I1010 19:05:42.645252 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="248f7bea-9636-46a5-8aa5-001da4d8c5e4" containerName="registry-server" Oct 10 19:05:42 crc kubenswrapper[4799]: I1010 19:05:42.645267 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="32b9508e-11a6-4fe8-970f-2d4c5f8f12c6" containerName="keystone-cron" Oct 10 19:05:42 crc kubenswrapper[4799]: I1010 19:05:42.647177 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wbjd5" Oct 10 19:05:42 crc kubenswrapper[4799]: I1010 19:05:42.675460 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wbjd5"] Oct 10 19:05:42 crc kubenswrapper[4799]: I1010 19:05:42.775648 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d202df0-e4fd-4d60-8fd7-7c05e281f020-catalog-content\") pod \"redhat-marketplace-wbjd5\" (UID: \"8d202df0-e4fd-4d60-8fd7-7c05e281f020\") " pod="openshift-marketplace/redhat-marketplace-wbjd5" Oct 10 19:05:42 crc kubenswrapper[4799]: I1010 19:05:42.775745 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d202df0-e4fd-4d60-8fd7-7c05e281f020-utilities\") pod \"redhat-marketplace-wbjd5\" (UID: \"8d202df0-e4fd-4d60-8fd7-7c05e281f020\") " pod="openshift-marketplace/redhat-marketplace-wbjd5" Oct 10 19:05:42 crc kubenswrapper[4799]: I1010 19:05:42.775812 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpj4z\" (UniqueName: \"kubernetes.io/projected/8d202df0-e4fd-4d60-8fd7-7c05e281f020-kube-api-access-dpj4z\") pod \"redhat-marketplace-wbjd5\" (UID: \"8d202df0-e4fd-4d60-8fd7-7c05e281f020\") " pod="openshift-marketplace/redhat-marketplace-wbjd5" Oct 10 19:05:42 crc kubenswrapper[4799]: I1010 19:05:42.877638 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d202df0-e4fd-4d60-8fd7-7c05e281f020-utilities\") pod \"redhat-marketplace-wbjd5\" (UID: \"8d202df0-e4fd-4d60-8fd7-7c05e281f020\") " pod="openshift-marketplace/redhat-marketplace-wbjd5" Oct 10 19:05:42 crc kubenswrapper[4799]: I1010 19:05:42.877719 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpj4z\" (UniqueName: \"kubernetes.io/projected/8d202df0-e4fd-4d60-8fd7-7c05e281f020-kube-api-access-dpj4z\") pod \"redhat-marketplace-wbjd5\" (UID: \"8d202df0-e4fd-4d60-8fd7-7c05e281f020\") " pod="openshift-marketplace/redhat-marketplace-wbjd5" Oct 10 19:05:42 crc kubenswrapper[4799]: I1010 19:05:42.877923 4799 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d202df0-e4fd-4d60-8fd7-7c05e281f020-catalog-content\") pod \"redhat-marketplace-wbjd5\" (UID: \"8d202df0-e4fd-4d60-8fd7-7c05e281f020\") " pod="openshift-marketplace/redhat-marketplace-wbjd5" Oct 10 19:05:42 crc kubenswrapper[4799]: I1010 19:05:42.878505 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d202df0-e4fd-4d60-8fd7-7c05e281f020-catalog-content\") pod \"redhat-marketplace-wbjd5\" (UID: \"8d202df0-e4fd-4d60-8fd7-7c05e281f020\") " pod="openshift-marketplace/redhat-marketplace-wbjd5" Oct 10 19:05:42 crc kubenswrapper[4799]: I1010 19:05:42.878832 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d202df0-e4fd-4d60-8fd7-7c05e281f020-utilities\") pod \"redhat-marketplace-wbjd5\" (UID: \"8d202df0-e4fd-4d60-8fd7-7c05e281f020\") " pod="openshift-marketplace/redhat-marketplace-wbjd5" Oct 10 19:05:43 crc kubenswrapper[4799]: I1010 19:05:43.188983 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dpj4z\" (UniqueName: \"kubernetes.io/projected/8d202df0-e4fd-4d60-8fd7-7c05e281f020-kube-api-access-dpj4z\") pod \"redhat-marketplace-wbjd5\" (UID: \"8d202df0-e4fd-4d60-8fd7-7c05e281f020\") " pod="openshift-marketplace/redhat-marketplace-wbjd5" Oct 10 19:05:43 crc kubenswrapper[4799]: I1010 19:05:43.286896 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wbjd5" Oct 10 19:05:43 crc kubenswrapper[4799]: I1010 19:05:43.413042 4799 scope.go:117] "RemoveContainer" containerID="464cc628232a84253f4a698dbac51619b612599e327b0d8fc8447f771bb6664a" Oct 10 19:05:43 crc kubenswrapper[4799]: E1010 19:05:43.413532 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:05:43 crc kubenswrapper[4799]: I1010 19:05:43.782599 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wbjd5"] Oct 10 19:05:44 crc kubenswrapper[4799]: I1010 19:05:44.075574 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wbjd5" event={"ID":"8d202df0-e4fd-4d60-8fd7-7c05e281f020","Type":"ContainerStarted","Data":"d43fc9533019b1fba225dc19cad3baac5aaf281c1f1cba63bb77d6401ae09664"} Oct 10 19:05:44 crc kubenswrapper[4799]: I1010 19:05:44.075841 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wbjd5" event={"ID":"8d202df0-e4fd-4d60-8fd7-7c05e281f020","Type":"ContainerStarted","Data":"ea9562f0fe6c75f8f94a5c7295e9abe66b23d733fc43450848dceb4f29731d0b"} Oct 10 19:05:45 crc kubenswrapper[4799]: I1010 19:05:45.096053 4799 generic.go:334] "Generic (PLEG): container finished" podID="8d202df0-e4fd-4d60-8fd7-7c05e281f020" containerID="d43fc9533019b1fba225dc19cad3baac5aaf281c1f1cba63bb77d6401ae09664" exitCode=0 Oct 10 19:05:45 crc kubenswrapper[4799]: I1010 19:05:45.096516 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wbjd5" 
event={"ID":"8d202df0-e4fd-4d60-8fd7-7c05e281f020","Type":"ContainerDied","Data":"d43fc9533019b1fba225dc19cad3baac5aaf281c1f1cba63bb77d6401ae09664"} Oct 10 19:05:45 crc kubenswrapper[4799]: I1010 19:05:45.099874 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 10 19:05:47 crc kubenswrapper[4799]: I1010 19:05:47.125373 4799 generic.go:334] "Generic (PLEG): container finished" podID="8d202df0-e4fd-4d60-8fd7-7c05e281f020" containerID="115aeb8c8ffbeb47dc5a0d5bca256f236f4c1e1a96db57b02ac85869f0e90501" exitCode=0 Oct 10 19:05:47 crc kubenswrapper[4799]: I1010 19:05:47.125442 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wbjd5" event={"ID":"8d202df0-e4fd-4d60-8fd7-7c05e281f020","Type":"ContainerDied","Data":"115aeb8c8ffbeb47dc5a0d5bca256f236f4c1e1a96db57b02ac85869f0e90501"} Oct 10 19:05:48 crc kubenswrapper[4799]: I1010 19:05:48.142018 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wbjd5" event={"ID":"8d202df0-e4fd-4d60-8fd7-7c05e281f020","Type":"ContainerStarted","Data":"2563db4dc18d53bb7ec8cd1301b87d75c9a5223d6536c9715ee529a1ce3190ba"} Oct 10 19:05:48 crc kubenswrapper[4799]: I1010 19:05:48.179299 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wbjd5" podStartSLOduration=3.7251554430000002 podStartE2EDuration="6.179281776s" podCreationTimestamp="2025-10-10 19:05:42 +0000 UTC" firstStartedPulling="2025-10-10 19:05:45.099099143 +0000 UTC m=+9238.607423288" lastFinishedPulling="2025-10-10 19:05:47.553225496 +0000 UTC m=+9241.061549621" observedRunningTime="2025-10-10 19:05:48.168453201 +0000 UTC m=+9241.676777336" watchObservedRunningTime="2025-10-10 19:05:48.179281776 +0000 UTC m=+9241.687605891" Oct 10 19:05:53 crc kubenswrapper[4799]: I1010 19:05:53.287352 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wbjd5" Oct 10 19:05:53 crc kubenswrapper[4799]: I1010 19:05:53.287886 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wbjd5" Oct 10 19:05:53 crc kubenswrapper[4799]: I1010 19:05:53.337715 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-wbjd5" Oct 10 19:05:54 crc kubenswrapper[4799]: I1010 19:05:54.310694 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wbjd5" Oct 10 19:05:54 crc kubenswrapper[4799]: I1010 19:05:54.383155 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wbjd5"] Oct 10 19:05:56 crc kubenswrapper[4799]: I1010 19:05:56.253255 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-wbjd5" podUID="8d202df0-e4fd-4d60-8fd7-7c05e281f020" containerName="registry-server" containerID="cri-o://2563db4dc18d53bb7ec8cd1301b87d75c9a5223d6536c9715ee529a1ce3190ba" gracePeriod=2 Oct 10 19:05:56 crc kubenswrapper[4799]: I1010 19:05:56.805002 4799 util.go:48] "No ready sandbox for pod can be found. 
Oct 10 19:05:56 crc kubenswrapper[4799]: I1010 19:05:56.965166 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dpj4z\" (UniqueName: \"kubernetes.io/projected/8d202df0-e4fd-4d60-8fd7-7c05e281f020-kube-api-access-dpj4z\") pod \"8d202df0-e4fd-4d60-8fd7-7c05e281f020\" (UID: \"8d202df0-e4fd-4d60-8fd7-7c05e281f020\") "
Oct 10 19:05:56 crc kubenswrapper[4799]: I1010 19:05:56.965283 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d202df0-e4fd-4d60-8fd7-7c05e281f020-utilities\") pod \"8d202df0-e4fd-4d60-8fd7-7c05e281f020\" (UID: \"8d202df0-e4fd-4d60-8fd7-7c05e281f020\") "
Oct 10 19:05:56 crc kubenswrapper[4799]: I1010 19:05:56.965434 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d202df0-e4fd-4d60-8fd7-7c05e281f020-catalog-content\") pod \"8d202df0-e4fd-4d60-8fd7-7c05e281f020\" (UID: \"8d202df0-e4fd-4d60-8fd7-7c05e281f020\") "
Oct 10 19:05:56 crc kubenswrapper[4799]: I1010 19:05:56.967718 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d202df0-e4fd-4d60-8fd7-7c05e281f020-utilities" (OuterVolumeSpecName: "utilities") pod "8d202df0-e4fd-4d60-8fd7-7c05e281f020" (UID: "8d202df0-e4fd-4d60-8fd7-7c05e281f020"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 10 19:05:56 crc kubenswrapper[4799]: I1010 19:05:56.980183 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d202df0-e4fd-4d60-8fd7-7c05e281f020-kube-api-access-dpj4z" (OuterVolumeSpecName: "kube-api-access-dpj4z") pod "8d202df0-e4fd-4d60-8fd7-7c05e281f020" (UID: "8d202df0-e4fd-4d60-8fd7-7c05e281f020"). InnerVolumeSpecName "kube-api-access-dpj4z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 19:05:56 crc kubenswrapper[4799]: I1010 19:05:56.989285 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d202df0-e4fd-4d60-8fd7-7c05e281f020-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8d202df0-e4fd-4d60-8fd7-7c05e281f020" (UID: "8d202df0-e4fd-4d60-8fd7-7c05e281f020"). InnerVolumeSpecName "catalog-content".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 19:05:57 crc kubenswrapper[4799]: I1010 19:05:57.069295 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d202df0-e4fd-4d60-8fd7-7c05e281f020-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 19:05:57 crc kubenswrapper[4799]: I1010 19:05:57.069344 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d202df0-e4fd-4d60-8fd7-7c05e281f020-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 19:05:57 crc kubenswrapper[4799]: I1010 19:05:57.069369 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dpj4z\" (UniqueName: \"kubernetes.io/projected/8d202df0-e4fd-4d60-8fd7-7c05e281f020-kube-api-access-dpj4z\") on node \"crc\" DevicePath \"\"" Oct 10 19:05:57 crc kubenswrapper[4799]: I1010 19:05:57.267053 4799 generic.go:334] "Generic (PLEG): container finished" podID="8d202df0-e4fd-4d60-8fd7-7c05e281f020" containerID="2563db4dc18d53bb7ec8cd1301b87d75c9a5223d6536c9715ee529a1ce3190ba" exitCode=0 Oct 10 19:05:57 crc kubenswrapper[4799]: I1010 19:05:57.267097 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wbjd5" event={"ID":"8d202df0-e4fd-4d60-8fd7-7c05e281f020","Type":"ContainerDied","Data":"2563db4dc18d53bb7ec8cd1301b87d75c9a5223d6536c9715ee529a1ce3190ba"} Oct 10 19:05:57 crc kubenswrapper[4799]: I1010 19:05:57.267127 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wbjd5" event={"ID":"8d202df0-e4fd-4d60-8fd7-7c05e281f020","Type":"ContainerDied","Data":"ea9562f0fe6c75f8f94a5c7295e9abe66b23d733fc43450848dceb4f29731d0b"} Oct 10 19:05:57 crc kubenswrapper[4799]: I1010 19:05:57.267148 4799 scope.go:117] "RemoveContainer" containerID="2563db4dc18d53bb7ec8cd1301b87d75c9a5223d6536c9715ee529a1ce3190ba" Oct 10 19:05:57 crc kubenswrapper[4799]: I1010 19:05:57.267173 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wbjd5" Oct 10 19:05:57 crc kubenswrapper[4799]: I1010 19:05:57.294603 4799 scope.go:117] "RemoveContainer" containerID="115aeb8c8ffbeb47dc5a0d5bca256f236f4c1e1a96db57b02ac85869f0e90501" Oct 10 19:05:57 crc kubenswrapper[4799]: I1010 19:05:57.320356 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wbjd5"] Oct 10 19:05:57 crc kubenswrapper[4799]: I1010 19:05:57.330290 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-wbjd5"] Oct 10 19:05:57 crc kubenswrapper[4799]: I1010 19:05:57.410957 4799 scope.go:117] "RemoveContainer" containerID="464cc628232a84253f4a698dbac51619b612599e327b0d8fc8447f771bb6664a" Oct 10 19:05:57 crc kubenswrapper[4799]: E1010 19:05:57.411672 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:05:57 crc kubenswrapper[4799]: I1010 19:05:57.422140 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d202df0-e4fd-4d60-8fd7-7c05e281f020" path="/var/lib/kubelet/pods/8d202df0-e4fd-4d60-8fd7-7c05e281f020/volumes" Oct 10 19:05:58 crc kubenswrapper[4799]: I1010 19:05:58.312159 4799 scope.go:117] "RemoveContainer" containerID="d43fc9533019b1fba225dc19cad3baac5aaf281c1f1cba63bb77d6401ae09664" Oct 10 19:05:58 crc kubenswrapper[4799]: I1010 19:05:58.499361 4799 scope.go:117] "RemoveContainer" containerID="2563db4dc18d53bb7ec8cd1301b87d75c9a5223d6536c9715ee529a1ce3190ba" Oct 10 19:05:58 crc kubenswrapper[4799]: E1010 19:05:58.500334 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2563db4dc18d53bb7ec8cd1301b87d75c9a5223d6536c9715ee529a1ce3190ba\": container with ID starting with 2563db4dc18d53bb7ec8cd1301b87d75c9a5223d6536c9715ee529a1ce3190ba not found: ID does not exist" containerID="2563db4dc18d53bb7ec8cd1301b87d75c9a5223d6536c9715ee529a1ce3190ba" Oct 10 19:05:58 crc kubenswrapper[4799]: I1010 19:05:58.500401 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2563db4dc18d53bb7ec8cd1301b87d75c9a5223d6536c9715ee529a1ce3190ba"} err="failed to get container status \"2563db4dc18d53bb7ec8cd1301b87d75c9a5223d6536c9715ee529a1ce3190ba\": rpc error: code = NotFound desc = could not find container \"2563db4dc18d53bb7ec8cd1301b87d75c9a5223d6536c9715ee529a1ce3190ba\": container with ID starting with 2563db4dc18d53bb7ec8cd1301b87d75c9a5223d6536c9715ee529a1ce3190ba not found: ID does not exist" Oct 10 19:05:58 crc kubenswrapper[4799]: I1010 19:05:58.500441 4799 scope.go:117] "RemoveContainer" containerID="115aeb8c8ffbeb47dc5a0d5bca256f236f4c1e1a96db57b02ac85869f0e90501" Oct 10 19:05:58 crc kubenswrapper[4799]: E1010 19:05:58.500978 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"115aeb8c8ffbeb47dc5a0d5bca256f236f4c1e1a96db57b02ac85869f0e90501\": container with ID starting with 115aeb8c8ffbeb47dc5a0d5bca256f236f4c1e1a96db57b02ac85869f0e90501 not found: ID does not exist" 
containerID="115aeb8c8ffbeb47dc5a0d5bca256f236f4c1e1a96db57b02ac85869f0e90501" Oct 10 19:05:58 crc kubenswrapper[4799]: I1010 19:05:58.501034 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"115aeb8c8ffbeb47dc5a0d5bca256f236f4c1e1a96db57b02ac85869f0e90501"} err="failed to get container status \"115aeb8c8ffbeb47dc5a0d5bca256f236f4c1e1a96db57b02ac85869f0e90501\": rpc error: code = NotFound desc = could not find container \"115aeb8c8ffbeb47dc5a0d5bca256f236f4c1e1a96db57b02ac85869f0e90501\": container with ID starting with 115aeb8c8ffbeb47dc5a0d5bca256f236f4c1e1a96db57b02ac85869f0e90501 not found: ID does not exist" Oct 10 19:05:58 crc kubenswrapper[4799]: I1010 19:05:58.501065 4799 scope.go:117] "RemoveContainer" containerID="d43fc9533019b1fba225dc19cad3baac5aaf281c1f1cba63bb77d6401ae09664" Oct 10 19:05:58 crc kubenswrapper[4799]: E1010 19:05:58.501435 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d43fc9533019b1fba225dc19cad3baac5aaf281c1f1cba63bb77d6401ae09664\": container with ID starting with d43fc9533019b1fba225dc19cad3baac5aaf281c1f1cba63bb77d6401ae09664 not found: ID does not exist" containerID="d43fc9533019b1fba225dc19cad3baac5aaf281c1f1cba63bb77d6401ae09664" Oct 10 19:05:58 crc kubenswrapper[4799]: I1010 19:05:58.501477 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d43fc9533019b1fba225dc19cad3baac5aaf281c1f1cba63bb77d6401ae09664"} err="failed to get container status \"d43fc9533019b1fba225dc19cad3baac5aaf281c1f1cba63bb77d6401ae09664\": rpc error: code = NotFound desc = could not find container \"d43fc9533019b1fba225dc19cad3baac5aaf281c1f1cba63bb77d6401ae09664\": container with ID starting with d43fc9533019b1fba225dc19cad3baac5aaf281c1f1cba63bb77d6401ae09664 not found: ID does not exist" Oct 10 19:06:08 crc kubenswrapper[4799]: I1010 19:06:08.402801 4799 scope.go:117] "RemoveContainer" containerID="464cc628232a84253f4a698dbac51619b612599e327b0d8fc8447f771bb6664a" Oct 10 19:06:08 crc kubenswrapper[4799]: E1010 19:06:08.404358 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:06:22 crc kubenswrapper[4799]: I1010 19:06:22.402626 4799 scope.go:117] "RemoveContainer" containerID="464cc628232a84253f4a698dbac51619b612599e327b0d8fc8447f771bb6664a" Oct 10 19:06:22 crc kubenswrapper[4799]: E1010 19:06:22.403670 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:06:35 crc kubenswrapper[4799]: I1010 19:06:35.403489 4799 scope.go:117] "RemoveContainer" containerID="464cc628232a84253f4a698dbac51619b612599e327b0d8fc8447f771bb6664a" Oct 10 19:06:35 crc kubenswrapper[4799]: E1010 19:06:35.404907 4799 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:06:47 crc kubenswrapper[4799]: I1010 19:06:47.415915 4799 scope.go:117] "RemoveContainer" containerID="464cc628232a84253f4a698dbac51619b612599e327b0d8fc8447f771bb6664a" Oct 10 19:06:47 crc kubenswrapper[4799]: E1010 19:06:47.417367 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:06:59 crc kubenswrapper[4799]: I1010 19:06:59.402911 4799 scope.go:117] "RemoveContainer" containerID="464cc628232a84253f4a698dbac51619b612599e327b0d8fc8447f771bb6664a" Oct 10 19:06:59 crc kubenswrapper[4799]: E1010 19:06:59.404165 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:07:06 crc kubenswrapper[4799]: I1010 19:07:06.276285 4799 generic.go:334] "Generic (PLEG): container finished" podID="3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28" containerID="3064e8484ec762eb6f9afa88147792b67e500b0ccb34a3a607dc4d40a4abf5b4" exitCode=0 Oct 10 19:07:06 crc kubenswrapper[4799]: I1010 19:07:06.276401 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-9snz2" event={"ID":"3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28","Type":"ContainerDied","Data":"3064e8484ec762eb6f9afa88147792b67e500b0ccb34a3a607dc4d40a4abf5b4"} Oct 10 19:07:07 crc kubenswrapper[4799]: I1010 19:07:07.931449 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-9snz2" Oct 10 19:07:08 crc kubenswrapper[4799]: I1010 19:07:08.030790 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r697v\" (UniqueName: \"kubernetes.io/projected/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-kube-api-access-r697v\") pod \"3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28\" (UID: \"3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28\") " Oct 10 19:07:08 crc kubenswrapper[4799]: I1010 19:07:08.030918 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-inventory\") pod \"3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28\" (UID: \"3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28\") " Oct 10 19:07:08 crc kubenswrapper[4799]: I1010 19:07:08.030975 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-neutron-dhcp-agent-neutron-config-0\") pod \"3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28\" (UID: \"3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28\") " Oct 10 19:07:08 crc kubenswrapper[4799]: I1010 19:07:08.031215 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-neutron-dhcp-combined-ca-bundle\") pod \"3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28\" (UID: \"3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28\") " Oct 10 19:07:08 crc kubenswrapper[4799]: I1010 19:07:08.031383 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-ceph\") pod \"3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28\" (UID: \"3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28\") " Oct 10 19:07:08 crc kubenswrapper[4799]: I1010 19:07:08.031451 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-ssh-key\") pod \"3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28\" (UID: \"3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28\") " Oct 10 19:07:08 crc kubenswrapper[4799]: I1010 19:07:08.040132 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-ceph" (OuterVolumeSpecName: "ceph") pod "3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28" (UID: "3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 19:07:08 crc kubenswrapper[4799]: I1010 19:07:08.040843 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-neutron-dhcp-combined-ca-bundle" (OuterVolumeSpecName: "neutron-dhcp-combined-ca-bundle") pod "3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28" (UID: "3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28"). InnerVolumeSpecName "neutron-dhcp-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 19:07:08 crc kubenswrapper[4799]: I1010 19:07:08.041001 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-kube-api-access-r697v" (OuterVolumeSpecName: "kube-api-access-r697v") pod "3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28" (UID: "3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28"). InnerVolumeSpecName "kube-api-access-r697v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 19:07:08 crc kubenswrapper[4799]: I1010 19:07:08.078864 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28" (UID: "3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 19:07:08 crc kubenswrapper[4799]: I1010 19:07:08.080204 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-neutron-dhcp-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-dhcp-agent-neutron-config-0") pod "3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28" (UID: "3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28"). InnerVolumeSpecName "neutron-dhcp-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 19:07:08 crc kubenswrapper[4799]: I1010 19:07:08.096942 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-inventory" (OuterVolumeSpecName: "inventory") pod "3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28" (UID: "3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 19:07:08 crc kubenswrapper[4799]: I1010 19:07:08.136446 4799 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-neutron-dhcp-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 19:07:08 crc kubenswrapper[4799]: I1010 19:07:08.136507 4799 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-ceph\") on node \"crc\" DevicePath \"\"" Oct 10 19:07:08 crc kubenswrapper[4799]: I1010 19:07:08.136522 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 10 19:07:08 crc kubenswrapper[4799]: I1010 19:07:08.136539 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r697v\" (UniqueName: \"kubernetes.io/projected/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-kube-api-access-r697v\") on node \"crc\" DevicePath \"\"" Oct 10 19:07:08 crc kubenswrapper[4799]: I1010 19:07:08.136556 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-inventory\") on node \"crc\" DevicePath \"\"" Oct 10 19:07:08 crc kubenswrapper[4799]: I1010 19:07:08.136569 4799 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28-neutron-dhcp-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 10 19:07:08 crc kubenswrapper[4799]: I1010 19:07:08.308943 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-9snz2" event={"ID":"3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28","Type":"ContainerDied","Data":"c18e923cd109dd0dae9671ec2a10f4364a24b0f39ad22bc3bb41a19c0947d2bb"} Oct 10 19:07:08 crc kubenswrapper[4799]: I1010 19:07:08.309341 4799 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="c18e923cd109dd0dae9671ec2a10f4364a24b0f39ad22bc3bb41a19c0947d2bb" Oct 10 19:07:08 crc kubenswrapper[4799]: I1010 19:07:08.309027 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-9snz2" Oct 10 19:07:10 crc kubenswrapper[4799]: I1010 19:07:10.401960 4799 scope.go:117] "RemoveContainer" containerID="464cc628232a84253f4a698dbac51619b612599e327b0d8fc8447f771bb6664a" Oct 10 19:07:10 crc kubenswrapper[4799]: E1010 19:07:10.402676 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:07:21 crc kubenswrapper[4799]: I1010 19:07:21.404556 4799 scope.go:117] "RemoveContainer" containerID="464cc628232a84253f4a698dbac51619b612599e327b0d8fc8447f771bb6664a" Oct 10 19:07:22 crc kubenswrapper[4799]: I1010 19:07:22.520883 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"1f5a8550e24ec4e5bea48e4d229935d178982f553c4ca4c823e783a71ccc174c"} Oct 10 19:07:26 crc kubenswrapper[4799]: I1010 19:07:26.375202 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 10 19:07:26 crc kubenswrapper[4799]: I1010 19:07:26.375922 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="44c716fd-97e8-41e2-a350-99ec283d47d7" containerName="nova-cell0-conductor-conductor" containerID="cri-o://b5286df2abe730c9b0086c71d5f1c6b73d94403b6f477b779d598d862320347e" gracePeriod=30 Oct 10 19:07:27 crc kubenswrapper[4799]: I1010 19:07:27.382278 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 10 19:07:27 crc kubenswrapper[4799]: I1010 19:07:27.382806 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="c2a9c57f-e6e4-4792-b89b-ab5f9724bf36" containerName="nova-cell1-conductor-conductor" containerID="cri-o://64382c304beacff299cd8fe303ce2e24e2978c781d0ee3c6055e62035ff81435" gracePeriod=30 Oct 10 19:07:27 crc kubenswrapper[4799]: I1010 19:07:27.525613 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 10 19:07:27 crc kubenswrapper[4799]: I1010 19:07:27.525882 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="5938ba89-d944-4772-834f-67074d54da34" containerName="nova-scheduler-scheduler" containerID="cri-o://08e3bad4a85582426737268e738b80039fecbcaaa296a24c98b087818d85da91" gracePeriod=30 Oct 10 19:07:27 crc kubenswrapper[4799]: I1010 19:07:27.546108 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 10 19:07:27 crc kubenswrapper[4799]: I1010 19:07:27.546409 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3f04144b-1b73-4aa0-8525-53f1a68da6ee" containerName="nova-api-log" containerID="cri-o://edc89c18d9c9a9d77cbeac4da2357579278c2841d39602040e0d43b3dc9839ec" gracePeriod=30 Oct 10 19:07:27 crc 
kubenswrapper[4799]: I1010 19:07:27.546492 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3f04144b-1b73-4aa0-8525-53f1a68da6ee" containerName="nova-api-api" containerID="cri-o://20359d41f42d593db627882ec635cdb4e247abcdb9e6bcf45e80cc035d84cdfb" gracePeriod=30 Oct 10 19:07:27 crc kubenswrapper[4799]: I1010 19:07:27.559032 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 10 19:07:27 crc kubenswrapper[4799]: I1010 19:07:27.559248 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d3ae1096-1bb5-408b-84a1-58b8cd21bad7" containerName="nova-metadata-log" containerID="cri-o://59ecd2d4a250236551316a6d1607810dac5f688a05127dcdf83bdcdfdc7a7f02" gracePeriod=30 Oct 10 19:07:27 crc kubenswrapper[4799]: I1010 19:07:27.559379 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d3ae1096-1bb5-408b-84a1-58b8cd21bad7" containerName="nova-metadata-metadata" containerID="cri-o://8a162cb9ff7707a7d12da2bd28d025e2ff34657d53cdd924e8311302a0804bf3" gracePeriod=30 Oct 10 19:07:28 crc kubenswrapper[4799]: E1010 19:07:28.402883 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="08e3bad4a85582426737268e738b80039fecbcaaa296a24c98b087818d85da91" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 10 19:07:28 crc kubenswrapper[4799]: E1010 19:07:28.405447 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="08e3bad4a85582426737268e738b80039fecbcaaa296a24c98b087818d85da91" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 10 19:07:28 crc kubenswrapper[4799]: E1010 19:07:28.407245 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="08e3bad4a85582426737268e738b80039fecbcaaa296a24c98b087818d85da91" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 10 19:07:28 crc kubenswrapper[4799]: E1010 19:07:28.407310 4799 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="5938ba89-d944-4772-834f-67074d54da34" containerName="nova-scheduler-scheduler" Oct 10 19:07:28 crc kubenswrapper[4799]: I1010 19:07:28.623047 4799 generic.go:334] "Generic (PLEG): container finished" podID="d3ae1096-1bb5-408b-84a1-58b8cd21bad7" containerID="59ecd2d4a250236551316a6d1607810dac5f688a05127dcdf83bdcdfdc7a7f02" exitCode=143 Oct 10 19:07:28 crc kubenswrapper[4799]: I1010 19:07:28.623120 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d3ae1096-1bb5-408b-84a1-58b8cd21bad7","Type":"ContainerDied","Data":"59ecd2d4a250236551316a6d1607810dac5f688a05127dcdf83bdcdfdc7a7f02"} Oct 10 19:07:28 crc kubenswrapper[4799]: I1010 19:07:28.626391 4799 generic.go:334] "Generic (PLEG): container finished" podID="3f04144b-1b73-4aa0-8525-53f1a68da6ee" containerID="edc89c18d9c9a9d77cbeac4da2357579278c2841d39602040e0d43b3dc9839ec" exitCode=143 
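
The nova teardown above is the normal graceful-deletion path: a "SyncLoop DELETE" arrives from the API server, kubelet kills each container with the pod's grace period (gracePeriod=30 here), and the runtime delivers SIGTERM. The log-tail containers exit with code 143 = 128 + 15 (killed by SIGTERM), while processes that shut down cleanly on SIGTERM report exitCode=0, as the conductors do below. The "ExecSync cmd from runtime service failed ... container is stopping" errors are readiness probes racing the shutdown; the probe is an exec probe running /usr/bin/pgrep -r DRST nova-scheduler (succeed if a nova-scheduler process exists in run state D, R, S or T). A sketch of that probe expressed with the k8s.io/api types, where the command comes from the ExecSync entries above and the periods/thresholds are assumptions for illustration:

    package main

    import (
    	"fmt"

    	corev1 "k8s.io/api/core/v1"
    )

    func main() {
    	// Exec readiness probe as suggested by the ExecSync log entries.
    	probe := &corev1.Probe{
    		ProbeHandler: corev1.ProbeHandler{
    			Exec: &corev1.ExecAction{
    				Command: []string{"/usr/bin/pgrep", "-r", "DRST", "nova-scheduler"},
    			},
    		},
    		PeriodSeconds:    10, // assumed
    		FailureThreshold: 3,  // assumed
    	}
    	fmt.Println(probe.Exec.Command)
    }
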
Oct 10 19:07:28 crc kubenswrapper[4799]: I1010 19:07:28.626424 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3f04144b-1b73-4aa0-8525-53f1a68da6ee","Type":"ContainerDied","Data":"edc89c18d9c9a9d77cbeac4da2357579278c2841d39602040e0d43b3dc9839ec"} Oct 10 19:07:29 crc kubenswrapper[4799]: I1010 19:07:29.640701 4799 generic.go:334] "Generic (PLEG): container finished" podID="44c716fd-97e8-41e2-a350-99ec283d47d7" containerID="b5286df2abe730c9b0086c71d5f1c6b73d94403b6f477b779d598d862320347e" exitCode=0 Oct 10 19:07:29 crc kubenswrapper[4799]: I1010 19:07:29.640840 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"44c716fd-97e8-41e2-a350-99ec283d47d7","Type":"ContainerDied","Data":"b5286df2abe730c9b0086c71d5f1c6b73d94403b6f477b779d598d862320347e"} Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.001314 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.175430 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7cqxb\" (UniqueName: \"kubernetes.io/projected/44c716fd-97e8-41e2-a350-99ec283d47d7-kube-api-access-7cqxb\") pod \"44c716fd-97e8-41e2-a350-99ec283d47d7\" (UID: \"44c716fd-97e8-41e2-a350-99ec283d47d7\") " Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.175484 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44c716fd-97e8-41e2-a350-99ec283d47d7-combined-ca-bundle\") pod \"44c716fd-97e8-41e2-a350-99ec283d47d7\" (UID: \"44c716fd-97e8-41e2-a350-99ec283d47d7\") " Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.175542 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44c716fd-97e8-41e2-a350-99ec283d47d7-config-data\") pod \"44c716fd-97e8-41e2-a350-99ec283d47d7\" (UID: \"44c716fd-97e8-41e2-a350-99ec283d47d7\") " Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.184200 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44c716fd-97e8-41e2-a350-99ec283d47d7-kube-api-access-7cqxb" (OuterVolumeSpecName: "kube-api-access-7cqxb") pod "44c716fd-97e8-41e2-a350-99ec283d47d7" (UID: "44c716fd-97e8-41e2-a350-99ec283d47d7"). InnerVolumeSpecName "kube-api-access-7cqxb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 19:07:30 crc kubenswrapper[4799]: E1010 19:07:30.192662 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 64382c304beacff299cd8fe303ce2e24e2978c781d0ee3c6055e62035ff81435 is running failed: container process not found" containerID="64382c304beacff299cd8fe303ce2e24e2978c781d0ee3c6055e62035ff81435" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 10 19:07:30 crc kubenswrapper[4799]: E1010 19:07:30.193159 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 64382c304beacff299cd8fe303ce2e24e2978c781d0ee3c6055e62035ff81435 is running failed: container process not found" containerID="64382c304beacff299cd8fe303ce2e24e2978c781d0ee3c6055e62035ff81435" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 10 19:07:30 crc kubenswrapper[4799]: E1010 19:07:30.193603 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 64382c304beacff299cd8fe303ce2e24e2978c781d0ee3c6055e62035ff81435 is running failed: container process not found" containerID="64382c304beacff299cd8fe303ce2e24e2978c781d0ee3c6055e62035ff81435" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 10 19:07:30 crc kubenswrapper[4799]: E1010 19:07:30.193634 4799 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 64382c304beacff299cd8fe303ce2e24e2978c781d0ee3c6055e62035ff81435 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="c2a9c57f-e6e4-4792-b89b-ab5f9724bf36" containerName="nova-cell1-conductor-conductor" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.203309 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.208837 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44c716fd-97e8-41e2-a350-99ec283d47d7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "44c716fd-97e8-41e2-a350-99ec283d47d7" (UID: "44c716fd-97e8-41e2-a350-99ec283d47d7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.210020 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44c716fd-97e8-41e2-a350-99ec283d47d7-config-data" (OuterVolumeSpecName: "config-data") pod "44c716fd-97e8-41e2-a350-99ec283d47d7" (UID: "44c716fd-97e8-41e2-a350-99ec283d47d7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.278226 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7cqxb\" (UniqueName: \"kubernetes.io/projected/44c716fd-97e8-41e2-a350-99ec283d47d7-kube-api-access-7cqxb\") on node \"crc\" DevicePath \"\"" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.278257 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44c716fd-97e8-41e2-a350-99ec283d47d7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.278266 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44c716fd-97e8-41e2-a350-99ec283d47d7-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.379137 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2a9c57f-e6e4-4792-b89b-ab5f9724bf36-combined-ca-bundle\") pod \"c2a9c57f-e6e4-4792-b89b-ab5f9724bf36\" (UID: \"c2a9c57f-e6e4-4792-b89b-ab5f9724bf36\") " Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.379494 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qmqbq\" (UniqueName: \"kubernetes.io/projected/c2a9c57f-e6e4-4792-b89b-ab5f9724bf36-kube-api-access-qmqbq\") pod \"c2a9c57f-e6e4-4792-b89b-ab5f9724bf36\" (UID: \"c2a9c57f-e6e4-4792-b89b-ab5f9724bf36\") " Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.379536 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2a9c57f-e6e4-4792-b89b-ab5f9724bf36-config-data\") pod \"c2a9c57f-e6e4-4792-b89b-ab5f9724bf36\" (UID: \"c2a9c57f-e6e4-4792-b89b-ab5f9724bf36\") " Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.386987 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2a9c57f-e6e4-4792-b89b-ab5f9724bf36-kube-api-access-qmqbq" (OuterVolumeSpecName: "kube-api-access-qmqbq") pod "c2a9c57f-e6e4-4792-b89b-ab5f9724bf36" (UID: "c2a9c57f-e6e4-4792-b89b-ab5f9724bf36"). InnerVolumeSpecName "kube-api-access-qmqbq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.430002 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2a9c57f-e6e4-4792-b89b-ab5f9724bf36-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c2a9c57f-e6e4-4792-b89b-ab5f9724bf36" (UID: "c2a9c57f-e6e4-4792-b89b-ab5f9724bf36"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.442107 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2a9c57f-e6e4-4792-b89b-ab5f9724bf36-config-data" (OuterVolumeSpecName: "config-data") pod "c2a9c57f-e6e4-4792-b89b-ab5f9724bf36" (UID: "c2a9c57f-e6e4-4792-b89b-ab5f9724bf36"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.482169 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2a9c57f-e6e4-4792-b89b-ab5f9724bf36-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.482207 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qmqbq\" (UniqueName: \"kubernetes.io/projected/c2a9c57f-e6e4-4792-b89b-ab5f9724bf36-kube-api-access-qmqbq\") on node \"crc\" DevicePath \"\"" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.482220 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2a9c57f-e6e4-4792-b89b-ab5f9724bf36-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.657439 4799 generic.go:334] "Generic (PLEG): container finished" podID="c2a9c57f-e6e4-4792-b89b-ab5f9724bf36" containerID="64382c304beacff299cd8fe303ce2e24e2978c781d0ee3c6055e62035ff81435" exitCode=0 Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.657551 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"c2a9c57f-e6e4-4792-b89b-ab5f9724bf36","Type":"ContainerDied","Data":"64382c304beacff299cd8fe303ce2e24e2978c781d0ee3c6055e62035ff81435"} Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.657592 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"c2a9c57f-e6e4-4792-b89b-ab5f9724bf36","Type":"ContainerDied","Data":"2c9c633543b7304b6d7b2ca0a844d1d17abd89b0528206001b1e1e1124985c05"} Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.657621 4799 scope.go:117] "RemoveContainer" containerID="64382c304beacff299cd8fe303ce2e24e2978c781d0ee3c6055e62035ff81435" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.657800 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.663093 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"44c716fd-97e8-41e2-a350-99ec283d47d7","Type":"ContainerDied","Data":"7bf9facc25356684a55466b8a34f2d517a6f321573b4d7f658c2a16d57b2c2f4"} Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.663232 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.701373 4799 scope.go:117] "RemoveContainer" containerID="64382c304beacff299cd8fe303ce2e24e2978c781d0ee3c6055e62035ff81435" Oct 10 19:07:30 crc kubenswrapper[4799]: E1010 19:07:30.702302 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"64382c304beacff299cd8fe303ce2e24e2978c781d0ee3c6055e62035ff81435\": container with ID starting with 64382c304beacff299cd8fe303ce2e24e2978c781d0ee3c6055e62035ff81435 not found: ID does not exist" containerID="64382c304beacff299cd8fe303ce2e24e2978c781d0ee3c6055e62035ff81435" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.702344 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64382c304beacff299cd8fe303ce2e24e2978c781d0ee3c6055e62035ff81435"} err="failed to get container status \"64382c304beacff299cd8fe303ce2e24e2978c781d0ee3c6055e62035ff81435\": rpc error: code = NotFound desc = could not find container \"64382c304beacff299cd8fe303ce2e24e2978c781d0ee3c6055e62035ff81435\": container with ID starting with 64382c304beacff299cd8fe303ce2e24e2978c781d0ee3c6055e62035ff81435 not found: ID does not exist" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.702385 4799 scope.go:117] "RemoveContainer" containerID="b5286df2abe730c9b0086c71d5f1c6b73d94403b6f477b779d598d862320347e" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.711375 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.713573 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="d3ae1096-1bb5-408b-84a1-58b8cd21bad7" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.94:8775/\": read tcp 10.217.0.2:45724->10.217.1.94:8775: read: connection reset by peer" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.713607 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="d3ae1096-1bb5-408b-84a1-58b8cd21bad7" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.94:8775/\": read tcp 10.217.0.2:45722->10.217.1.94:8775: read: connection reset by peer" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.740907 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.777887 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.798180 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.819128 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 10 19:07:30 crc kubenswrapper[4799]: E1010 19:07:30.819667 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d202df0-e4fd-4d60-8fd7-7c05e281f020" containerName="registry-server" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.819687 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d202df0-e4fd-4d60-8fd7-7c05e281f020" containerName="registry-server" Oct 10 19:07:30 crc kubenswrapper[4799]: E1010 19:07:30.819723 4799 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="c2a9c57f-e6e4-4792-b89b-ab5f9724bf36" containerName="nova-cell1-conductor-conductor" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.819730 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2a9c57f-e6e4-4792-b89b-ab5f9724bf36" containerName="nova-cell1-conductor-conductor" Oct 10 19:07:30 crc kubenswrapper[4799]: E1010 19:07:30.819737 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d202df0-e4fd-4d60-8fd7-7c05e281f020" containerName="extract-content" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.819743 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d202df0-e4fd-4d60-8fd7-7c05e281f020" containerName="extract-content" Oct 10 19:07:30 crc kubenswrapper[4799]: E1010 19:07:30.819769 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44c716fd-97e8-41e2-a350-99ec283d47d7" containerName="nova-cell0-conductor-conductor" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.819775 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="44c716fd-97e8-41e2-a350-99ec283d47d7" containerName="nova-cell0-conductor-conductor" Oct 10 19:07:30 crc kubenswrapper[4799]: E1010 19:07:30.819786 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d202df0-e4fd-4d60-8fd7-7c05e281f020" containerName="extract-utilities" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.819792 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d202df0-e4fd-4d60-8fd7-7c05e281f020" containerName="extract-utilities" Oct 10 19:07:30 crc kubenswrapper[4799]: E1010 19:07:30.819817 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28" containerName="neutron-dhcp-openstack-openstack-cell1" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.819823 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28" containerName="neutron-dhcp-openstack-openstack-cell1" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.820460 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="44c716fd-97e8-41e2-a350-99ec283d47d7" containerName="nova-cell0-conductor-conductor" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.820484 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2a9c57f-e6e4-4792-b89b-ab5f9724bf36" containerName="nova-cell1-conductor-conductor" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.820510 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d202df0-e4fd-4d60-8fd7-7c05e281f020" containerName="registry-server" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.820519 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28" containerName="neutron-dhcp-openstack-openstack-cell1" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.821314 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.823402 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.840649 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.852504 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.853995 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.855281 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.875208 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.994242 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zr89q\" (UniqueName: \"kubernetes.io/projected/69bae6bc-ccad-4146-b345-5ca2a96d74c5-kube-api-access-zr89q\") pod \"nova-cell0-conductor-0\" (UID: \"69bae6bc-ccad-4146-b345-5ca2a96d74c5\") " pod="openstack/nova-cell0-conductor-0" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.994775 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69bae6bc-ccad-4146-b345-5ca2a96d74c5-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"69bae6bc-ccad-4146-b345-5ca2a96d74c5\") " pod="openstack/nova-cell0-conductor-0" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.995909 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4vrr\" (UniqueName: \"kubernetes.io/projected/c6588e6c-163e-4898-9dfc-0fa6932e44af-kube-api-access-m4vrr\") pod \"nova-cell1-conductor-0\" (UID: \"c6588e6c-163e-4898-9dfc-0fa6932e44af\") " pod="openstack/nova-cell1-conductor-0" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.997097 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6588e6c-163e-4898-9dfc-0fa6932e44af-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"c6588e6c-163e-4898-9dfc-0fa6932e44af\") " pod="openstack/nova-cell1-conductor-0" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.997239 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69bae6bc-ccad-4146-b345-5ca2a96d74c5-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"69bae6bc-ccad-4146-b345-5ca2a96d74c5\") " pod="openstack/nova-cell0-conductor-0" Oct 10 19:07:30 crc kubenswrapper[4799]: I1010 19:07:30.997329 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6588e6c-163e-4898-9dfc-0fa6932e44af-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"c6588e6c-163e-4898-9dfc-0fa6932e44af\") " pod="openstack/nova-cell1-conductor-0" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.099517 4799 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6588e6c-163e-4898-9dfc-0fa6932e44af-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"c6588e6c-163e-4898-9dfc-0fa6932e44af\") " pod="openstack/nova-cell1-conductor-0" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.099592 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69bae6bc-ccad-4146-b345-5ca2a96d74c5-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"69bae6bc-ccad-4146-b345-5ca2a96d74c5\") " pod="openstack/nova-cell0-conductor-0" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.099651 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6588e6c-163e-4898-9dfc-0fa6932e44af-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"c6588e6c-163e-4898-9dfc-0fa6932e44af\") " pod="openstack/nova-cell1-conductor-0" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.099692 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zr89q\" (UniqueName: \"kubernetes.io/projected/69bae6bc-ccad-4146-b345-5ca2a96d74c5-kube-api-access-zr89q\") pod \"nova-cell0-conductor-0\" (UID: \"69bae6bc-ccad-4146-b345-5ca2a96d74c5\") " pod="openstack/nova-cell0-conductor-0" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.099742 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69bae6bc-ccad-4146-b345-5ca2a96d74c5-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"69bae6bc-ccad-4146-b345-5ca2a96d74c5\") " pod="openstack/nova-cell0-conductor-0" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.099849 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4vrr\" (UniqueName: \"kubernetes.io/projected/c6588e6c-163e-4898-9dfc-0fa6932e44af-kube-api-access-m4vrr\") pod \"nova-cell1-conductor-0\" (UID: \"c6588e6c-163e-4898-9dfc-0fa6932e44af\") " pod="openstack/nova-cell1-conductor-0" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.118362 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4vrr\" (UniqueName: \"kubernetes.io/projected/c6588e6c-163e-4898-9dfc-0fa6932e44af-kube-api-access-m4vrr\") pod \"nova-cell1-conductor-0\" (UID: \"c6588e6c-163e-4898-9dfc-0fa6932e44af\") " pod="openstack/nova-cell1-conductor-0" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.119908 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69bae6bc-ccad-4146-b345-5ca2a96d74c5-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"69bae6bc-ccad-4146-b345-5ca2a96d74c5\") " pod="openstack/nova-cell0-conductor-0" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.121975 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69bae6bc-ccad-4146-b345-5ca2a96d74c5-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"69bae6bc-ccad-4146-b345-5ca2a96d74c5\") " pod="openstack/nova-cell0-conductor-0" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.122235 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zr89q\" (UniqueName: 
\"kubernetes.io/projected/69bae6bc-ccad-4146-b345-5ca2a96d74c5-kube-api-access-zr89q\") pod \"nova-cell0-conductor-0\" (UID: \"69bae6bc-ccad-4146-b345-5ca2a96d74c5\") " pod="openstack/nova-cell0-conductor-0" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.122835 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6588e6c-163e-4898-9dfc-0fa6932e44af-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"c6588e6c-163e-4898-9dfc-0fa6932e44af\") " pod="openstack/nova-cell1-conductor-0" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.127551 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6588e6c-163e-4898-9dfc-0fa6932e44af-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"c6588e6c-163e-4898-9dfc-0fa6932e44af\") " pod="openstack/nova-cell1-conductor-0" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.228585 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.242394 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.246282 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.268772 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.417303 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3ae1096-1bb5-408b-84a1-58b8cd21bad7-logs\") pod \"d3ae1096-1bb5-408b-84a1-58b8cd21bad7\" (UID: \"d3ae1096-1bb5-408b-84a1-58b8cd21bad7\") " Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.417598 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4p2pj\" (UniqueName: \"kubernetes.io/projected/d3ae1096-1bb5-408b-84a1-58b8cd21bad7-kube-api-access-4p2pj\") pod \"d3ae1096-1bb5-408b-84a1-58b8cd21bad7\" (UID: \"d3ae1096-1bb5-408b-84a1-58b8cd21bad7\") " Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.417680 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3f04144b-1b73-4aa0-8525-53f1a68da6ee-logs\") pod \"3f04144b-1b73-4aa0-8525-53f1a68da6ee\" (UID: \"3f04144b-1b73-4aa0-8525-53f1a68da6ee\") " Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.417734 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3ae1096-1bb5-408b-84a1-58b8cd21bad7-combined-ca-bundle\") pod \"d3ae1096-1bb5-408b-84a1-58b8cd21bad7\" (UID: \"d3ae1096-1bb5-408b-84a1-58b8cd21bad7\") " Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.417853 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3ae1096-1bb5-408b-84a1-58b8cd21bad7-config-data\") pod \"d3ae1096-1bb5-408b-84a1-58b8cd21bad7\" (UID: \"d3ae1096-1bb5-408b-84a1-58b8cd21bad7\") " Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.417920 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/3f04144b-1b73-4aa0-8525-53f1a68da6ee-config-data\") pod \"3f04144b-1b73-4aa0-8525-53f1a68da6ee\" (UID: \"3f04144b-1b73-4aa0-8525-53f1a68da6ee\") " Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.417993 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f04144b-1b73-4aa0-8525-53f1a68da6ee-combined-ca-bundle\") pod \"3f04144b-1b73-4aa0-8525-53f1a68da6ee\" (UID: \"3f04144b-1b73-4aa0-8525-53f1a68da6ee\") " Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.418024 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6v5d5\" (UniqueName: \"kubernetes.io/projected/3f04144b-1b73-4aa0-8525-53f1a68da6ee-kube-api-access-6v5d5\") pod \"3f04144b-1b73-4aa0-8525-53f1a68da6ee\" (UID: \"3f04144b-1b73-4aa0-8525-53f1a68da6ee\") " Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.418080 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f04144b-1b73-4aa0-8525-53f1a68da6ee-logs" (OuterVolumeSpecName: "logs") pod "3f04144b-1b73-4aa0-8525-53f1a68da6ee" (UID: "3f04144b-1b73-4aa0-8525-53f1a68da6ee"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.418493 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3f04144b-1b73-4aa0-8525-53f1a68da6ee-logs\") on node \"crc\" DevicePath \"\"" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.419095 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3ae1096-1bb5-408b-84a1-58b8cd21bad7-logs" (OuterVolumeSpecName: "logs") pod "d3ae1096-1bb5-408b-84a1-58b8cd21bad7" (UID: "d3ae1096-1bb5-408b-84a1-58b8cd21bad7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.425160 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3ae1096-1bb5-408b-84a1-58b8cd21bad7-kube-api-access-4p2pj" (OuterVolumeSpecName: "kube-api-access-4p2pj") pod "d3ae1096-1bb5-408b-84a1-58b8cd21bad7" (UID: "d3ae1096-1bb5-408b-84a1-58b8cd21bad7"). InnerVolumeSpecName "kube-api-access-4p2pj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.425198 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f04144b-1b73-4aa0-8525-53f1a68da6ee-kube-api-access-6v5d5" (OuterVolumeSpecName: "kube-api-access-6v5d5") pod "3f04144b-1b73-4aa0-8525-53f1a68da6ee" (UID: "3f04144b-1b73-4aa0-8525-53f1a68da6ee"). InnerVolumeSpecName "kube-api-access-6v5d5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.434657 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44c716fd-97e8-41e2-a350-99ec283d47d7" path="/var/lib/kubelet/pods/44c716fd-97e8-41e2-a350-99ec283d47d7/volumes" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.435984 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2a9c57f-e6e4-4792-b89b-ab5f9724bf36" path="/var/lib/kubelet/pods/c2a9c57f-e6e4-4792-b89b-ab5f9724bf36/volumes" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.455819 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3ae1096-1bb5-408b-84a1-58b8cd21bad7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d3ae1096-1bb5-408b-84a1-58b8cd21bad7" (UID: "d3ae1096-1bb5-408b-84a1-58b8cd21bad7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.456117 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f04144b-1b73-4aa0-8525-53f1a68da6ee-config-data" (OuterVolumeSpecName: "config-data") pod "3f04144b-1b73-4aa0-8525-53f1a68da6ee" (UID: "3f04144b-1b73-4aa0-8525-53f1a68da6ee"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.456860 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f04144b-1b73-4aa0-8525-53f1a68da6ee-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3f04144b-1b73-4aa0-8525-53f1a68da6ee" (UID: "3f04144b-1b73-4aa0-8525-53f1a68da6ee"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.463579 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3ae1096-1bb5-408b-84a1-58b8cd21bad7-config-data" (OuterVolumeSpecName: "config-data") pod "d3ae1096-1bb5-408b-84a1-58b8cd21bad7" (UID: "d3ae1096-1bb5-408b-84a1-58b8cd21bad7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.520105 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6v5d5\" (UniqueName: \"kubernetes.io/projected/3f04144b-1b73-4aa0-8525-53f1a68da6ee-kube-api-access-6v5d5\") on node \"crc\" DevicePath \"\"" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.520130 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3ae1096-1bb5-408b-84a1-58b8cd21bad7-logs\") on node \"crc\" DevicePath \"\"" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.520140 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4p2pj\" (UniqueName: \"kubernetes.io/projected/d3ae1096-1bb5-408b-84a1-58b8cd21bad7-kube-api-access-4p2pj\") on node \"crc\" DevicePath \"\"" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.520149 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3ae1096-1bb5-408b-84a1-58b8cd21bad7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.520157 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3ae1096-1bb5-408b-84a1-58b8cd21bad7-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.520167 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f04144b-1b73-4aa0-8525-53f1a68da6ee-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.520177 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f04144b-1b73-4aa0-8525-53f1a68da6ee-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.676320 4799 generic.go:334] "Generic (PLEG): container finished" podID="d3ae1096-1bb5-408b-84a1-58b8cd21bad7" containerID="8a162cb9ff7707a7d12da2bd28d025e2ff34657d53cdd924e8311302a0804bf3" exitCode=0 Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.676372 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d3ae1096-1bb5-408b-84a1-58b8cd21bad7","Type":"ContainerDied","Data":"8a162cb9ff7707a7d12da2bd28d025e2ff34657d53cdd924e8311302a0804bf3"} Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.676429 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d3ae1096-1bb5-408b-84a1-58b8cd21bad7","Type":"ContainerDied","Data":"bf2278442e31a64f005650faca74f800a4d40ec5bfcee20c0f8f433ea36251a0"} Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.676453 4799 scope.go:117] "RemoveContainer" containerID="8a162cb9ff7707a7d12da2bd28d025e2ff34657d53cdd924e8311302a0804bf3" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.676389 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.684634 4799 generic.go:334] "Generic (PLEG): container finished" podID="3f04144b-1b73-4aa0-8525-53f1a68da6ee" containerID="20359d41f42d593db627882ec635cdb4e247abcdb9e6bcf45e80cc035d84cdfb" exitCode=0 Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.684694 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3f04144b-1b73-4aa0-8525-53f1a68da6ee","Type":"ContainerDied","Data":"20359d41f42d593db627882ec635cdb4e247abcdb9e6bcf45e80cc035d84cdfb"} Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.684720 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3f04144b-1b73-4aa0-8525-53f1a68da6ee","Type":"ContainerDied","Data":"a5eed2577162c8e66eeb4eda9d6decb9db911d9b46e4884414317f20579f4f95"} Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.684788 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.722965 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.740703 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.755104 4799 scope.go:117] "RemoveContainer" containerID="59ecd2d4a250236551316a6d1607810dac5f688a05127dcdf83bdcdfdc7a7f02" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.776305 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 10 19:07:31 crc kubenswrapper[4799]: E1010 19:07:31.776720 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f04144b-1b73-4aa0-8525-53f1a68da6ee" containerName="nova-api-api" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.776735 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f04144b-1b73-4aa0-8525-53f1a68da6ee" containerName="nova-api-api" Oct 10 19:07:31 crc kubenswrapper[4799]: E1010 19:07:31.806846 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f04144b-1b73-4aa0-8525-53f1a68da6ee" containerName="nova-api-log" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.806885 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f04144b-1b73-4aa0-8525-53f1a68da6ee" containerName="nova-api-log" Oct 10 19:07:31 crc kubenswrapper[4799]: E1010 19:07:31.806925 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3ae1096-1bb5-408b-84a1-58b8cd21bad7" containerName="nova-metadata-metadata" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.806932 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3ae1096-1bb5-408b-84a1-58b8cd21bad7" containerName="nova-metadata-metadata" Oct 10 19:07:31 crc kubenswrapper[4799]: E1010 19:07:31.806969 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3ae1096-1bb5-408b-84a1-58b8cd21bad7" containerName="nova-metadata-log" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.806978 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3ae1096-1bb5-408b-84a1-58b8cd21bad7" containerName="nova-metadata-log" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.807340 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f04144b-1b73-4aa0-8525-53f1a68da6ee" containerName="nova-api-api" Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 
Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.807374 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f04144b-1b73-4aa0-8525-53f1a68da6ee" containerName="nova-api-log"
Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.807390 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3ae1096-1bb5-408b-84a1-58b8cd21bad7" containerName="nova-metadata-log"
Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.808525 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.808612 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.810111 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.813286 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.820921 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.852964 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.867245 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Oct 10 19:07:31 crc kubenswrapper[4799]: W1010 19:07:31.867641 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc6588e6c_163e_4898_9dfc_0fa6932e44af.slice/crio-b592b99ff03628b6a390a2883809ca84541100fd655fc625e7d1b2330a2a1aa9 WatchSource:0}: Error finding container b592b99ff03628b6a390a2883809ca84541100fd655fc625e7d1b2330a2a1aa9: Status 404 returned error can't find the container with id b592b99ff03628b6a390a2883809ca84541100fd655fc625e7d1b2330a2a1aa9
Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.869855 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.871113 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.880379 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.899121 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.927248 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1033dc6-9f93-4a9c-bba1-7ca137bbd199-logs\") pod \"nova-metadata-0\" (UID: \"e1033dc6-9f93-4a9c-bba1-7ca137bbd199\") " pod="openstack/nova-metadata-0"
Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.927311 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlc7m\" (UniqueName: \"kubernetes.io/projected/e1033dc6-9f93-4a9c-bba1-7ca137bbd199-kube-api-access-tlc7m\") pod \"nova-metadata-0\" (UID: \"e1033dc6-9f93-4a9c-bba1-7ca137bbd199\") " pod="openstack/nova-metadata-0"
Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.927623 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1033dc6-9f93-4a9c-bba1-7ca137bbd199-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e1033dc6-9f93-4a9c-bba1-7ca137bbd199\") " pod="openstack/nova-metadata-0"
Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.927817 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1033dc6-9f93-4a9c-bba1-7ca137bbd199-config-data\") pod \"nova-metadata-0\" (UID: \"e1033dc6-9f93-4a9c-bba1-7ca137bbd199\") " pod="openstack/nova-metadata-0"
Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.949095 4799 scope.go:117] "RemoveContainer" containerID="8a162cb9ff7707a7d12da2bd28d025e2ff34657d53cdd924e8311302a0804bf3"
Oct 10 19:07:31 crc kubenswrapper[4799]: E1010 19:07:31.949592 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a162cb9ff7707a7d12da2bd28d025e2ff34657d53cdd924e8311302a0804bf3\": container with ID starting with 8a162cb9ff7707a7d12da2bd28d025e2ff34657d53cdd924e8311302a0804bf3 not found: ID does not exist" containerID="8a162cb9ff7707a7d12da2bd28d025e2ff34657d53cdd924e8311302a0804bf3"
Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.949623 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a162cb9ff7707a7d12da2bd28d025e2ff34657d53cdd924e8311302a0804bf3"} err="failed to get container status \"8a162cb9ff7707a7d12da2bd28d025e2ff34657d53cdd924e8311302a0804bf3\": rpc error: code = NotFound desc = could not find container \"8a162cb9ff7707a7d12da2bd28d025e2ff34657d53cdd924e8311302a0804bf3\": container with ID starting with 8a162cb9ff7707a7d12da2bd28d025e2ff34657d53cdd924e8311302a0804bf3 not found: ID does not exist"
Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.949650 4799 scope.go:117] "RemoveContainer" containerID="59ecd2d4a250236551316a6d1607810dac5f688a05127dcdf83bdcdfdc7a7f02"
Oct 10 19:07:31 crc kubenswrapper[4799]: E1010 19:07:31.950060 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59ecd2d4a250236551316a6d1607810dac5f688a05127dcdf83bdcdfdc7a7f02\": container with ID starting with 59ecd2d4a250236551316a6d1607810dac5f688a05127dcdf83bdcdfdc7a7f02 not found: ID does not exist" containerID="59ecd2d4a250236551316a6d1607810dac5f688a05127dcdf83bdcdfdc7a7f02"
Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.950088 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59ecd2d4a250236551316a6d1607810dac5f688a05127dcdf83bdcdfdc7a7f02"} err="failed to get container status \"59ecd2d4a250236551316a6d1607810dac5f688a05127dcdf83bdcdfdc7a7f02\": rpc error: code = NotFound desc = could not find container \"59ecd2d4a250236551316a6d1607810dac5f688a05127dcdf83bdcdfdc7a7f02\": container with ID starting with 59ecd2d4a250236551316a6d1607810dac5f688a05127dcdf83bdcdfdc7a7f02 not found: ID does not exist"
Oct 10 19:07:31 crc kubenswrapper[4799]: I1010 19:07:31.950109 4799 scope.go:117] "RemoveContainer" containerID="20359d41f42d593db627882ec635cdb4e247abcdb9e6bcf45e80cc035d84cdfb"
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.029243 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlc7m\" (UniqueName: \"kubernetes.io/projected/e1033dc6-9f93-4a9c-bba1-7ca137bbd199-kube-api-access-tlc7m\") pod \"nova-metadata-0\" (UID: \"e1033dc6-9f93-4a9c-bba1-7ca137bbd199\") " pod="openstack/nova-metadata-0"
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.029435 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1033dc6-9f93-4a9c-bba1-7ca137bbd199-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e1033dc6-9f93-4a9c-bba1-7ca137bbd199\") " pod="openstack/nova-metadata-0"
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.029543 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8eabce3-7d36-4c68-b130-ef95aab11607-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a8eabce3-7d36-4c68-b130-ef95aab11607\") " pod="openstack/nova-api-0"
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.029648 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a8eabce3-7d36-4c68-b130-ef95aab11607-logs\") pod \"nova-api-0\" (UID: \"a8eabce3-7d36-4c68-b130-ef95aab11607\") " pod="openstack/nova-api-0"
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.029679 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvx5m\" (UniqueName: \"kubernetes.io/projected/a8eabce3-7d36-4c68-b130-ef95aab11607-kube-api-access-lvx5m\") pod \"nova-api-0\" (UID: \"a8eabce3-7d36-4c68-b130-ef95aab11607\") " pod="openstack/nova-api-0"
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.029994 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8eabce3-7d36-4c68-b130-ef95aab11607-config-data\") pod \"nova-api-0\" (UID: \"a8eabce3-7d36-4c68-b130-ef95aab11607\") " pod="openstack/nova-api-0"
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.030103 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1033dc6-9f93-4a9c-bba1-7ca137bbd199-config-data\") pod \"nova-metadata-0\" (UID: \"e1033dc6-9f93-4a9c-bba1-7ca137bbd199\") " pod="openstack/nova-metadata-0"
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.030193 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1033dc6-9f93-4a9c-bba1-7ca137bbd199-logs\") pod \"nova-metadata-0\" (UID: \"e1033dc6-9f93-4a9c-bba1-7ca137bbd199\") " pod="openstack/nova-metadata-0"
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.031113 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1033dc6-9f93-4a9c-bba1-7ca137bbd199-logs\") pod \"nova-metadata-0\" (UID: \"e1033dc6-9f93-4a9c-bba1-7ca137bbd199\") " pod="openstack/nova-metadata-0"
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.033294 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1033dc6-9f93-4a9c-bba1-7ca137bbd199-config-data\") pod \"nova-metadata-0\" (UID: \"e1033dc6-9f93-4a9c-bba1-7ca137bbd199\") " pod="openstack/nova-metadata-0"
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.039344 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1033dc6-9f93-4a9c-bba1-7ca137bbd199-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e1033dc6-9f93-4a9c-bba1-7ca137bbd199\") " pod="openstack/nova-metadata-0"
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.045463 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlc7m\" (UniqueName: \"kubernetes.io/projected/e1033dc6-9f93-4a9c-bba1-7ca137bbd199-kube-api-access-tlc7m\") pod \"nova-metadata-0\" (UID: \"e1033dc6-9f93-4a9c-bba1-7ca137bbd199\") " pod="openstack/nova-metadata-0"
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.114684 4799 scope.go:117] "RemoveContainer" containerID="edc89c18d9c9a9d77cbeac4da2357579278c2841d39602040e0d43b3dc9839ec"
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.128496 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.133223 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8eabce3-7d36-4c68-b130-ef95aab11607-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a8eabce3-7d36-4c68-b130-ef95aab11607\") " pod="openstack/nova-api-0"
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.133286 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a8eabce3-7d36-4c68-b130-ef95aab11607-logs\") pod \"nova-api-0\" (UID: \"a8eabce3-7d36-4c68-b130-ef95aab11607\") " pod="openstack/nova-api-0"
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.133312 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvx5m\" (UniqueName: \"kubernetes.io/projected/a8eabce3-7d36-4c68-b130-ef95aab11607-kube-api-access-lvx5m\") pod \"nova-api-0\" (UID: \"a8eabce3-7d36-4c68-b130-ef95aab11607\") " pod="openstack/nova-api-0"
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.133360 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8eabce3-7d36-4c68-b130-ef95aab11607-config-data\") pod \"nova-api-0\" (UID: \"a8eabce3-7d36-4c68-b130-ef95aab11607\") " pod="openstack/nova-api-0"
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.133942 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a8eabce3-7d36-4c68-b130-ef95aab11607-logs\") pod \"nova-api-0\" (UID: \"a8eabce3-7d36-4c68-b130-ef95aab11607\") " pod="openstack/nova-api-0"
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.139303 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8eabce3-7d36-4c68-b130-ef95aab11607-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a8eabce3-7d36-4c68-b130-ef95aab11607\") " pod="openstack/nova-api-0"
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.140139 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8eabce3-7d36-4c68-b130-ef95aab11607-config-data\") pod \"nova-api-0\" (UID: \"a8eabce3-7d36-4c68-b130-ef95aab11607\") " pod="openstack/nova-api-0"
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.165158 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvx5m\" (UniqueName: \"kubernetes.io/projected/a8eabce3-7d36-4c68-b130-ef95aab11607-kube-api-access-lvx5m\") pod \"nova-api-0\" (UID: \"a8eabce3-7d36-4c68-b130-ef95aab11607\") " pod="openstack/nova-api-0"
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.174128 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.176133 4799 scope.go:117] "RemoveContainer" containerID="20359d41f42d593db627882ec635cdb4e247abcdb9e6bcf45e80cc035d84cdfb"
Oct 10 19:07:32 crc kubenswrapper[4799]: E1010 19:07:32.176769 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"20359d41f42d593db627882ec635cdb4e247abcdb9e6bcf45e80cc035d84cdfb\": container with ID starting with 20359d41f42d593db627882ec635cdb4e247abcdb9e6bcf45e80cc035d84cdfb not found: ID does not exist" containerID="20359d41f42d593db627882ec635cdb4e247abcdb9e6bcf45e80cc035d84cdfb"
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.176812 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"20359d41f42d593db627882ec635cdb4e247abcdb9e6bcf45e80cc035d84cdfb"} err="failed to get container status \"20359d41f42d593db627882ec635cdb4e247abcdb9e6bcf45e80cc035d84cdfb\": rpc error: code = NotFound desc = could not find container \"20359d41f42d593db627882ec635cdb4e247abcdb9e6bcf45e80cc035d84cdfb\": container with ID starting with 20359d41f42d593db627882ec635cdb4e247abcdb9e6bcf45e80cc035d84cdfb not found: ID does not exist"
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.176845 4799 scope.go:117] "RemoveContainer" containerID="edc89c18d9c9a9d77cbeac4da2357579278c2841d39602040e0d43b3dc9839ec"
Oct 10 19:07:32 crc kubenswrapper[4799]: E1010 19:07:32.177370 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"edc89c18d9c9a9d77cbeac4da2357579278c2841d39602040e0d43b3dc9839ec\": container with ID starting with edc89c18d9c9a9d77cbeac4da2357579278c2841d39602040e0d43b3dc9839ec not found: ID does not exist" containerID="edc89c18d9c9a9d77cbeac4da2357579278c2841d39602040e0d43b3dc9839ec"
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.177435 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"edc89c18d9c9a9d77cbeac4da2357579278c2841d39602040e0d43b3dc9839ec"} err="failed to get container status \"edc89c18d9c9a9d77cbeac4da2357579278c2841d39602040e0d43b3dc9839ec\": rpc error: code = NotFound desc = could not find container \"edc89c18d9c9a9d77cbeac4da2357579278c2841d39602040e0d43b3dc9839ec\": container with ID starting with edc89c18d9c9a9d77cbeac4da2357579278c2841d39602040e0d43b3dc9839ec not found: ID does not exist"
Oct 10 19:07:32 crc kubenswrapper[4799]: W1010 19:07:32.673263 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode1033dc6_9f93_4a9c_bba1_7ca137bbd199.slice/crio-8d967251e9bbf8b01ff1d147bc4347fbba0a0f5198df9b064b37eccfa633dd46 WatchSource:0}: Error finding container 8d967251e9bbf8b01ff1d147bc4347fbba0a0f5198df9b064b37eccfa633dd46: Status 404 returned error can't find the container with id 8d967251e9bbf8b01ff1d147bc4347fbba0a0f5198df9b064b37eccfa633dd46
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.674727 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.723722 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"c6588e6c-163e-4898-9dfc-0fa6932e44af","Type":"ContainerStarted","Data":"c40d598af7bc0b883c6c536e8bba247800cd0a63943204debea5f8e64bdf3c5b"}
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.723802 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"c6588e6c-163e-4898-9dfc-0fa6932e44af","Type":"ContainerStarted","Data":"b592b99ff03628b6a390a2883809ca84541100fd655fc625e7d1b2330a2a1aa9"}
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.725386 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0"
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.731725 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"69bae6bc-ccad-4146-b345-5ca2a96d74c5","Type":"ContainerStarted","Data":"d581cf36fe04c2fb352ab542f8a5110e6ed594db0e51c5b7513a662827df7d41"}
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.731784 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"69bae6bc-ccad-4146-b345-5ca2a96d74c5","Type":"ContainerStarted","Data":"3fa6f6d6ed5cfb17776aa15a0185552166a4fdb8d32c57c200951966de8ff4a3"}
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.731816 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0"
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.748283 4799 generic.go:334] "Generic (PLEG): container finished" podID="5938ba89-d944-4772-834f-67074d54da34" containerID="08e3bad4a85582426737268e738b80039fecbcaaa296a24c98b087818d85da91" exitCode=0
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.748386 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5938ba89-d944-4772-834f-67074d54da34","Type":"ContainerDied","Data":"08e3bad4a85582426737268e738b80039fecbcaaa296a24c98b087818d85da91"}
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.751975 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.751958375 podStartE2EDuration="2.751958375s" podCreationTimestamp="2025-10-10 19:07:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 19:07:32.742581455 +0000 UTC m=+9346.250905570" watchObservedRunningTime="2025-10-10 19:07:32.751958375 +0000 UTC m=+9346.260282490"
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.761597 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e1033dc6-9f93-4a9c-bba1-7ca137bbd199","Type":"ContainerStarted","Data":"8d967251e9bbf8b01ff1d147bc4347fbba0a0f5198df9b064b37eccfa633dd46"}
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.766481 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.771772 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.771740009 podStartE2EDuration="2.771740009s" podCreationTimestamp="2025-10-10 19:07:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 19:07:32.758348411 +0000 UTC m=+9346.266672536" watchObservedRunningTime="2025-10-10 19:07:32.771740009 +0000 UTC m=+9346.280064124"
Oct 10 19:07:32 crc kubenswrapper[4799]: W1010 19:07:32.783931 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda8eabce3_7d36_4c68_b130_ef95aab11607.slice/crio-ad7bcd51680b1c4ee9bc6d05b578016a49c8029764d12bee9ffac4bb44986453 WatchSource:0}: Error finding container ad7bcd51680b1c4ee9bc6d05b578016a49c8029764d12bee9ffac4bb44986453: Status 404 returned error can't find the container with id ad7bcd51680b1c4ee9bc6d05b578016a49c8029764d12bee9ffac4bb44986453
Oct 10 19:07:32 crc kubenswrapper[4799]: I1010 19:07:32.981586 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Oct 10 19:07:33 crc kubenswrapper[4799]: I1010 19:07:33.154981 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5938ba89-d944-4772-834f-67074d54da34-config-data\") pod \"5938ba89-d944-4772-834f-67074d54da34\" (UID: \"5938ba89-d944-4772-834f-67074d54da34\") "
Oct 10 19:07:33 crc kubenswrapper[4799]: I1010 19:07:33.155279 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-btbgv\" (UniqueName: \"kubernetes.io/projected/5938ba89-d944-4772-834f-67074d54da34-kube-api-access-btbgv\") pod \"5938ba89-d944-4772-834f-67074d54da34\" (UID: \"5938ba89-d944-4772-834f-67074d54da34\") "
Oct 10 19:07:33 crc kubenswrapper[4799]: I1010 19:07:33.155347 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5938ba89-d944-4772-834f-67074d54da34-combined-ca-bundle\") pod \"5938ba89-d944-4772-834f-67074d54da34\" (UID: \"5938ba89-d944-4772-834f-67074d54da34\") "
Oct 10 19:07:33 crc kubenswrapper[4799]: I1010 19:07:33.177670 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5938ba89-d944-4772-834f-67074d54da34-kube-api-access-btbgv" (OuterVolumeSpecName: "kube-api-access-btbgv") pod "5938ba89-d944-4772-834f-67074d54da34" (UID: "5938ba89-d944-4772-834f-67074d54da34"). InnerVolumeSpecName "kube-api-access-btbgv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 10 19:07:33 crc kubenswrapper[4799]: I1010 19:07:33.202419 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5938ba89-d944-4772-834f-67074d54da34-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5938ba89-d944-4772-834f-67074d54da34" (UID: "5938ba89-d944-4772-834f-67074d54da34"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 10 19:07:33 crc kubenswrapper[4799]: I1010 19:07:33.228275 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5938ba89-d944-4772-834f-67074d54da34-config-data" (OuterVolumeSpecName: "config-data") pod "5938ba89-d944-4772-834f-67074d54da34" (UID: "5938ba89-d944-4772-834f-67074d54da34"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
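Each "Observed pod startup duration" line above carries the pod's startup SLO measurement as klog key=value pairs; podStartSLOduration and podStartE2EDuration are identical here because no image pull was needed (both pull timestamps are the zero time). A small Python sketch that tabulates pod against podStartSLOduration (field names come from the lines themselves; the parsing regex is an assumption):

#!/usr/bin/env python3
"""Tabulate pod startup SLO durations from pod_startup_latency_tracker lines."""
import re
import sys

ROW = re.compile(r'Observed pod startup duration.*?pod="([^"]+)".*?podStartSLOduration=([0-9.]+)')

for line in open(sys.argv[1] if len(sys.argv) > 1 else "kubelet.log", errors="replace"):
    m = ROW.search(line)
    if m:
        pod, slo = m.group(1), float(m.group(2))
        print(f"{pod:<45} {slo:8.3f}s")

Run over this window it would print roughly 2.75s for both conductors, 2.80s for nova-api-0, 2.83s for nova-metadata-0 and 2.85s for nova-scheduler-0.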
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 19:07:33 crc kubenswrapper[4799]: I1010 19:07:33.258562 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-btbgv\" (UniqueName: \"kubernetes.io/projected/5938ba89-d944-4772-834f-67074d54da34-kube-api-access-btbgv\") on node \"crc\" DevicePath \"\"" Oct 10 19:07:33 crc kubenswrapper[4799]: I1010 19:07:33.258595 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5938ba89-d944-4772-834f-67074d54da34-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 19:07:33 crc kubenswrapper[4799]: I1010 19:07:33.258604 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5938ba89-d944-4772-834f-67074d54da34-config-data\") on node \"crc\" DevicePath \"\"" Oct 10 19:07:33 crc kubenswrapper[4799]: I1010 19:07:33.417162 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f04144b-1b73-4aa0-8525-53f1a68da6ee" path="/var/lib/kubelet/pods/3f04144b-1b73-4aa0-8525-53f1a68da6ee/volumes" Oct 10 19:07:33 crc kubenswrapper[4799]: I1010 19:07:33.418525 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3ae1096-1bb5-408b-84a1-58b8cd21bad7" path="/var/lib/kubelet/pods/d3ae1096-1bb5-408b-84a1-58b8cd21bad7/volumes" Oct 10 19:07:33 crc kubenswrapper[4799]: I1010 19:07:33.773572 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a8eabce3-7d36-4c68-b130-ef95aab11607","Type":"ContainerStarted","Data":"2393c00d3dc50a9af638fc64c9c104cb1abf02b66a26355e9a3387134a462c6b"} Oct 10 19:07:33 crc kubenswrapper[4799]: I1010 19:07:33.773635 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a8eabce3-7d36-4c68-b130-ef95aab11607","Type":"ContainerStarted","Data":"e5b023d06cd8d2a2a5d4444de61a3747155983b6bb1249c9f2d006fd79897a23"} Oct 10 19:07:33 crc kubenswrapper[4799]: I1010 19:07:33.773656 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a8eabce3-7d36-4c68-b130-ef95aab11607","Type":"ContainerStarted","Data":"ad7bcd51680b1c4ee9bc6d05b578016a49c8029764d12bee9ffac4bb44986453"} Oct 10 19:07:33 crc kubenswrapper[4799]: I1010 19:07:33.775399 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5938ba89-d944-4772-834f-67074d54da34","Type":"ContainerDied","Data":"8e3cba8437bd76085794176c8cd61e13b106167f28d165dc138b0f888d2ed87c"} Oct 10 19:07:33 crc kubenswrapper[4799]: I1010 19:07:33.775418 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 10 19:07:33 crc kubenswrapper[4799]: I1010 19:07:33.775621 4799 scope.go:117] "RemoveContainer" containerID="08e3bad4a85582426737268e738b80039fecbcaaa296a24c98b087818d85da91" Oct 10 19:07:33 crc kubenswrapper[4799]: I1010 19:07:33.790578 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e1033dc6-9f93-4a9c-bba1-7ca137bbd199","Type":"ContainerStarted","Data":"d931423c5dd7bbe33bbf47046ec827416438447e0e0b32942e345343472eca5e"} Oct 10 19:07:33 crc kubenswrapper[4799]: I1010 19:07:33.790807 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e1033dc6-9f93-4a9c-bba1-7ca137bbd199","Type":"ContainerStarted","Data":"e4243cae57d37b991bc0c52c0a3674585f2ba6970e72afebc11c3531f6ea8ca5"} Oct 10 19:07:33 crc kubenswrapper[4799]: I1010 19:07:33.802602 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.802583433 podStartE2EDuration="2.802583433s" podCreationTimestamp="2025-10-10 19:07:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 19:07:33.791550253 +0000 UTC m=+9347.299874408" watchObservedRunningTime="2025-10-10 19:07:33.802583433 +0000 UTC m=+9347.310907538" Oct 10 19:07:33 crc kubenswrapper[4799]: I1010 19:07:33.825252 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.825233607 podStartE2EDuration="2.825233607s" podCreationTimestamp="2025-10-10 19:07:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 19:07:33.820751777 +0000 UTC m=+9347.329075892" watchObservedRunningTime="2025-10-10 19:07:33.825233607 +0000 UTC m=+9347.333557732" Oct 10 19:07:33 crc kubenswrapper[4799]: I1010 19:07:33.862803 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 10 19:07:33 crc kubenswrapper[4799]: I1010 19:07:33.878911 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 10 19:07:33 crc kubenswrapper[4799]: I1010 19:07:33.890817 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 10 19:07:33 crc kubenswrapper[4799]: E1010 19:07:33.891657 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5938ba89-d944-4772-834f-67074d54da34" containerName="nova-scheduler-scheduler" Oct 10 19:07:33 crc kubenswrapper[4799]: I1010 19:07:33.891679 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="5938ba89-d944-4772-834f-67074d54da34" containerName="nova-scheduler-scheduler" Oct 10 19:07:33 crc kubenswrapper[4799]: I1010 19:07:33.891997 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="5938ba89-d944-4772-834f-67074d54da34" containerName="nova-scheduler-scheduler" Oct 10 19:07:33 crc kubenswrapper[4799]: I1010 19:07:33.892963 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 10 19:07:33 crc kubenswrapper[4799]: I1010 19:07:33.896028 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 10 19:07:33 crc kubenswrapper[4799]: I1010 19:07:33.897071 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 10 19:07:33 crc kubenswrapper[4799]: I1010 19:07:33.973258 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73eb2584-d6fc-4453-a1fb-6df47179064b-config-data\") pod \"nova-scheduler-0\" (UID: \"73eb2584-d6fc-4453-a1fb-6df47179064b\") " pod="openstack/nova-scheduler-0" Oct 10 19:07:33 crc kubenswrapper[4799]: I1010 19:07:33.973871 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mr2fq\" (UniqueName: \"kubernetes.io/projected/73eb2584-d6fc-4453-a1fb-6df47179064b-kube-api-access-mr2fq\") pod \"nova-scheduler-0\" (UID: \"73eb2584-d6fc-4453-a1fb-6df47179064b\") " pod="openstack/nova-scheduler-0" Oct 10 19:07:33 crc kubenswrapper[4799]: I1010 19:07:33.973930 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73eb2584-d6fc-4453-a1fb-6df47179064b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"73eb2584-d6fc-4453-a1fb-6df47179064b\") " pod="openstack/nova-scheduler-0" Oct 10 19:07:34 crc kubenswrapper[4799]: I1010 19:07:34.076170 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mr2fq\" (UniqueName: \"kubernetes.io/projected/73eb2584-d6fc-4453-a1fb-6df47179064b-kube-api-access-mr2fq\") pod \"nova-scheduler-0\" (UID: \"73eb2584-d6fc-4453-a1fb-6df47179064b\") " pod="openstack/nova-scheduler-0" Oct 10 19:07:34 crc kubenswrapper[4799]: I1010 19:07:34.076220 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73eb2584-d6fc-4453-a1fb-6df47179064b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"73eb2584-d6fc-4453-a1fb-6df47179064b\") " pod="openstack/nova-scheduler-0" Oct 10 19:07:34 crc kubenswrapper[4799]: I1010 19:07:34.076253 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73eb2584-d6fc-4453-a1fb-6df47179064b-config-data\") pod \"nova-scheduler-0\" (UID: \"73eb2584-d6fc-4453-a1fb-6df47179064b\") " pod="openstack/nova-scheduler-0" Oct 10 19:07:34 crc kubenswrapper[4799]: I1010 19:07:34.081452 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73eb2584-d6fc-4453-a1fb-6df47179064b-config-data\") pod \"nova-scheduler-0\" (UID: \"73eb2584-d6fc-4453-a1fb-6df47179064b\") " pod="openstack/nova-scheduler-0" Oct 10 19:07:34 crc kubenswrapper[4799]: I1010 19:07:34.089684 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73eb2584-d6fc-4453-a1fb-6df47179064b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"73eb2584-d6fc-4453-a1fb-6df47179064b\") " pod="openstack/nova-scheduler-0" Oct 10 19:07:34 crc kubenswrapper[4799]: I1010 19:07:34.090918 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mr2fq\" (UniqueName: 
\"kubernetes.io/projected/73eb2584-d6fc-4453-a1fb-6df47179064b-kube-api-access-mr2fq\") pod \"nova-scheduler-0\" (UID: \"73eb2584-d6fc-4453-a1fb-6df47179064b\") " pod="openstack/nova-scheduler-0" Oct 10 19:07:34 crc kubenswrapper[4799]: I1010 19:07:34.213454 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 10 19:07:34 crc kubenswrapper[4799]: I1010 19:07:34.753787 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 10 19:07:34 crc kubenswrapper[4799]: W1010 19:07:34.758569 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod73eb2584_d6fc_4453_a1fb_6df47179064b.slice/crio-c5739e1b3912f53c3ab0101d79b69bc8b7f14560980d8e3a2e97460f1d34e77c WatchSource:0}: Error finding container c5739e1b3912f53c3ab0101d79b69bc8b7f14560980d8e3a2e97460f1d34e77c: Status 404 returned error can't find the container with id c5739e1b3912f53c3ab0101d79b69bc8b7f14560980d8e3a2e97460f1d34e77c Oct 10 19:07:34 crc kubenswrapper[4799]: I1010 19:07:34.810187 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"73eb2584-d6fc-4453-a1fb-6df47179064b","Type":"ContainerStarted","Data":"c5739e1b3912f53c3ab0101d79b69bc8b7f14560980d8e3a2e97460f1d34e77c"} Oct 10 19:07:35 crc kubenswrapper[4799]: I1010 19:07:35.428398 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5938ba89-d944-4772-834f-67074d54da34" path="/var/lib/kubelet/pods/5938ba89-d944-4772-834f-67074d54da34/volumes" Oct 10 19:07:35 crc kubenswrapper[4799]: I1010 19:07:35.821618 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"73eb2584-d6fc-4453-a1fb-6df47179064b","Type":"ContainerStarted","Data":"110f4e27661bc2adafdd8ba6392a70a020a6f9bc872146a735d630049b827d95"} Oct 10 19:07:35 crc kubenswrapper[4799]: I1010 19:07:35.853751 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.853725764 podStartE2EDuration="2.853725764s" podCreationTimestamp="2025-10-10 19:07:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-10 19:07:35.844476168 +0000 UTC m=+9349.352800283" watchObservedRunningTime="2025-10-10 19:07:35.853725764 +0000 UTC m=+9349.362049889" Oct 10 19:07:37 crc kubenswrapper[4799]: I1010 19:07:37.127265 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 10 19:07:37 crc kubenswrapper[4799]: I1010 19:07:37.127977 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 10 19:07:39 crc kubenswrapper[4799]: I1010 19:07:39.213782 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 10 19:07:41 crc kubenswrapper[4799]: I1010 19:07:41.290595 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Oct 10 19:07:41 crc kubenswrapper[4799]: I1010 19:07:41.302317 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Oct 10 19:07:42 crc kubenswrapper[4799]: I1010 19:07:42.127800 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 10 19:07:42 crc kubenswrapper[4799]: I1010 19:07:42.127854 
4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 10 19:07:42 crc kubenswrapper[4799]: I1010 19:07:42.174677 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 10 19:07:42 crc kubenswrapper[4799]: I1010 19:07:42.174733 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 10 19:07:43 crc kubenswrapper[4799]: I1010 19:07:43.211893 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="e1033dc6-9f93-4a9c-bba1-7ca137bbd199" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.206:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 10 19:07:43 crc kubenswrapper[4799]: I1010 19:07:43.212039 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="e1033dc6-9f93-4a9c-bba1-7ca137bbd199" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.206:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 10 19:07:43 crc kubenswrapper[4799]: I1010 19:07:43.295002 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="a8eabce3-7d36-4c68-b130-ef95aab11607" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.207:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 10 19:07:43 crc kubenswrapper[4799]: I1010 19:07:43.295124 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="a8eabce3-7d36-4c68-b130-ef95aab11607" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.207:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 10 19:07:44 crc kubenswrapper[4799]: I1010 19:07:44.214517 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 10 19:07:44 crc kubenswrapper[4799]: I1010 19:07:44.266812 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 10 19:07:45 crc kubenswrapper[4799]: I1010 19:07:45.004878 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 10 19:07:52 crc kubenswrapper[4799]: I1010 19:07:52.131455 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 10 19:07:52 crc kubenswrapper[4799]: I1010 19:07:52.132366 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 10 19:07:52 crc kubenswrapper[4799]: I1010 19:07:52.135997 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 10 19:07:52 crc kubenswrapper[4799]: I1010 19:07:52.136941 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 10 19:07:52 crc kubenswrapper[4799]: I1010 19:07:52.180337 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 10 19:07:52 crc kubenswrapper[4799]: I1010 19:07:52.182145 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 10 19:07:52 crc kubenswrapper[4799]: I1010 19:07:52.182613 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="started" pod="openstack/nova-api-0" Oct 10 19:07:52 crc kubenswrapper[4799]: I1010 19:07:52.185654 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 10 19:07:53 crc kubenswrapper[4799]: I1010 19:07:53.063296 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 10 19:07:53 crc kubenswrapper[4799]: I1010 19:07:53.068560 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.455277 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5"] Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.457550 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.460024 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.461589 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.461632 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.461648 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-cells-global-config" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.461826 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-rdlhr" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.462112 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.462117 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.491188 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5"] Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.589689 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.589940 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-ceph\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.590100 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.590260 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.590472 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-cells-global-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.590689 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.590789 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.590956 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-ssh-key\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.591061 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjtf7\" (UniqueName: \"kubernetes.io/projected/74fb1b56-dea6-4091-bc8d-0eff60bb1113-kube-api-access-gjtf7\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.591133 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-cell1-combined-ca-bundle\") pod 
\"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.591207 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.693475 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.693895 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.693974 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-ssh-key\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.694028 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjtf7\" (UniqueName: \"kubernetes.io/projected/74fb1b56-dea6-4091-bc8d-0eff60bb1113-kube-api-access-gjtf7\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.694073 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.694128 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:07:54 crc 
kubenswrapper[4799]: I1010 19:07:54.694216 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.694267 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-ceph\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.694327 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.694425 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.694519 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-cells-global-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.696102 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-cells-global-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.698522 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.702709 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-inventory\") pod 
\"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.702740 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.703260 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.704102 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.705041 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-ssh-key\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.705883 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-ceph\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.708327 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.709032 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.725893 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-gjtf7\" (UniqueName: \"kubernetes.io/projected/74fb1b56-dea6-4091-bc8d-0eff60bb1113-kube-api-access-gjtf7\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:07:54 crc kubenswrapper[4799]: I1010 19:07:54.780698 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:07:55 crc kubenswrapper[4799]: W1010 19:07:55.391708 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod74fb1b56_dea6_4091_bc8d_0eff60bb1113.slice/crio-25dcb60e53408b1c916a7f83f9c0ec769df72698e18bfc51929f56d57c9b590d WatchSource:0}: Error finding container 25dcb60e53408b1c916a7f83f9c0ec769df72698e18bfc51929f56d57c9b590d: Status 404 returned error can't find the container with id 25dcb60e53408b1c916a7f83f9c0ec769df72698e18bfc51929f56d57c9b590d Oct 10 19:07:55 crc kubenswrapper[4799]: I1010 19:07:55.401305 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5"] Oct 10 19:07:56 crc kubenswrapper[4799]: I1010 19:07:56.121303 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" event={"ID":"74fb1b56-dea6-4091-bc8d-0eff60bb1113","Type":"ContainerStarted","Data":"25dcb60e53408b1c916a7f83f9c0ec769df72698e18bfc51929f56d57c9b590d"} Oct 10 19:07:57 crc kubenswrapper[4799]: I1010 19:07:57.137117 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" event={"ID":"74fb1b56-dea6-4091-bc8d-0eff60bb1113","Type":"ContainerStarted","Data":"4184fb47ea4ef3452de8b5487bcbf725480fec940407415f4fabc39e60380805"} Oct 10 19:07:57 crc kubenswrapper[4799]: I1010 19:07:57.175079 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" podStartSLOduration=2.639856032 podStartE2EDuration="3.175054809s" podCreationTimestamp="2025-10-10 19:07:54 +0000 UTC" firstStartedPulling="2025-10-10 19:07:55.394832406 +0000 UTC m=+9368.903156531" lastFinishedPulling="2025-10-10 19:07:55.930031153 +0000 UTC m=+9369.438355308" observedRunningTime="2025-10-10 19:07:57.166000707 +0000 UTC m=+9370.674324862" watchObservedRunningTime="2025-10-10 19:07:57.175054809 +0000 UTC m=+9370.683378954" Oct 10 19:09:24 crc kubenswrapper[4799]: I1010 19:09:24.317376 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-pqkgv"] Oct 10 19:09:24 crc kubenswrapper[4799]: I1010 19:09:24.320125 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-pqkgv" Oct 10 19:09:24 crc kubenswrapper[4799]: I1010 19:09:24.344746 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pqkgv"] Oct 10 19:09:24 crc kubenswrapper[4799]: I1010 19:09:24.379670 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/580df29d-0817-48c4-8352-dc2bc21e9ee0-catalog-content\") pod \"certified-operators-pqkgv\" (UID: \"580df29d-0817-48c4-8352-dc2bc21e9ee0\") " pod="openshift-marketplace/certified-operators-pqkgv" Oct 10 19:09:24 crc kubenswrapper[4799]: I1010 19:09:24.379892 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rljk\" (UniqueName: \"kubernetes.io/projected/580df29d-0817-48c4-8352-dc2bc21e9ee0-kube-api-access-5rljk\") pod \"certified-operators-pqkgv\" (UID: \"580df29d-0817-48c4-8352-dc2bc21e9ee0\") " pod="openshift-marketplace/certified-operators-pqkgv" Oct 10 19:09:24 crc kubenswrapper[4799]: I1010 19:09:24.380132 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/580df29d-0817-48c4-8352-dc2bc21e9ee0-utilities\") pod \"certified-operators-pqkgv\" (UID: \"580df29d-0817-48c4-8352-dc2bc21e9ee0\") " pod="openshift-marketplace/certified-operators-pqkgv" Oct 10 19:09:24 crc kubenswrapper[4799]: I1010 19:09:24.481505 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rljk\" (UniqueName: \"kubernetes.io/projected/580df29d-0817-48c4-8352-dc2bc21e9ee0-kube-api-access-5rljk\") pod \"certified-operators-pqkgv\" (UID: \"580df29d-0817-48c4-8352-dc2bc21e9ee0\") " pod="openshift-marketplace/certified-operators-pqkgv" Oct 10 19:09:24 crc kubenswrapper[4799]: I1010 19:09:24.481571 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/580df29d-0817-48c4-8352-dc2bc21e9ee0-utilities\") pod \"certified-operators-pqkgv\" (UID: \"580df29d-0817-48c4-8352-dc2bc21e9ee0\") " pod="openshift-marketplace/certified-operators-pqkgv" Oct 10 19:09:24 crc kubenswrapper[4799]: I1010 19:09:24.481714 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/580df29d-0817-48c4-8352-dc2bc21e9ee0-catalog-content\") pod \"certified-operators-pqkgv\" (UID: \"580df29d-0817-48c4-8352-dc2bc21e9ee0\") " pod="openshift-marketplace/certified-operators-pqkgv" Oct 10 19:09:24 crc kubenswrapper[4799]: I1010 19:09:24.482338 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/580df29d-0817-48c4-8352-dc2bc21e9ee0-catalog-content\") pod \"certified-operators-pqkgv\" (UID: \"580df29d-0817-48c4-8352-dc2bc21e9ee0\") " pod="openshift-marketplace/certified-operators-pqkgv" Oct 10 19:09:24 crc kubenswrapper[4799]: I1010 19:09:24.482960 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/580df29d-0817-48c4-8352-dc2bc21e9ee0-utilities\") pod \"certified-operators-pqkgv\" (UID: \"580df29d-0817-48c4-8352-dc2bc21e9ee0\") " pod="openshift-marketplace/certified-operators-pqkgv" Oct 10 19:09:24 crc kubenswrapper[4799]: I1010 19:09:24.502530 4799 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-5rljk\" (UniqueName: \"kubernetes.io/projected/580df29d-0817-48c4-8352-dc2bc21e9ee0-kube-api-access-5rljk\") pod \"certified-operators-pqkgv\" (UID: \"580df29d-0817-48c4-8352-dc2bc21e9ee0\") " pod="openshift-marketplace/certified-operators-pqkgv" Oct 10 19:09:24 crc kubenswrapper[4799]: I1010 19:09:24.642185 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pqkgv" Oct 10 19:09:25 crc kubenswrapper[4799]: I1010 19:09:25.172028 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pqkgv"] Oct 10 19:09:25 crc kubenswrapper[4799]: I1010 19:09:25.382455 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pqkgv" event={"ID":"580df29d-0817-48c4-8352-dc2bc21e9ee0","Type":"ContainerStarted","Data":"7864c23ff29be775d31eb25a5f8acdd004e79e9dcd0481ae8355a24d1d782236"} Oct 10 19:09:26 crc kubenswrapper[4799]: I1010 19:09:26.413325 4799 generic.go:334] "Generic (PLEG): container finished" podID="580df29d-0817-48c4-8352-dc2bc21e9ee0" containerID="1d1c331441d98ed1176fd715cbdf9a48fbc2c5652c4c9e874787fa1548c4425a" exitCode=0 Oct 10 19:09:26 crc kubenswrapper[4799]: I1010 19:09:26.413505 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pqkgv" event={"ID":"580df29d-0817-48c4-8352-dc2bc21e9ee0","Type":"ContainerDied","Data":"1d1c331441d98ed1176fd715cbdf9a48fbc2c5652c4c9e874787fa1548c4425a"} Oct 10 19:09:27 crc kubenswrapper[4799]: I1010 19:09:27.435878 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pqkgv" event={"ID":"580df29d-0817-48c4-8352-dc2bc21e9ee0","Type":"ContainerStarted","Data":"a7b06913ec70226964ec339a771f8e05abf39560a0f13cc314092ce48587b893"} Oct 10 19:09:29 crc kubenswrapper[4799]: I1010 19:09:29.463007 4799 generic.go:334] "Generic (PLEG): container finished" podID="580df29d-0817-48c4-8352-dc2bc21e9ee0" containerID="a7b06913ec70226964ec339a771f8e05abf39560a0f13cc314092ce48587b893" exitCode=0 Oct 10 19:09:29 crc kubenswrapper[4799]: I1010 19:09:29.463048 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pqkgv" event={"ID":"580df29d-0817-48c4-8352-dc2bc21e9ee0","Type":"ContainerDied","Data":"a7b06913ec70226964ec339a771f8e05abf39560a0f13cc314092ce48587b893"} Oct 10 19:09:30 crc kubenswrapper[4799]: I1010 19:09:30.477964 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pqkgv" event={"ID":"580df29d-0817-48c4-8352-dc2bc21e9ee0","Type":"ContainerStarted","Data":"adf5521607db398fd58e10aadc8c372f16b0aecb07efeddf5403bc4fcd918a41"} Oct 10 19:09:30 crc kubenswrapper[4799]: I1010 19:09:30.503075 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-pqkgv" podStartSLOduration=2.987193158 podStartE2EDuration="6.503048511s" podCreationTimestamp="2025-10-10 19:09:24 +0000 UTC" firstStartedPulling="2025-10-10 19:09:26.417151229 +0000 UTC m=+9459.925475354" lastFinishedPulling="2025-10-10 19:09:29.933006552 +0000 UTC m=+9463.441330707" observedRunningTime="2025-10-10 19:09:30.501084483 +0000 UTC m=+9464.009408618" watchObservedRunningTime="2025-10-10 19:09:30.503048511 +0000 UTC m=+9464.011372646" Oct 10 19:09:34 crc kubenswrapper[4799]: I1010 19:09:34.642654 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/certified-operators-pqkgv" Oct 10 19:09:34 crc kubenswrapper[4799]: I1010 19:09:34.643374 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-pqkgv" Oct 10 19:09:34 crc kubenswrapper[4799]: I1010 19:09:34.731664 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-pqkgv" Oct 10 19:09:35 crc kubenswrapper[4799]: I1010 19:09:35.595097 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-pqkgv" Oct 10 19:09:35 crc kubenswrapper[4799]: I1010 19:09:35.656721 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pqkgv"] Oct 10 19:09:37 crc kubenswrapper[4799]: I1010 19:09:37.567621 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-pqkgv" podUID="580df29d-0817-48c4-8352-dc2bc21e9ee0" containerName="registry-server" containerID="cri-o://adf5521607db398fd58e10aadc8c372f16b0aecb07efeddf5403bc4fcd918a41" gracePeriod=2 Oct 10 19:09:38 crc kubenswrapper[4799]: I1010 19:09:38.079709 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pqkgv" Oct 10 19:09:38 crc kubenswrapper[4799]: I1010 19:09:38.227499 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/580df29d-0817-48c4-8352-dc2bc21e9ee0-catalog-content\") pod \"580df29d-0817-48c4-8352-dc2bc21e9ee0\" (UID: \"580df29d-0817-48c4-8352-dc2bc21e9ee0\") " Oct 10 19:09:38 crc kubenswrapper[4799]: I1010 19:09:38.227694 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/580df29d-0817-48c4-8352-dc2bc21e9ee0-utilities\") pod \"580df29d-0817-48c4-8352-dc2bc21e9ee0\" (UID: \"580df29d-0817-48c4-8352-dc2bc21e9ee0\") " Oct 10 19:09:38 crc kubenswrapper[4799]: I1010 19:09:38.228007 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5rljk\" (UniqueName: \"kubernetes.io/projected/580df29d-0817-48c4-8352-dc2bc21e9ee0-kube-api-access-5rljk\") pod \"580df29d-0817-48c4-8352-dc2bc21e9ee0\" (UID: \"580df29d-0817-48c4-8352-dc2bc21e9ee0\") " Oct 10 19:09:38 crc kubenswrapper[4799]: I1010 19:09:38.229718 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/580df29d-0817-48c4-8352-dc2bc21e9ee0-utilities" (OuterVolumeSpecName: "utilities") pod "580df29d-0817-48c4-8352-dc2bc21e9ee0" (UID: "580df29d-0817-48c4-8352-dc2bc21e9ee0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 19:09:38 crc kubenswrapper[4799]: I1010 19:09:38.253090 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/580df29d-0817-48c4-8352-dc2bc21e9ee0-kube-api-access-5rljk" (OuterVolumeSpecName: "kube-api-access-5rljk") pod "580df29d-0817-48c4-8352-dc2bc21e9ee0" (UID: "580df29d-0817-48c4-8352-dc2bc21e9ee0"). InnerVolumeSpecName "kube-api-access-5rljk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 19:09:38 crc kubenswrapper[4799]: I1010 19:09:38.288401 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/580df29d-0817-48c4-8352-dc2bc21e9ee0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "580df29d-0817-48c4-8352-dc2bc21e9ee0" (UID: "580df29d-0817-48c4-8352-dc2bc21e9ee0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 19:09:38 crc kubenswrapper[4799]: I1010 19:09:38.331260 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5rljk\" (UniqueName: \"kubernetes.io/projected/580df29d-0817-48c4-8352-dc2bc21e9ee0-kube-api-access-5rljk\") on node \"crc\" DevicePath \"\"" Oct 10 19:09:38 crc kubenswrapper[4799]: I1010 19:09:38.331313 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/580df29d-0817-48c4-8352-dc2bc21e9ee0-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 19:09:38 crc kubenswrapper[4799]: I1010 19:09:38.331336 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/580df29d-0817-48c4-8352-dc2bc21e9ee0-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 19:09:38 crc kubenswrapper[4799]: I1010 19:09:38.586672 4799 generic.go:334] "Generic (PLEG): container finished" podID="580df29d-0817-48c4-8352-dc2bc21e9ee0" containerID="adf5521607db398fd58e10aadc8c372f16b0aecb07efeddf5403bc4fcd918a41" exitCode=0 Oct 10 19:09:38 crc kubenswrapper[4799]: I1010 19:09:38.587102 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pqkgv" Oct 10 19:09:38 crc kubenswrapper[4799]: I1010 19:09:38.586957 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pqkgv" event={"ID":"580df29d-0817-48c4-8352-dc2bc21e9ee0","Type":"ContainerDied","Data":"adf5521607db398fd58e10aadc8c372f16b0aecb07efeddf5403bc4fcd918a41"} Oct 10 19:09:38 crc kubenswrapper[4799]: I1010 19:09:38.587462 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pqkgv" event={"ID":"580df29d-0817-48c4-8352-dc2bc21e9ee0","Type":"ContainerDied","Data":"7864c23ff29be775d31eb25a5f8acdd004e79e9dcd0481ae8355a24d1d782236"} Oct 10 19:09:38 crc kubenswrapper[4799]: I1010 19:09:38.587515 4799 scope.go:117] "RemoveContainer" containerID="adf5521607db398fd58e10aadc8c372f16b0aecb07efeddf5403bc4fcd918a41" Oct 10 19:09:38 crc kubenswrapper[4799]: I1010 19:09:38.637715 4799 scope.go:117] "RemoveContainer" containerID="a7b06913ec70226964ec339a771f8e05abf39560a0f13cc314092ce48587b893" Oct 10 19:09:38 crc kubenswrapper[4799]: I1010 19:09:38.650509 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pqkgv"] Oct 10 19:09:38 crc kubenswrapper[4799]: I1010 19:09:38.683658 4799 scope.go:117] "RemoveContainer" containerID="1d1c331441d98ed1176fd715cbdf9a48fbc2c5652c4c9e874787fa1548c4425a" Oct 10 19:09:38 crc kubenswrapper[4799]: I1010 19:09:38.712456 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-pqkgv"] Oct 10 19:09:38 crc kubenswrapper[4799]: I1010 19:09:38.735212 4799 scope.go:117] "RemoveContainer" containerID="adf5521607db398fd58e10aadc8c372f16b0aecb07efeddf5403bc4fcd918a41" Oct 10 19:09:38 crc kubenswrapper[4799]: E1010 19:09:38.735986 4799 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"adf5521607db398fd58e10aadc8c372f16b0aecb07efeddf5403bc4fcd918a41\": container with ID starting with adf5521607db398fd58e10aadc8c372f16b0aecb07efeddf5403bc4fcd918a41 not found: ID does not exist" containerID="adf5521607db398fd58e10aadc8c372f16b0aecb07efeddf5403bc4fcd918a41" Oct 10 19:09:38 crc kubenswrapper[4799]: I1010 19:09:38.736033 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"adf5521607db398fd58e10aadc8c372f16b0aecb07efeddf5403bc4fcd918a41"} err="failed to get container status \"adf5521607db398fd58e10aadc8c372f16b0aecb07efeddf5403bc4fcd918a41\": rpc error: code = NotFound desc = could not find container \"adf5521607db398fd58e10aadc8c372f16b0aecb07efeddf5403bc4fcd918a41\": container with ID starting with adf5521607db398fd58e10aadc8c372f16b0aecb07efeddf5403bc4fcd918a41 not found: ID does not exist" Oct 10 19:09:38 crc kubenswrapper[4799]: I1010 19:09:38.736064 4799 scope.go:117] "RemoveContainer" containerID="a7b06913ec70226964ec339a771f8e05abf39560a0f13cc314092ce48587b893" Oct 10 19:09:38 crc kubenswrapper[4799]: E1010 19:09:38.736481 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a7b06913ec70226964ec339a771f8e05abf39560a0f13cc314092ce48587b893\": container with ID starting with a7b06913ec70226964ec339a771f8e05abf39560a0f13cc314092ce48587b893 not found: ID does not exist" containerID="a7b06913ec70226964ec339a771f8e05abf39560a0f13cc314092ce48587b893" Oct 10 19:09:38 crc kubenswrapper[4799]: I1010 19:09:38.736517 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7b06913ec70226964ec339a771f8e05abf39560a0f13cc314092ce48587b893"} err="failed to get container status \"a7b06913ec70226964ec339a771f8e05abf39560a0f13cc314092ce48587b893\": rpc error: code = NotFound desc = could not find container \"a7b06913ec70226964ec339a771f8e05abf39560a0f13cc314092ce48587b893\": container with ID starting with a7b06913ec70226964ec339a771f8e05abf39560a0f13cc314092ce48587b893 not found: ID does not exist" Oct 10 19:09:38 crc kubenswrapper[4799]: I1010 19:09:38.736541 4799 scope.go:117] "RemoveContainer" containerID="1d1c331441d98ed1176fd715cbdf9a48fbc2c5652c4c9e874787fa1548c4425a" Oct 10 19:09:38 crc kubenswrapper[4799]: E1010 19:09:38.736868 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d1c331441d98ed1176fd715cbdf9a48fbc2c5652c4c9e874787fa1548c4425a\": container with ID starting with 1d1c331441d98ed1176fd715cbdf9a48fbc2c5652c4c9e874787fa1548c4425a not found: ID does not exist" containerID="1d1c331441d98ed1176fd715cbdf9a48fbc2c5652c4c9e874787fa1548c4425a" Oct 10 19:09:38 crc kubenswrapper[4799]: I1010 19:09:38.736902 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d1c331441d98ed1176fd715cbdf9a48fbc2c5652c4c9e874787fa1548c4425a"} err="failed to get container status \"1d1c331441d98ed1176fd715cbdf9a48fbc2c5652c4c9e874787fa1548c4425a\": rpc error: code = NotFound desc = could not find container \"1d1c331441d98ed1176fd715cbdf9a48fbc2c5652c4c9e874787fa1548c4425a\": container with ID starting with 1d1c331441d98ed1176fd715cbdf9a48fbc2c5652c4c9e874787fa1548c4425a not found: ID does not exist" Oct 10 19:09:39 crc kubenswrapper[4799]: I1010 19:09:39.422008 4799 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="580df29d-0817-48c4-8352-dc2bc21e9ee0" path="/var/lib/kubelet/pods/580df29d-0817-48c4-8352-dc2bc21e9ee0/volumes" Oct 10 19:09:45 crc kubenswrapper[4799]: I1010 19:09:45.248705 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 19:09:45 crc kubenswrapper[4799]: I1010 19:09:45.249460 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 19:10:15 crc kubenswrapper[4799]: I1010 19:10:15.248494 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 19:10:15 crc kubenswrapper[4799]: I1010 19:10:15.249165 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 19:10:29 crc kubenswrapper[4799]: I1010 19:10:29.031912 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-thhgl"] Oct 10 19:10:29 crc kubenswrapper[4799]: E1010 19:10:29.032846 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="580df29d-0817-48c4-8352-dc2bc21e9ee0" containerName="extract-utilities" Oct 10 19:10:29 crc kubenswrapper[4799]: I1010 19:10:29.032858 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="580df29d-0817-48c4-8352-dc2bc21e9ee0" containerName="extract-utilities" Oct 10 19:10:29 crc kubenswrapper[4799]: E1010 19:10:29.032867 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="580df29d-0817-48c4-8352-dc2bc21e9ee0" containerName="extract-content" Oct 10 19:10:29 crc kubenswrapper[4799]: I1010 19:10:29.032872 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="580df29d-0817-48c4-8352-dc2bc21e9ee0" containerName="extract-content" Oct 10 19:10:29 crc kubenswrapper[4799]: E1010 19:10:29.032896 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="580df29d-0817-48c4-8352-dc2bc21e9ee0" containerName="registry-server" Oct 10 19:10:29 crc kubenswrapper[4799]: I1010 19:10:29.032903 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="580df29d-0817-48c4-8352-dc2bc21e9ee0" containerName="registry-server" Oct 10 19:10:29 crc kubenswrapper[4799]: I1010 19:10:29.033086 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="580df29d-0817-48c4-8352-dc2bc21e9ee0" containerName="registry-server" Oct 10 19:10:29 crc kubenswrapper[4799]: I1010 19:10:29.038440 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-thhgl" Oct 10 19:10:29 crc kubenswrapper[4799]: I1010 19:10:29.054254 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-thhgl"] Oct 10 19:10:29 crc kubenswrapper[4799]: I1010 19:10:29.209282 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/668b7e96-fb6b-44f7-bc21-76f47fe127aa-utilities\") pod \"redhat-operators-thhgl\" (UID: \"668b7e96-fb6b-44f7-bc21-76f47fe127aa\") " pod="openshift-marketplace/redhat-operators-thhgl" Oct 10 19:10:29 crc kubenswrapper[4799]: I1010 19:10:29.209339 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/668b7e96-fb6b-44f7-bc21-76f47fe127aa-catalog-content\") pod \"redhat-operators-thhgl\" (UID: \"668b7e96-fb6b-44f7-bc21-76f47fe127aa\") " pod="openshift-marketplace/redhat-operators-thhgl" Oct 10 19:10:29 crc kubenswrapper[4799]: I1010 19:10:29.209431 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p89vz\" (UniqueName: \"kubernetes.io/projected/668b7e96-fb6b-44f7-bc21-76f47fe127aa-kube-api-access-p89vz\") pod \"redhat-operators-thhgl\" (UID: \"668b7e96-fb6b-44f7-bc21-76f47fe127aa\") " pod="openshift-marketplace/redhat-operators-thhgl" Oct 10 19:10:29 crc kubenswrapper[4799]: I1010 19:10:29.312106 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/668b7e96-fb6b-44f7-bc21-76f47fe127aa-utilities\") pod \"redhat-operators-thhgl\" (UID: \"668b7e96-fb6b-44f7-bc21-76f47fe127aa\") " pod="openshift-marketplace/redhat-operators-thhgl" Oct 10 19:10:29 crc kubenswrapper[4799]: I1010 19:10:29.312210 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/668b7e96-fb6b-44f7-bc21-76f47fe127aa-catalog-content\") pod \"redhat-operators-thhgl\" (UID: \"668b7e96-fb6b-44f7-bc21-76f47fe127aa\") " pod="openshift-marketplace/redhat-operators-thhgl" Oct 10 19:10:29 crc kubenswrapper[4799]: I1010 19:10:29.312325 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p89vz\" (UniqueName: \"kubernetes.io/projected/668b7e96-fb6b-44f7-bc21-76f47fe127aa-kube-api-access-p89vz\") pod \"redhat-operators-thhgl\" (UID: \"668b7e96-fb6b-44f7-bc21-76f47fe127aa\") " pod="openshift-marketplace/redhat-operators-thhgl" Oct 10 19:10:29 crc kubenswrapper[4799]: I1010 19:10:29.312859 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/668b7e96-fb6b-44f7-bc21-76f47fe127aa-utilities\") pod \"redhat-operators-thhgl\" (UID: \"668b7e96-fb6b-44f7-bc21-76f47fe127aa\") " pod="openshift-marketplace/redhat-operators-thhgl" Oct 10 19:10:29 crc kubenswrapper[4799]: I1010 19:10:29.313397 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/668b7e96-fb6b-44f7-bc21-76f47fe127aa-catalog-content\") pod \"redhat-operators-thhgl\" (UID: \"668b7e96-fb6b-44f7-bc21-76f47fe127aa\") " pod="openshift-marketplace/redhat-operators-thhgl" Oct 10 19:10:29 crc kubenswrapper[4799]: I1010 19:10:29.334805 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-p89vz\" (UniqueName: \"kubernetes.io/projected/668b7e96-fb6b-44f7-bc21-76f47fe127aa-kube-api-access-p89vz\") pod \"redhat-operators-thhgl\" (UID: \"668b7e96-fb6b-44f7-bc21-76f47fe127aa\") " pod="openshift-marketplace/redhat-operators-thhgl" Oct 10 19:10:29 crc kubenswrapper[4799]: I1010 19:10:29.368242 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-thhgl" Oct 10 19:10:29 crc kubenswrapper[4799]: I1010 19:10:29.870447 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-thhgl"] Oct 10 19:10:30 crc kubenswrapper[4799]: I1010 19:10:30.269041 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-thhgl" event={"ID":"668b7e96-fb6b-44f7-bc21-76f47fe127aa","Type":"ContainerStarted","Data":"282ffa886623830b1182dbb2df3668d221f638a0a22fea5a76392762981bfc75"} Oct 10 19:10:30 crc kubenswrapper[4799]: I1010 19:10:30.269119 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-thhgl" event={"ID":"668b7e96-fb6b-44f7-bc21-76f47fe127aa","Type":"ContainerStarted","Data":"517018a8d8bf4aeb38e672f87f40e9e1a0ab362de5a2a3950446684ca6cbb006"} Oct 10 19:10:31 crc kubenswrapper[4799]: I1010 19:10:31.300677 4799 generic.go:334] "Generic (PLEG): container finished" podID="668b7e96-fb6b-44f7-bc21-76f47fe127aa" containerID="282ffa886623830b1182dbb2df3668d221f638a0a22fea5a76392762981bfc75" exitCode=0 Oct 10 19:10:31 crc kubenswrapper[4799]: I1010 19:10:31.300787 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-thhgl" event={"ID":"668b7e96-fb6b-44f7-bc21-76f47fe127aa","Type":"ContainerDied","Data":"282ffa886623830b1182dbb2df3668d221f638a0a22fea5a76392762981bfc75"} Oct 10 19:10:32 crc kubenswrapper[4799]: I1010 19:10:32.317948 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-thhgl" event={"ID":"668b7e96-fb6b-44f7-bc21-76f47fe127aa","Type":"ContainerStarted","Data":"12ced51e162ae4c7153e0e90f7d75677027c8bc0f22b5cd091a6b042690501ee"} Oct 10 19:10:36 crc kubenswrapper[4799]: I1010 19:10:36.371643 4799 generic.go:334] "Generic (PLEG): container finished" podID="668b7e96-fb6b-44f7-bc21-76f47fe127aa" containerID="12ced51e162ae4c7153e0e90f7d75677027c8bc0f22b5cd091a6b042690501ee" exitCode=0 Oct 10 19:10:36 crc kubenswrapper[4799]: I1010 19:10:36.371742 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-thhgl" event={"ID":"668b7e96-fb6b-44f7-bc21-76f47fe127aa","Type":"ContainerDied","Data":"12ced51e162ae4c7153e0e90f7d75677027c8bc0f22b5cd091a6b042690501ee"} Oct 10 19:10:37 crc kubenswrapper[4799]: I1010 19:10:37.386143 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-thhgl" event={"ID":"668b7e96-fb6b-44f7-bc21-76f47fe127aa","Type":"ContainerStarted","Data":"c352edf0599e3b67d57e06c602ffca38aa60443aad82bd603d91de53b3eb84ef"} Oct 10 19:10:37 crc kubenswrapper[4799]: I1010 19:10:37.415113 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-thhgl" podStartSLOduration=3.781724675 podStartE2EDuration="9.415090463s" podCreationTimestamp="2025-10-10 19:10:28 +0000 UTC" firstStartedPulling="2025-10-10 19:10:31.307727626 +0000 UTC m=+9524.816051761" lastFinishedPulling="2025-10-10 19:10:36.941093424 +0000 UTC m=+9530.449417549" observedRunningTime="2025-10-10 
19:10:37.406258147 +0000 UTC m=+9530.914582272" watchObservedRunningTime="2025-10-10 19:10:37.415090463 +0000 UTC m=+9530.923414578" Oct 10 19:10:39 crc kubenswrapper[4799]: I1010 19:10:39.368524 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-thhgl" Oct 10 19:10:39 crc kubenswrapper[4799]: I1010 19:10:39.369094 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-thhgl" Oct 10 19:10:40 crc kubenswrapper[4799]: I1010 19:10:40.428423 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-thhgl" podUID="668b7e96-fb6b-44f7-bc21-76f47fe127aa" containerName="registry-server" probeResult="failure" output=< Oct 10 19:10:40 crc kubenswrapper[4799]: timeout: failed to connect service ":50051" within 1s Oct 10 19:10:40 crc kubenswrapper[4799]: > Oct 10 19:10:45 crc kubenswrapper[4799]: I1010 19:10:45.248512 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 19:10:45 crc kubenswrapper[4799]: I1010 19:10:45.249246 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 19:10:45 crc kubenswrapper[4799]: I1010 19:10:45.249329 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 19:10:45 crc kubenswrapper[4799]: I1010 19:10:45.250426 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1f5a8550e24ec4e5bea48e4d229935d178982f553c4ca4c823e783a71ccc174c"} pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 10 19:10:45 crc kubenswrapper[4799]: I1010 19:10:45.250533 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" containerID="cri-o://1f5a8550e24ec4e5bea48e4d229935d178982f553c4ca4c823e783a71ccc174c" gracePeriod=600 Oct 10 19:10:46 crc kubenswrapper[4799]: I1010 19:10:46.541376 4799 generic.go:334] "Generic (PLEG): container finished" podID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerID="1f5a8550e24ec4e5bea48e4d229935d178982f553c4ca4c823e783a71ccc174c" exitCode=0 Oct 10 19:10:46 crc kubenswrapper[4799]: I1010 19:10:46.541497 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerDied","Data":"1f5a8550e24ec4e5bea48e4d229935d178982f553c4ca4c823e783a71ccc174c"} Oct 10 19:10:46 crc kubenswrapper[4799]: I1010 19:10:46.541810 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" 
event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"e394ce15c48f6a512a9cf8ca7739698a6b4e472715126ea6e3b0f9e567e27a97"} Oct 10 19:10:46 crc kubenswrapper[4799]: I1010 19:10:46.541837 4799 scope.go:117] "RemoveContainer" containerID="464cc628232a84253f4a698dbac51619b612599e327b0d8fc8447f771bb6664a" Oct 10 19:10:50 crc kubenswrapper[4799]: I1010 19:10:50.433566 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-thhgl" podUID="668b7e96-fb6b-44f7-bc21-76f47fe127aa" containerName="registry-server" probeResult="failure" output=< Oct 10 19:10:50 crc kubenswrapper[4799]: timeout: failed to connect service ":50051" within 1s Oct 10 19:10:50 crc kubenswrapper[4799]: > Oct 10 19:10:59 crc kubenswrapper[4799]: I1010 19:10:59.862959 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-thhgl" Oct 10 19:11:00 crc kubenswrapper[4799]: I1010 19:11:00.039188 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-thhgl" Oct 10 19:11:00 crc kubenswrapper[4799]: I1010 19:11:00.198257 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-thhgl"] Oct 10 19:11:01 crc kubenswrapper[4799]: I1010 19:11:01.725660 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-thhgl" podUID="668b7e96-fb6b-44f7-bc21-76f47fe127aa" containerName="registry-server" containerID="cri-o://c352edf0599e3b67d57e06c602ffca38aa60443aad82bd603d91de53b3eb84ef" gracePeriod=2 Oct 10 19:11:02 crc kubenswrapper[4799]: I1010 19:11:02.302220 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-thhgl" Oct 10 19:11:02 crc kubenswrapper[4799]: I1010 19:11:02.380794 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/668b7e96-fb6b-44f7-bc21-76f47fe127aa-utilities\") pod \"668b7e96-fb6b-44f7-bc21-76f47fe127aa\" (UID: \"668b7e96-fb6b-44f7-bc21-76f47fe127aa\") " Oct 10 19:11:02 crc kubenswrapper[4799]: I1010 19:11:02.380882 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p89vz\" (UniqueName: \"kubernetes.io/projected/668b7e96-fb6b-44f7-bc21-76f47fe127aa-kube-api-access-p89vz\") pod \"668b7e96-fb6b-44f7-bc21-76f47fe127aa\" (UID: \"668b7e96-fb6b-44f7-bc21-76f47fe127aa\") " Oct 10 19:11:02 crc kubenswrapper[4799]: I1010 19:11:02.380916 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/668b7e96-fb6b-44f7-bc21-76f47fe127aa-catalog-content\") pod \"668b7e96-fb6b-44f7-bc21-76f47fe127aa\" (UID: \"668b7e96-fb6b-44f7-bc21-76f47fe127aa\") " Oct 10 19:11:02 crc kubenswrapper[4799]: I1010 19:11:02.383168 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/668b7e96-fb6b-44f7-bc21-76f47fe127aa-utilities" (OuterVolumeSpecName: "utilities") pod "668b7e96-fb6b-44f7-bc21-76f47fe127aa" (UID: "668b7e96-fb6b-44f7-bc21-76f47fe127aa"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 19:11:02 crc kubenswrapper[4799]: I1010 19:11:02.392040 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/668b7e96-fb6b-44f7-bc21-76f47fe127aa-kube-api-access-p89vz" (OuterVolumeSpecName: "kube-api-access-p89vz") pod "668b7e96-fb6b-44f7-bc21-76f47fe127aa" (UID: "668b7e96-fb6b-44f7-bc21-76f47fe127aa"). InnerVolumeSpecName "kube-api-access-p89vz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 19:11:02 crc kubenswrapper[4799]: I1010 19:11:02.486523 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/668b7e96-fb6b-44f7-bc21-76f47fe127aa-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 19:11:02 crc kubenswrapper[4799]: I1010 19:11:02.486819 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p89vz\" (UniqueName: \"kubernetes.io/projected/668b7e96-fb6b-44f7-bc21-76f47fe127aa-kube-api-access-p89vz\") on node \"crc\" DevicePath \"\"" Oct 10 19:11:02 crc kubenswrapper[4799]: I1010 19:11:02.492689 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/668b7e96-fb6b-44f7-bc21-76f47fe127aa-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "668b7e96-fb6b-44f7-bc21-76f47fe127aa" (UID: "668b7e96-fb6b-44f7-bc21-76f47fe127aa"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 19:11:02 crc kubenswrapper[4799]: I1010 19:11:02.588434 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/668b7e96-fb6b-44f7-bc21-76f47fe127aa-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 19:11:02 crc kubenswrapper[4799]: I1010 19:11:02.736885 4799 generic.go:334] "Generic (PLEG): container finished" podID="668b7e96-fb6b-44f7-bc21-76f47fe127aa" containerID="c352edf0599e3b67d57e06c602ffca38aa60443aad82bd603d91de53b3eb84ef" exitCode=0 Oct 10 19:11:02 crc kubenswrapper[4799]: I1010 19:11:02.736940 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-thhgl" event={"ID":"668b7e96-fb6b-44f7-bc21-76f47fe127aa","Type":"ContainerDied","Data":"c352edf0599e3b67d57e06c602ffca38aa60443aad82bd603d91de53b3eb84ef"} Oct 10 19:11:02 crc kubenswrapper[4799]: I1010 19:11:02.736965 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-thhgl" event={"ID":"668b7e96-fb6b-44f7-bc21-76f47fe127aa","Type":"ContainerDied","Data":"517018a8d8bf4aeb38e672f87f40e9e1a0ab362de5a2a3950446684ca6cbb006"} Oct 10 19:11:02 crc kubenswrapper[4799]: I1010 19:11:02.736988 4799 scope.go:117] "RemoveContainer" containerID="c352edf0599e3b67d57e06c602ffca38aa60443aad82bd603d91de53b3eb84ef" Oct 10 19:11:02 crc kubenswrapper[4799]: I1010 19:11:02.736945 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-thhgl" Oct 10 19:11:02 crc kubenswrapper[4799]: I1010 19:11:02.786798 4799 scope.go:117] "RemoveContainer" containerID="12ced51e162ae4c7153e0e90f7d75677027c8bc0f22b5cd091a6b042690501ee" Oct 10 19:11:02 crc kubenswrapper[4799]: I1010 19:11:02.790360 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-thhgl"] Oct 10 19:11:02 crc kubenswrapper[4799]: I1010 19:11:02.799148 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-thhgl"] Oct 10 19:11:02 crc kubenswrapper[4799]: I1010 19:11:02.816782 4799 scope.go:117] "RemoveContainer" containerID="282ffa886623830b1182dbb2df3668d221f638a0a22fea5a76392762981bfc75" Oct 10 19:11:02 crc kubenswrapper[4799]: I1010 19:11:02.867358 4799 scope.go:117] "RemoveContainer" containerID="c352edf0599e3b67d57e06c602ffca38aa60443aad82bd603d91de53b3eb84ef" Oct 10 19:11:02 crc kubenswrapper[4799]: E1010 19:11:02.868297 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c352edf0599e3b67d57e06c602ffca38aa60443aad82bd603d91de53b3eb84ef\": container with ID starting with c352edf0599e3b67d57e06c602ffca38aa60443aad82bd603d91de53b3eb84ef not found: ID does not exist" containerID="c352edf0599e3b67d57e06c602ffca38aa60443aad82bd603d91de53b3eb84ef" Oct 10 19:11:02 crc kubenswrapper[4799]: I1010 19:11:02.868350 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c352edf0599e3b67d57e06c602ffca38aa60443aad82bd603d91de53b3eb84ef"} err="failed to get container status \"c352edf0599e3b67d57e06c602ffca38aa60443aad82bd603d91de53b3eb84ef\": rpc error: code = NotFound desc = could not find container \"c352edf0599e3b67d57e06c602ffca38aa60443aad82bd603d91de53b3eb84ef\": container with ID starting with c352edf0599e3b67d57e06c602ffca38aa60443aad82bd603d91de53b3eb84ef not found: ID does not exist" Oct 10 19:11:02 crc kubenswrapper[4799]: I1010 19:11:02.868384 4799 scope.go:117] "RemoveContainer" containerID="12ced51e162ae4c7153e0e90f7d75677027c8bc0f22b5cd091a6b042690501ee" Oct 10 19:11:02 crc kubenswrapper[4799]: E1010 19:11:02.868817 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"12ced51e162ae4c7153e0e90f7d75677027c8bc0f22b5cd091a6b042690501ee\": container with ID starting with 12ced51e162ae4c7153e0e90f7d75677027c8bc0f22b5cd091a6b042690501ee not found: ID does not exist" containerID="12ced51e162ae4c7153e0e90f7d75677027c8bc0f22b5cd091a6b042690501ee" Oct 10 19:11:02 crc kubenswrapper[4799]: I1010 19:11:02.868856 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12ced51e162ae4c7153e0e90f7d75677027c8bc0f22b5cd091a6b042690501ee"} err="failed to get container status \"12ced51e162ae4c7153e0e90f7d75677027c8bc0f22b5cd091a6b042690501ee\": rpc error: code = NotFound desc = could not find container \"12ced51e162ae4c7153e0e90f7d75677027c8bc0f22b5cd091a6b042690501ee\": container with ID starting with 12ced51e162ae4c7153e0e90f7d75677027c8bc0f22b5cd091a6b042690501ee not found: ID does not exist" Oct 10 19:11:02 crc kubenswrapper[4799]: I1010 19:11:02.868878 4799 scope.go:117] "RemoveContainer" containerID="282ffa886623830b1182dbb2df3668d221f638a0a22fea5a76392762981bfc75" Oct 10 19:11:02 crc kubenswrapper[4799]: E1010 19:11:02.869296 4799 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"282ffa886623830b1182dbb2df3668d221f638a0a22fea5a76392762981bfc75\": container with ID starting with 282ffa886623830b1182dbb2df3668d221f638a0a22fea5a76392762981bfc75 not found: ID does not exist" containerID="282ffa886623830b1182dbb2df3668d221f638a0a22fea5a76392762981bfc75" Oct 10 19:11:02 crc kubenswrapper[4799]: I1010 19:11:02.869375 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"282ffa886623830b1182dbb2df3668d221f638a0a22fea5a76392762981bfc75"} err="failed to get container status \"282ffa886623830b1182dbb2df3668d221f638a0a22fea5a76392762981bfc75\": rpc error: code = NotFound desc = could not find container \"282ffa886623830b1182dbb2df3668d221f638a0a22fea5a76392762981bfc75\": container with ID starting with 282ffa886623830b1182dbb2df3668d221f638a0a22fea5a76392762981bfc75 not found: ID does not exist" Oct 10 19:11:03 crc kubenswrapper[4799]: I1010 19:11:03.415214 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="668b7e96-fb6b-44f7-bc21-76f47fe127aa" path="/var/lib/kubelet/pods/668b7e96-fb6b-44f7-bc21-76f47fe127aa/volumes" Oct 10 19:11:32 crc kubenswrapper[4799]: I1010 19:11:32.542311 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6chg5"] Oct 10 19:11:32 crc kubenswrapper[4799]: E1010 19:11:32.543473 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="668b7e96-fb6b-44f7-bc21-76f47fe127aa" containerName="extract-content" Oct 10 19:11:32 crc kubenswrapper[4799]: I1010 19:11:32.543494 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="668b7e96-fb6b-44f7-bc21-76f47fe127aa" containerName="extract-content" Oct 10 19:11:32 crc kubenswrapper[4799]: E1010 19:11:32.543550 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="668b7e96-fb6b-44f7-bc21-76f47fe127aa" containerName="extract-utilities" Oct 10 19:11:32 crc kubenswrapper[4799]: I1010 19:11:32.543563 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="668b7e96-fb6b-44f7-bc21-76f47fe127aa" containerName="extract-utilities" Oct 10 19:11:32 crc kubenswrapper[4799]: E1010 19:11:32.543592 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="668b7e96-fb6b-44f7-bc21-76f47fe127aa" containerName="registry-server" Oct 10 19:11:32 crc kubenswrapper[4799]: I1010 19:11:32.543604 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="668b7e96-fb6b-44f7-bc21-76f47fe127aa" containerName="registry-server" Oct 10 19:11:32 crc kubenswrapper[4799]: I1010 19:11:32.544023 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="668b7e96-fb6b-44f7-bc21-76f47fe127aa" containerName="registry-server" Oct 10 19:11:32 crc kubenswrapper[4799]: I1010 19:11:32.546905 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6chg5" Oct 10 19:11:32 crc kubenswrapper[4799]: I1010 19:11:32.560000 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6chg5"] Oct 10 19:11:32 crc kubenswrapper[4799]: I1010 19:11:32.646890 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/59b24d7f-0735-4f12-a6bb-658698d7a8f1-catalog-content\") pod \"community-operators-6chg5\" (UID: \"59b24d7f-0735-4f12-a6bb-658698d7a8f1\") " pod="openshift-marketplace/community-operators-6chg5" Oct 10 19:11:32 crc kubenswrapper[4799]: I1010 19:11:32.646937 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prgp9\" (UniqueName: \"kubernetes.io/projected/59b24d7f-0735-4f12-a6bb-658698d7a8f1-kube-api-access-prgp9\") pod \"community-operators-6chg5\" (UID: \"59b24d7f-0735-4f12-a6bb-658698d7a8f1\") " pod="openshift-marketplace/community-operators-6chg5" Oct 10 19:11:32 crc kubenswrapper[4799]: I1010 19:11:32.646997 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/59b24d7f-0735-4f12-a6bb-658698d7a8f1-utilities\") pod \"community-operators-6chg5\" (UID: \"59b24d7f-0735-4f12-a6bb-658698d7a8f1\") " pod="openshift-marketplace/community-operators-6chg5" Oct 10 19:11:32 crc kubenswrapper[4799]: I1010 19:11:32.748993 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/59b24d7f-0735-4f12-a6bb-658698d7a8f1-utilities\") pod \"community-operators-6chg5\" (UID: \"59b24d7f-0735-4f12-a6bb-658698d7a8f1\") " pod="openshift-marketplace/community-operators-6chg5" Oct 10 19:11:32 crc kubenswrapper[4799]: I1010 19:11:32.749613 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/59b24d7f-0735-4f12-a6bb-658698d7a8f1-utilities\") pod \"community-operators-6chg5\" (UID: \"59b24d7f-0735-4f12-a6bb-658698d7a8f1\") " pod="openshift-marketplace/community-operators-6chg5" Oct 10 19:11:32 crc kubenswrapper[4799]: I1010 19:11:32.749818 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/59b24d7f-0735-4f12-a6bb-658698d7a8f1-catalog-content\") pod \"community-operators-6chg5\" (UID: \"59b24d7f-0735-4f12-a6bb-658698d7a8f1\") " pod="openshift-marketplace/community-operators-6chg5" Oct 10 19:11:32 crc kubenswrapper[4799]: I1010 19:11:32.750005 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prgp9\" (UniqueName: \"kubernetes.io/projected/59b24d7f-0735-4f12-a6bb-658698d7a8f1-kube-api-access-prgp9\") pod \"community-operators-6chg5\" (UID: \"59b24d7f-0735-4f12-a6bb-658698d7a8f1\") " pod="openshift-marketplace/community-operators-6chg5" Oct 10 19:11:32 crc kubenswrapper[4799]: I1010 19:11:32.750099 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/59b24d7f-0735-4f12-a6bb-658698d7a8f1-catalog-content\") pod \"community-operators-6chg5\" (UID: \"59b24d7f-0735-4f12-a6bb-658698d7a8f1\") " pod="openshift-marketplace/community-operators-6chg5" Oct 10 19:11:32 crc kubenswrapper[4799]: I1010 19:11:32.775690 4799 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-prgp9\" (UniqueName: \"kubernetes.io/projected/59b24d7f-0735-4f12-a6bb-658698d7a8f1-kube-api-access-prgp9\") pod \"community-operators-6chg5\" (UID: \"59b24d7f-0735-4f12-a6bb-658698d7a8f1\") " pod="openshift-marketplace/community-operators-6chg5" Oct 10 19:11:32 crc kubenswrapper[4799]: I1010 19:11:32.888405 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6chg5" Oct 10 19:11:33 crc kubenswrapper[4799]: I1010 19:11:33.449135 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6chg5"] Oct 10 19:11:33 crc kubenswrapper[4799]: W1010 19:11:33.452243 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod59b24d7f_0735_4f12_a6bb_658698d7a8f1.slice/crio-ebb11606e8aa405516b0f6cf7db4d07d1579c210228e4f228ed07f692ae858ee WatchSource:0}: Error finding container ebb11606e8aa405516b0f6cf7db4d07d1579c210228e4f228ed07f692ae858ee: Status 404 returned error can't find the container with id ebb11606e8aa405516b0f6cf7db4d07d1579c210228e4f228ed07f692ae858ee Oct 10 19:11:34 crc kubenswrapper[4799]: I1010 19:11:34.177495 4799 generic.go:334] "Generic (PLEG): container finished" podID="59b24d7f-0735-4f12-a6bb-658698d7a8f1" containerID="ef6de259698e053c4d6f1efea69d715e36059ed9b675e4c2469bcab4434805dd" exitCode=0 Oct 10 19:11:34 crc kubenswrapper[4799]: I1010 19:11:34.177566 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6chg5" event={"ID":"59b24d7f-0735-4f12-a6bb-658698d7a8f1","Type":"ContainerDied","Data":"ef6de259698e053c4d6f1efea69d715e36059ed9b675e4c2469bcab4434805dd"} Oct 10 19:11:34 crc kubenswrapper[4799]: I1010 19:11:34.177959 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6chg5" event={"ID":"59b24d7f-0735-4f12-a6bb-658698d7a8f1","Type":"ContainerStarted","Data":"ebb11606e8aa405516b0f6cf7db4d07d1579c210228e4f228ed07f692ae858ee"} Oct 10 19:11:34 crc kubenswrapper[4799]: I1010 19:11:34.180456 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 10 19:11:35 crc kubenswrapper[4799]: I1010 19:11:35.196332 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6chg5" event={"ID":"59b24d7f-0735-4f12-a6bb-658698d7a8f1","Type":"ContainerStarted","Data":"44a723db3e4ff3e7526333b82a668d2349e1787466aed00a3ecf768a5e9ed65b"} Oct 10 19:11:37 crc kubenswrapper[4799]: I1010 19:11:37.228114 4799 generic.go:334] "Generic (PLEG): container finished" podID="59b24d7f-0735-4f12-a6bb-658698d7a8f1" containerID="44a723db3e4ff3e7526333b82a668d2349e1787466aed00a3ecf768a5e9ed65b" exitCode=0 Oct 10 19:11:37 crc kubenswrapper[4799]: I1010 19:11:37.228242 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6chg5" event={"ID":"59b24d7f-0735-4f12-a6bb-658698d7a8f1","Type":"ContainerDied","Data":"44a723db3e4ff3e7526333b82a668d2349e1787466aed00a3ecf768a5e9ed65b"} Oct 10 19:11:38 crc kubenswrapper[4799]: I1010 19:11:38.248206 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6chg5" event={"ID":"59b24d7f-0735-4f12-a6bb-658698d7a8f1","Type":"ContainerStarted","Data":"834da61fc3a046c2e51b89e432126a723f3bb64c36e6ba4feffe26b1bf26920c"} Oct 10 19:11:38 crc kubenswrapper[4799]: I1010 
19:11:38.282576 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6chg5" podStartSLOduration=2.807265866 podStartE2EDuration="6.282544796s" podCreationTimestamp="2025-10-10 19:11:32 +0000 UTC" firstStartedPulling="2025-10-10 19:11:34.180040587 +0000 UTC m=+9587.688364742" lastFinishedPulling="2025-10-10 19:11:37.655319527 +0000 UTC m=+9591.163643672" observedRunningTime="2025-10-10 19:11:38.270122512 +0000 UTC m=+9591.778446657" watchObservedRunningTime="2025-10-10 19:11:38.282544796 +0000 UTC m=+9591.790868921" Oct 10 19:11:42 crc kubenswrapper[4799]: I1010 19:11:42.889464 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6chg5" Oct 10 19:11:42 crc kubenswrapper[4799]: I1010 19:11:42.890282 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6chg5" Oct 10 19:11:43 crc kubenswrapper[4799]: I1010 19:11:43.177465 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6chg5" Oct 10 19:11:43 crc kubenswrapper[4799]: I1010 19:11:43.400576 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6chg5" Oct 10 19:11:43 crc kubenswrapper[4799]: I1010 19:11:43.459068 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6chg5"] Oct 10 19:11:45 crc kubenswrapper[4799]: I1010 19:11:45.360743 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-6chg5" podUID="59b24d7f-0735-4f12-a6bb-658698d7a8f1" containerName="registry-server" containerID="cri-o://834da61fc3a046c2e51b89e432126a723f3bb64c36e6ba4feffe26b1bf26920c" gracePeriod=2 Oct 10 19:11:46 crc kubenswrapper[4799]: I1010 19:11:46.375524 4799 generic.go:334] "Generic (PLEG): container finished" podID="59b24d7f-0735-4f12-a6bb-658698d7a8f1" containerID="834da61fc3a046c2e51b89e432126a723f3bb64c36e6ba4feffe26b1bf26920c" exitCode=0 Oct 10 19:11:46 crc kubenswrapper[4799]: I1010 19:11:46.375639 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6chg5" event={"ID":"59b24d7f-0735-4f12-a6bb-658698d7a8f1","Type":"ContainerDied","Data":"834da61fc3a046c2e51b89e432126a723f3bb64c36e6ba4feffe26b1bf26920c"} Oct 10 19:11:46 crc kubenswrapper[4799]: I1010 19:11:46.376083 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6chg5" event={"ID":"59b24d7f-0735-4f12-a6bb-658698d7a8f1","Type":"ContainerDied","Data":"ebb11606e8aa405516b0f6cf7db4d07d1579c210228e4f228ed07f692ae858ee"} Oct 10 19:11:46 crc kubenswrapper[4799]: I1010 19:11:46.376103 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ebb11606e8aa405516b0f6cf7db4d07d1579c210228e4f228ed07f692ae858ee" Oct 10 19:11:46 crc kubenswrapper[4799]: I1010 19:11:46.421674 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6chg5" Oct 10 19:11:46 crc kubenswrapper[4799]: I1010 19:11:46.603075 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/59b24d7f-0735-4f12-a6bb-658698d7a8f1-catalog-content\") pod \"59b24d7f-0735-4f12-a6bb-658698d7a8f1\" (UID: \"59b24d7f-0735-4f12-a6bb-658698d7a8f1\") " Oct 10 19:11:46 crc kubenswrapper[4799]: I1010 19:11:46.603199 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-prgp9\" (UniqueName: \"kubernetes.io/projected/59b24d7f-0735-4f12-a6bb-658698d7a8f1-kube-api-access-prgp9\") pod \"59b24d7f-0735-4f12-a6bb-658698d7a8f1\" (UID: \"59b24d7f-0735-4f12-a6bb-658698d7a8f1\") " Oct 10 19:11:46 crc kubenswrapper[4799]: I1010 19:11:46.603459 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/59b24d7f-0735-4f12-a6bb-658698d7a8f1-utilities\") pod \"59b24d7f-0735-4f12-a6bb-658698d7a8f1\" (UID: \"59b24d7f-0735-4f12-a6bb-658698d7a8f1\") " Oct 10 19:11:46 crc kubenswrapper[4799]: I1010 19:11:46.605800 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/59b24d7f-0735-4f12-a6bb-658698d7a8f1-utilities" (OuterVolumeSpecName: "utilities") pod "59b24d7f-0735-4f12-a6bb-658698d7a8f1" (UID: "59b24d7f-0735-4f12-a6bb-658698d7a8f1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 19:11:46 crc kubenswrapper[4799]: I1010 19:11:46.615167 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59b24d7f-0735-4f12-a6bb-658698d7a8f1-kube-api-access-prgp9" (OuterVolumeSpecName: "kube-api-access-prgp9") pod "59b24d7f-0735-4f12-a6bb-658698d7a8f1" (UID: "59b24d7f-0735-4f12-a6bb-658698d7a8f1"). InnerVolumeSpecName "kube-api-access-prgp9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 19:11:46 crc kubenswrapper[4799]: I1010 19:11:46.662897 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/59b24d7f-0735-4f12-a6bb-658698d7a8f1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "59b24d7f-0735-4f12-a6bb-658698d7a8f1" (UID: "59b24d7f-0735-4f12-a6bb-658698d7a8f1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 19:11:46 crc kubenswrapper[4799]: I1010 19:11:46.706118 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/59b24d7f-0735-4f12-a6bb-658698d7a8f1-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 19:11:46 crc kubenswrapper[4799]: I1010 19:11:46.706167 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/59b24d7f-0735-4f12-a6bb-658698d7a8f1-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 19:11:46 crc kubenswrapper[4799]: I1010 19:11:46.706183 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-prgp9\" (UniqueName: \"kubernetes.io/projected/59b24d7f-0735-4f12-a6bb-658698d7a8f1-kube-api-access-prgp9\") on node \"crc\" DevicePath \"\"" Oct 10 19:11:47 crc kubenswrapper[4799]: I1010 19:11:47.388700 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6chg5" Oct 10 19:11:47 crc kubenswrapper[4799]: I1010 19:11:47.468807 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6chg5"] Oct 10 19:11:47 crc kubenswrapper[4799]: I1010 19:11:47.489011 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-6chg5"] Oct 10 19:11:49 crc kubenswrapper[4799]: E1010 19:11:49.267893 4799 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod59b24d7f_0735_4f12_a6bb_658698d7a8f1.slice/crio-ebb11606e8aa405516b0f6cf7db4d07d1579c210228e4f228ed07f692ae858ee\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod59b24d7f_0735_4f12_a6bb_658698d7a8f1.slice\": RecentStats: unable to find data in memory cache]" Oct 10 19:11:49 crc kubenswrapper[4799]: I1010 19:11:49.422570 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59b24d7f-0735-4f12-a6bb-658698d7a8f1" path="/var/lib/kubelet/pods/59b24d7f-0735-4f12-a6bb-658698d7a8f1/volumes" Oct 10 19:11:59 crc kubenswrapper[4799]: E1010 19:11:59.559650 4799 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod59b24d7f_0735_4f12_a6bb_658698d7a8f1.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod59b24d7f_0735_4f12_a6bb_658698d7a8f1.slice/crio-ebb11606e8aa405516b0f6cf7db4d07d1579c210228e4f228ed07f692ae858ee\": RecentStats: unable to find data in memory cache]" Oct 10 19:12:09 crc kubenswrapper[4799]: E1010 19:12:09.950062 4799 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod59b24d7f_0735_4f12_a6bb_658698d7a8f1.slice/crio-ebb11606e8aa405516b0f6cf7db4d07d1579c210228e4f228ed07f692ae858ee\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod59b24d7f_0735_4f12_a6bb_658698d7a8f1.slice\": RecentStats: unable to find data in memory cache]" Oct 10 19:12:20 crc kubenswrapper[4799]: E1010 19:12:20.247188 4799 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod59b24d7f_0735_4f12_a6bb_658698d7a8f1.slice/crio-ebb11606e8aa405516b0f6cf7db4d07d1579c210228e4f228ed07f692ae858ee\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod59b24d7f_0735_4f12_a6bb_658698d7a8f1.slice\": RecentStats: unable to find data in memory cache]" Oct 10 19:12:30 crc kubenswrapper[4799]: E1010 19:12:30.601679 4799 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod59b24d7f_0735_4f12_a6bb_658698d7a8f1.slice/crio-ebb11606e8aa405516b0f6cf7db4d07d1579c210228e4f228ed07f692ae858ee\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod59b24d7f_0735_4f12_a6bb_658698d7a8f1.slice\": RecentStats: unable to find data in memory cache]" Oct 10 19:12:40 crc 
kubenswrapper[4799]: E1010 19:12:40.956005 4799 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod59b24d7f_0735_4f12_a6bb_658698d7a8f1.slice/crio-ebb11606e8aa405516b0f6cf7db4d07d1579c210228e4f228ed07f692ae858ee\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod59b24d7f_0735_4f12_a6bb_658698d7a8f1.slice\": RecentStats: unable to find data in memory cache]" Oct 10 19:13:15 crc kubenswrapper[4799]: I1010 19:13:15.248677 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 19:13:15 crc kubenswrapper[4799]: I1010 19:13:15.249335 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 19:13:45 crc kubenswrapper[4799]: I1010 19:13:45.249240 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 19:13:45 crc kubenswrapper[4799]: I1010 19:13:45.249857 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 19:14:15 crc kubenswrapper[4799]: I1010 19:14:15.248613 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 19:14:15 crc kubenswrapper[4799]: I1010 19:14:15.249613 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 19:14:15 crc kubenswrapper[4799]: I1010 19:14:15.249714 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 19:14:15 crc kubenswrapper[4799]: I1010 19:14:15.251235 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e394ce15c48f6a512a9cf8ca7739698a6b4e472715126ea6e3b0f9e567e27a97"} pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 10 19:14:15 crc kubenswrapper[4799]: I1010 19:14:15.251311 4799 kuberuntime_container.go:808] "Killing container with 
a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" containerID="cri-o://e394ce15c48f6a512a9cf8ca7739698a6b4e472715126ea6e3b0f9e567e27a97" gracePeriod=600 Oct 10 19:14:15 crc kubenswrapper[4799]: E1010 19:14:15.380588 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:14:15 crc kubenswrapper[4799]: I1010 19:14:15.409691 4799 generic.go:334] "Generic (PLEG): container finished" podID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerID="e394ce15c48f6a512a9cf8ca7739698a6b4e472715126ea6e3b0f9e567e27a97" exitCode=0 Oct 10 19:14:15 crc kubenswrapper[4799]: I1010 19:14:15.422934 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerDied","Data":"e394ce15c48f6a512a9cf8ca7739698a6b4e472715126ea6e3b0f9e567e27a97"} Oct 10 19:14:15 crc kubenswrapper[4799]: I1010 19:14:15.423009 4799 scope.go:117] "RemoveContainer" containerID="1f5a8550e24ec4e5bea48e4d229935d178982f553c4ca4c823e783a71ccc174c" Oct 10 19:14:15 crc kubenswrapper[4799]: I1010 19:14:15.425995 4799 scope.go:117] "RemoveContainer" containerID="e394ce15c48f6a512a9cf8ca7739698a6b4e472715126ea6e3b0f9e567e27a97" Oct 10 19:14:15 crc kubenswrapper[4799]: E1010 19:14:15.426646 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:14:31 crc kubenswrapper[4799]: I1010 19:14:31.402460 4799 scope.go:117] "RemoveContainer" containerID="e394ce15c48f6a512a9cf8ca7739698a6b4e472715126ea6e3b0f9e567e27a97" Oct 10 19:14:31 crc kubenswrapper[4799]: E1010 19:14:31.403282 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:14:42 crc kubenswrapper[4799]: I1010 19:14:42.402960 4799 scope.go:117] "RemoveContainer" containerID="e394ce15c48f6a512a9cf8ca7739698a6b4e472715126ea6e3b0f9e567e27a97" Oct 10 19:14:42 crc kubenswrapper[4799]: E1010 19:14:42.403850 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 
10 19:14:54 crc kubenswrapper[4799]: I1010 19:14:54.403394 4799 scope.go:117] "RemoveContainer" containerID="e394ce15c48f6a512a9cf8ca7739698a6b4e472715126ea6e3b0f9e567e27a97" Oct 10 19:14:54 crc kubenswrapper[4799]: E1010 19:14:54.404464 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:15:00 crc kubenswrapper[4799]: I1010 19:15:00.156918 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335395-75dfq"] Oct 10 19:15:00 crc kubenswrapper[4799]: E1010 19:15:00.158144 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59b24d7f-0735-4f12-a6bb-658698d7a8f1" containerName="registry-server" Oct 10 19:15:00 crc kubenswrapper[4799]: I1010 19:15:00.158166 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="59b24d7f-0735-4f12-a6bb-658698d7a8f1" containerName="registry-server" Oct 10 19:15:00 crc kubenswrapper[4799]: E1010 19:15:00.158219 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59b24d7f-0735-4f12-a6bb-658698d7a8f1" containerName="extract-content" Oct 10 19:15:00 crc kubenswrapper[4799]: I1010 19:15:00.158231 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="59b24d7f-0735-4f12-a6bb-658698d7a8f1" containerName="extract-content" Oct 10 19:15:00 crc kubenswrapper[4799]: E1010 19:15:00.158255 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59b24d7f-0735-4f12-a6bb-658698d7a8f1" containerName="extract-utilities" Oct 10 19:15:00 crc kubenswrapper[4799]: I1010 19:15:00.158267 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="59b24d7f-0735-4f12-a6bb-658698d7a8f1" containerName="extract-utilities" Oct 10 19:15:00 crc kubenswrapper[4799]: I1010 19:15:00.158626 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="59b24d7f-0735-4f12-a6bb-658698d7a8f1" containerName="registry-server" Oct 10 19:15:00 crc kubenswrapper[4799]: I1010 19:15:00.159835 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335395-75dfq" Oct 10 19:15:00 crc kubenswrapper[4799]: I1010 19:15:00.162393 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 10 19:15:00 crc kubenswrapper[4799]: I1010 19:15:00.164868 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 10 19:15:00 crc kubenswrapper[4799]: I1010 19:15:00.173050 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335395-75dfq"] Oct 10 19:15:00 crc kubenswrapper[4799]: I1010 19:15:00.296576 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n788d\" (UniqueName: \"kubernetes.io/projected/4e07c17b-a46f-424c-adac-4f8af7650e69-kube-api-access-n788d\") pod \"collect-profiles-29335395-75dfq\" (UID: \"4e07c17b-a46f-424c-adac-4f8af7650e69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335395-75dfq" Oct 10 19:15:00 crc kubenswrapper[4799]: I1010 19:15:00.296663 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4e07c17b-a46f-424c-adac-4f8af7650e69-secret-volume\") pod \"collect-profiles-29335395-75dfq\" (UID: \"4e07c17b-a46f-424c-adac-4f8af7650e69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335395-75dfq" Oct 10 19:15:00 crc kubenswrapper[4799]: I1010 19:15:00.297310 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4e07c17b-a46f-424c-adac-4f8af7650e69-config-volume\") pod \"collect-profiles-29335395-75dfq\" (UID: \"4e07c17b-a46f-424c-adac-4f8af7650e69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335395-75dfq" Oct 10 19:15:00 crc kubenswrapper[4799]: I1010 19:15:00.399888 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4e07c17b-a46f-424c-adac-4f8af7650e69-config-volume\") pod \"collect-profiles-29335395-75dfq\" (UID: \"4e07c17b-a46f-424c-adac-4f8af7650e69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335395-75dfq" Oct 10 19:15:00 crc kubenswrapper[4799]: I1010 19:15:00.399995 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n788d\" (UniqueName: \"kubernetes.io/projected/4e07c17b-a46f-424c-adac-4f8af7650e69-kube-api-access-n788d\") pod \"collect-profiles-29335395-75dfq\" (UID: \"4e07c17b-a46f-424c-adac-4f8af7650e69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335395-75dfq" Oct 10 19:15:00 crc kubenswrapper[4799]: I1010 19:15:00.400030 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4e07c17b-a46f-424c-adac-4f8af7650e69-secret-volume\") pod \"collect-profiles-29335395-75dfq\" (UID: \"4e07c17b-a46f-424c-adac-4f8af7650e69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335395-75dfq" Oct 10 19:15:00 crc kubenswrapper[4799]: I1010 19:15:00.401144 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4e07c17b-a46f-424c-adac-4f8af7650e69-config-volume\") pod 
\"collect-profiles-29335395-75dfq\" (UID: \"4e07c17b-a46f-424c-adac-4f8af7650e69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335395-75dfq" Oct 10 19:15:00 crc kubenswrapper[4799]: I1010 19:15:00.405902 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4e07c17b-a46f-424c-adac-4f8af7650e69-secret-volume\") pod \"collect-profiles-29335395-75dfq\" (UID: \"4e07c17b-a46f-424c-adac-4f8af7650e69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335395-75dfq" Oct 10 19:15:00 crc kubenswrapper[4799]: I1010 19:15:00.417333 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n788d\" (UniqueName: \"kubernetes.io/projected/4e07c17b-a46f-424c-adac-4f8af7650e69-kube-api-access-n788d\") pod \"collect-profiles-29335395-75dfq\" (UID: \"4e07c17b-a46f-424c-adac-4f8af7650e69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335395-75dfq" Oct 10 19:15:00 crc kubenswrapper[4799]: I1010 19:15:00.488331 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335395-75dfq" Oct 10 19:15:00 crc kubenswrapper[4799]: I1010 19:15:00.951129 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335395-75dfq"] Oct 10 19:15:01 crc kubenswrapper[4799]: I1010 19:15:01.062830 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335395-75dfq" event={"ID":"4e07c17b-a46f-424c-adac-4f8af7650e69","Type":"ContainerStarted","Data":"847724223d1a0ee6a81c5c960418cf96f66d1ad8092167b5f5d4e8c4942d775e"} Oct 10 19:15:02 crc kubenswrapper[4799]: I1010 19:15:02.080729 4799 generic.go:334] "Generic (PLEG): container finished" podID="4e07c17b-a46f-424c-adac-4f8af7650e69" containerID="ad847dfa099a496a8ceb4307cd091863e68a698cf9b7a63e21c00902c782a459" exitCode=0 Oct 10 19:15:02 crc kubenswrapper[4799]: I1010 19:15:02.080823 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335395-75dfq" event={"ID":"4e07c17b-a46f-424c-adac-4f8af7650e69","Type":"ContainerDied","Data":"ad847dfa099a496a8ceb4307cd091863e68a698cf9b7a63e21c00902c782a459"} Oct 10 19:15:03 crc kubenswrapper[4799]: I1010 19:15:03.537222 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335395-75dfq" Oct 10 19:15:03 crc kubenswrapper[4799]: I1010 19:15:03.734032 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4e07c17b-a46f-424c-adac-4f8af7650e69-config-volume\") pod \"4e07c17b-a46f-424c-adac-4f8af7650e69\" (UID: \"4e07c17b-a46f-424c-adac-4f8af7650e69\") " Oct 10 19:15:03 crc kubenswrapper[4799]: I1010 19:15:03.734158 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4e07c17b-a46f-424c-adac-4f8af7650e69-secret-volume\") pod \"4e07c17b-a46f-424c-adac-4f8af7650e69\" (UID: \"4e07c17b-a46f-424c-adac-4f8af7650e69\") " Oct 10 19:15:03 crc kubenswrapper[4799]: I1010 19:15:03.734210 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n788d\" (UniqueName: \"kubernetes.io/projected/4e07c17b-a46f-424c-adac-4f8af7650e69-kube-api-access-n788d\") pod \"4e07c17b-a46f-424c-adac-4f8af7650e69\" (UID: \"4e07c17b-a46f-424c-adac-4f8af7650e69\") " Oct 10 19:15:03 crc kubenswrapper[4799]: I1010 19:15:03.735825 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e07c17b-a46f-424c-adac-4f8af7650e69-config-volume" (OuterVolumeSpecName: "config-volume") pod "4e07c17b-a46f-424c-adac-4f8af7650e69" (UID: "4e07c17b-a46f-424c-adac-4f8af7650e69"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 19:15:03 crc kubenswrapper[4799]: I1010 19:15:03.740301 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e07c17b-a46f-424c-adac-4f8af7650e69-kube-api-access-n788d" (OuterVolumeSpecName: "kube-api-access-n788d") pod "4e07c17b-a46f-424c-adac-4f8af7650e69" (UID: "4e07c17b-a46f-424c-adac-4f8af7650e69"). InnerVolumeSpecName "kube-api-access-n788d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 19:15:03 crc kubenswrapper[4799]: I1010 19:15:03.754804 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e07c17b-a46f-424c-adac-4f8af7650e69-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "4e07c17b-a46f-424c-adac-4f8af7650e69" (UID: "4e07c17b-a46f-424c-adac-4f8af7650e69"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 19:15:03 crc kubenswrapper[4799]: I1010 19:15:03.836990 4799 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4e07c17b-a46f-424c-adac-4f8af7650e69-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 10 19:15:03 crc kubenswrapper[4799]: I1010 19:15:03.837017 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n788d\" (UniqueName: \"kubernetes.io/projected/4e07c17b-a46f-424c-adac-4f8af7650e69-kube-api-access-n788d\") on node \"crc\" DevicePath \"\"" Oct 10 19:15:03 crc kubenswrapper[4799]: I1010 19:15:03.837027 4799 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4e07c17b-a46f-424c-adac-4f8af7650e69-config-volume\") on node \"crc\" DevicePath \"\"" Oct 10 19:15:04 crc kubenswrapper[4799]: I1010 19:15:04.106829 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335395-75dfq" event={"ID":"4e07c17b-a46f-424c-adac-4f8af7650e69","Type":"ContainerDied","Data":"847724223d1a0ee6a81c5c960418cf96f66d1ad8092167b5f5d4e8c4942d775e"} Oct 10 19:15:04 crc kubenswrapper[4799]: I1010 19:15:04.107218 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="847724223d1a0ee6a81c5c960418cf96f66d1ad8092167b5f5d4e8c4942d775e" Oct 10 19:15:04 crc kubenswrapper[4799]: I1010 19:15:04.106974 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335395-75dfq" Oct 10 19:15:04 crc kubenswrapper[4799]: I1010 19:15:04.643098 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335350-hqw4q"] Oct 10 19:15:04 crc kubenswrapper[4799]: I1010 19:15:04.654070 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335350-hqw4q"] Oct 10 19:15:05 crc kubenswrapper[4799]: I1010 19:15:05.415318 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe898006-01ae-4272-841a-ea4e097c5dad" path="/var/lib/kubelet/pods/fe898006-01ae-4272-841a-ea4e097c5dad/volumes" Oct 10 19:15:06 crc kubenswrapper[4799]: I1010 19:15:06.403500 4799 scope.go:117] "RemoveContainer" containerID="e394ce15c48f6a512a9cf8ca7739698a6b4e472715126ea6e3b0f9e567e27a97" Oct 10 19:15:06 crc kubenswrapper[4799]: E1010 19:15:06.404535 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:15:17 crc kubenswrapper[4799]: I1010 19:15:17.911798 4799 scope.go:117] "RemoveContainer" containerID="a49d3b420281e1ceeacbbd77f0e90084601e059b92a78a6f31ec8024c8b06a55" Oct 10 19:15:19 crc kubenswrapper[4799]: I1010 19:15:19.403519 4799 scope.go:117] "RemoveContainer" containerID="e394ce15c48f6a512a9cf8ca7739698a6b4e472715126ea6e3b0f9e567e27a97" Oct 10 19:15:19 crc kubenswrapper[4799]: E1010 19:15:19.404298 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:15:34 crc kubenswrapper[4799]: I1010 19:15:34.402410 4799 scope.go:117] "RemoveContainer" containerID="e394ce15c48f6a512a9cf8ca7739698a6b4e472715126ea6e3b0f9e567e27a97" Oct 10 19:15:34 crc kubenswrapper[4799]: E1010 19:15:34.403656 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:15:39 crc kubenswrapper[4799]: I1010 19:15:39.600240 4799 generic.go:334] "Generic (PLEG): container finished" podID="74fb1b56-dea6-4091-bc8d-0eff60bb1113" containerID="4184fb47ea4ef3452de8b5487bcbf725480fec940407415f4fabc39e60380805" exitCode=0 Oct 10 19:15:39 crc kubenswrapper[4799]: I1010 19:15:39.600480 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" event={"ID":"74fb1b56-dea6-4091-bc8d-0eff60bb1113","Type":"ContainerDied","Data":"4184fb47ea4ef3452de8b5487bcbf725480fec940407415f4fabc39e60380805"} Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.253196 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.380927 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-ceph\") pod \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.381069 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-migration-ssh-key-0\") pod \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.381107 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-cell1-combined-ca-bundle\") pod \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.381202 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-inventory\") pod \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.381688 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-cells-global-config-0\") pod \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " 
Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.381742 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-cell1-compute-config-0\") pod \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.381877 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-cells-global-config-1\") pod \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.381976 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gjtf7\" (UniqueName: \"kubernetes.io/projected/74fb1b56-dea6-4091-bc8d-0eff60bb1113-kube-api-access-gjtf7\") pod \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.382015 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-ssh-key\") pod \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.382055 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-migration-ssh-key-1\") pod \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.382149 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-cell1-compute-config-1\") pod \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\" (UID: \"74fb1b56-dea6-4091-bc8d-0eff60bb1113\") " Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.396265 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-ceph" (OuterVolumeSpecName: "ceph") pod "74fb1b56-dea6-4091-bc8d-0eff60bb1113" (UID: "74fb1b56-dea6-4091-bc8d-0eff60bb1113"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.396697 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-cell1-combined-ca-bundle" (OuterVolumeSpecName: "nova-cell1-combined-ca-bundle") pod "74fb1b56-dea6-4091-bc8d-0eff60bb1113" (UID: "74fb1b56-dea6-4091-bc8d-0eff60bb1113"). InnerVolumeSpecName "nova-cell1-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.405715 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74fb1b56-dea6-4091-bc8d-0eff60bb1113-kube-api-access-gjtf7" (OuterVolumeSpecName: "kube-api-access-gjtf7") pod "74fb1b56-dea6-4091-bc8d-0eff60bb1113" (UID: "74fb1b56-dea6-4091-bc8d-0eff60bb1113"). InnerVolumeSpecName "kube-api-access-gjtf7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.414315 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "74fb1b56-dea6-4091-bc8d-0eff60bb1113" (UID: "74fb1b56-dea6-4091-bc8d-0eff60bb1113"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.419702 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "74fb1b56-dea6-4091-bc8d-0eff60bb1113" (UID: "74fb1b56-dea6-4091-bc8d-0eff60bb1113"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.420083 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "74fb1b56-dea6-4091-bc8d-0eff60bb1113" (UID: "74fb1b56-dea6-4091-bc8d-0eff60bb1113"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.424537 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-inventory" (OuterVolumeSpecName: "inventory") pod "74fb1b56-dea6-4091-bc8d-0eff60bb1113" (UID: "74fb1b56-dea6-4091-bc8d-0eff60bb1113"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.430866 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-cells-global-config-1" (OuterVolumeSpecName: "nova-cells-global-config-1") pod "74fb1b56-dea6-4091-bc8d-0eff60bb1113" (UID: "74fb1b56-dea6-4091-bc8d-0eff60bb1113"). InnerVolumeSpecName "nova-cells-global-config-1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.440244 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "74fb1b56-dea6-4091-bc8d-0eff60bb1113" (UID: "74fb1b56-dea6-4091-bc8d-0eff60bb1113"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.447150 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "74fb1b56-dea6-4091-bc8d-0eff60bb1113" (UID: "74fb1b56-dea6-4091-bc8d-0eff60bb1113"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.447657 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-cells-global-config-0" (OuterVolumeSpecName: "nova-cells-global-config-0") pod "74fb1b56-dea6-4091-bc8d-0eff60bb1113" (UID: "74fb1b56-dea6-4091-bc8d-0eff60bb1113"). InnerVolumeSpecName "nova-cells-global-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.486563 4799 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.486590 4799 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-ceph\") on node \"crc\" DevicePath \"\"" Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.486599 4799 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.486608 4799 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-cell1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.486617 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-inventory\") on node \"crc\" DevicePath \"\"" Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.486629 4799 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-cells-global-config-0\") on node \"crc\" DevicePath \"\"" Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.486639 4799 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.486648 4799 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-cells-global-config-1\") on node \"crc\" DevicePath \"\"" Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.486657 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gjtf7\" (UniqueName: \"kubernetes.io/projected/74fb1b56-dea6-4091-bc8d-0eff60bb1113-kube-api-access-gjtf7\") on node \"crc\" DevicePath \"\"" Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.486665 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.486673 4799 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: 
\"kubernetes.io/secret/74fb1b56-dea6-4091-bc8d-0eff60bb1113-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.634929 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" event={"ID":"74fb1b56-dea6-4091-bc8d-0eff60bb1113","Type":"ContainerDied","Data":"25dcb60e53408b1c916a7f83f9c0ec769df72698e18bfc51929f56d57c9b590d"} Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.635003 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="25dcb60e53408b1c916a7f83f9c0ec769df72698e18bfc51929f56d57c9b590d" Oct 10 19:15:41 crc kubenswrapper[4799]: I1010 19:15:41.635025 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5" Oct 10 19:15:47 crc kubenswrapper[4799]: I1010 19:15:47.411142 4799 scope.go:117] "RemoveContainer" containerID="e394ce15c48f6a512a9cf8ca7739698a6b4e472715126ea6e3b0f9e567e27a97" Oct 10 19:15:47 crc kubenswrapper[4799]: E1010 19:15:47.412285 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:16:02 crc kubenswrapper[4799]: I1010 19:16:02.402796 4799 scope.go:117] "RemoveContainer" containerID="e394ce15c48f6a512a9cf8ca7739698a6b4e472715126ea6e3b0f9e567e27a97" Oct 10 19:16:02 crc kubenswrapper[4799]: E1010 19:16:02.403680 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:16:17 crc kubenswrapper[4799]: I1010 19:16:17.412456 4799 scope.go:117] "RemoveContainer" containerID="e394ce15c48f6a512a9cf8ca7739698a6b4e472715126ea6e3b0f9e567e27a97" Oct 10 19:16:17 crc kubenswrapper[4799]: E1010 19:16:17.413565 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:16:28 crc kubenswrapper[4799]: I1010 19:16:28.202819 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vdlsx"] Oct 10 19:16:28 crc kubenswrapper[4799]: E1010 19:16:28.205585 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74fb1b56-dea6-4091-bc8d-0eff60bb1113" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1" Oct 10 19:16:28 crc kubenswrapper[4799]: I1010 19:16:28.205788 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="74fb1b56-dea6-4091-bc8d-0eff60bb1113" 
containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1" Oct 10 19:16:28 crc kubenswrapper[4799]: E1010 19:16:28.206041 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e07c17b-a46f-424c-adac-4f8af7650e69" containerName="collect-profiles" Oct 10 19:16:28 crc kubenswrapper[4799]: I1010 19:16:28.206170 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e07c17b-a46f-424c-adac-4f8af7650e69" containerName="collect-profiles" Oct 10 19:16:28 crc kubenswrapper[4799]: I1010 19:16:28.206750 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="74fb1b56-dea6-4091-bc8d-0eff60bb1113" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1" Oct 10 19:16:28 crc kubenswrapper[4799]: I1010 19:16:28.206960 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e07c17b-a46f-424c-adac-4f8af7650e69" containerName="collect-profiles" Oct 10 19:16:28 crc kubenswrapper[4799]: I1010 19:16:28.210037 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vdlsx" Oct 10 19:16:28 crc kubenswrapper[4799]: I1010 19:16:28.236423 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vdlsx"] Oct 10 19:16:28 crc kubenswrapper[4799]: I1010 19:16:28.250579 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56c3553a-a02e-409c-8d74-fde21179ffcd-utilities\") pod \"redhat-marketplace-vdlsx\" (UID: \"56c3553a-a02e-409c-8d74-fde21179ffcd\") " pod="openshift-marketplace/redhat-marketplace-vdlsx" Oct 10 19:16:28 crc kubenswrapper[4799]: I1010 19:16:28.251785 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56c3553a-a02e-409c-8d74-fde21179ffcd-catalog-content\") pod \"redhat-marketplace-vdlsx\" (UID: \"56c3553a-a02e-409c-8d74-fde21179ffcd\") " pod="openshift-marketplace/redhat-marketplace-vdlsx" Oct 10 19:16:28 crc kubenswrapper[4799]: I1010 19:16:28.353741 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56c3553a-a02e-409c-8d74-fde21179ffcd-utilities\") pod \"redhat-marketplace-vdlsx\" (UID: \"56c3553a-a02e-409c-8d74-fde21179ffcd\") " pod="openshift-marketplace/redhat-marketplace-vdlsx" Oct 10 19:16:28 crc kubenswrapper[4799]: I1010 19:16:28.353847 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56c3553a-a02e-409c-8d74-fde21179ffcd-catalog-content\") pod \"redhat-marketplace-vdlsx\" (UID: \"56c3553a-a02e-409c-8d74-fde21179ffcd\") " pod="openshift-marketplace/redhat-marketplace-vdlsx" Oct 10 19:16:28 crc kubenswrapper[4799]: I1010 19:16:28.353955 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bpv8h\" (UniqueName: \"kubernetes.io/projected/56c3553a-a02e-409c-8d74-fde21179ffcd-kube-api-access-bpv8h\") pod \"redhat-marketplace-vdlsx\" (UID: \"56c3553a-a02e-409c-8d74-fde21179ffcd\") " pod="openshift-marketplace/redhat-marketplace-vdlsx" Oct 10 19:16:28 crc kubenswrapper[4799]: I1010 19:16:28.354278 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56c3553a-a02e-409c-8d74-fde21179ffcd-utilities\") pod 
\"redhat-marketplace-vdlsx\" (UID: \"56c3553a-a02e-409c-8d74-fde21179ffcd\") " pod="openshift-marketplace/redhat-marketplace-vdlsx" Oct 10 19:16:28 crc kubenswrapper[4799]: I1010 19:16:28.354522 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56c3553a-a02e-409c-8d74-fde21179ffcd-catalog-content\") pod \"redhat-marketplace-vdlsx\" (UID: \"56c3553a-a02e-409c-8d74-fde21179ffcd\") " pod="openshift-marketplace/redhat-marketplace-vdlsx" Oct 10 19:16:28 crc kubenswrapper[4799]: I1010 19:16:28.403171 4799 scope.go:117] "RemoveContainer" containerID="e394ce15c48f6a512a9cf8ca7739698a6b4e472715126ea6e3b0f9e567e27a97" Oct 10 19:16:28 crc kubenswrapper[4799]: E1010 19:16:28.403625 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:16:28 crc kubenswrapper[4799]: I1010 19:16:28.455575 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bpv8h\" (UniqueName: \"kubernetes.io/projected/56c3553a-a02e-409c-8d74-fde21179ffcd-kube-api-access-bpv8h\") pod \"redhat-marketplace-vdlsx\" (UID: \"56c3553a-a02e-409c-8d74-fde21179ffcd\") " pod="openshift-marketplace/redhat-marketplace-vdlsx" Oct 10 19:16:28 crc kubenswrapper[4799]: I1010 19:16:28.481894 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bpv8h\" (UniqueName: \"kubernetes.io/projected/56c3553a-a02e-409c-8d74-fde21179ffcd-kube-api-access-bpv8h\") pod \"redhat-marketplace-vdlsx\" (UID: \"56c3553a-a02e-409c-8d74-fde21179ffcd\") " pod="openshift-marketplace/redhat-marketplace-vdlsx" Oct 10 19:16:28 crc kubenswrapper[4799]: I1010 19:16:28.547095 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vdlsx" Oct 10 19:16:29 crc kubenswrapper[4799]: I1010 19:16:29.083477 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vdlsx"] Oct 10 19:16:29 crc kubenswrapper[4799]: I1010 19:16:29.336810 4799 generic.go:334] "Generic (PLEG): container finished" podID="56c3553a-a02e-409c-8d74-fde21179ffcd" containerID="f3376d97a3b83a9e0fa1feb70ead1b2479f81b9131179c6bccadad158a271197" exitCode=0 Oct 10 19:16:29 crc kubenswrapper[4799]: I1010 19:16:29.336852 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vdlsx" event={"ID":"56c3553a-a02e-409c-8d74-fde21179ffcd","Type":"ContainerDied","Data":"f3376d97a3b83a9e0fa1feb70ead1b2479f81b9131179c6bccadad158a271197"} Oct 10 19:16:29 crc kubenswrapper[4799]: I1010 19:16:29.336879 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vdlsx" event={"ID":"56c3553a-a02e-409c-8d74-fde21179ffcd","Type":"ContainerStarted","Data":"8ae06425c2a0b393989bec4902680c00f3d038bad10ba153780cfa8e137d6a5c"} Oct 10 19:16:30 crc kubenswrapper[4799]: I1010 19:16:30.350260 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vdlsx" event={"ID":"56c3553a-a02e-409c-8d74-fde21179ffcd","Type":"ContainerStarted","Data":"80c8f4060d55df0e815937ee3374f125d4794962bb3039f9afbced8d0198be5d"} Oct 10 19:16:31 crc kubenswrapper[4799]: I1010 19:16:31.373671 4799 generic.go:334] "Generic (PLEG): container finished" podID="56c3553a-a02e-409c-8d74-fde21179ffcd" containerID="80c8f4060d55df0e815937ee3374f125d4794962bb3039f9afbced8d0198be5d" exitCode=0 Oct 10 19:16:31 crc kubenswrapper[4799]: I1010 19:16:31.373811 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vdlsx" event={"ID":"56c3553a-a02e-409c-8d74-fde21179ffcd","Type":"ContainerDied","Data":"80c8f4060d55df0e815937ee3374f125d4794962bb3039f9afbced8d0198be5d"} Oct 10 19:16:32 crc kubenswrapper[4799]: I1010 19:16:32.388308 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vdlsx" event={"ID":"56c3553a-a02e-409c-8d74-fde21179ffcd","Type":"ContainerStarted","Data":"5622d692ea7155c66a294ba82b79d9e40929a2266582d45d432e2a49de24c8ac"} Oct 10 19:16:32 crc kubenswrapper[4799]: I1010 19:16:32.414301 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vdlsx" podStartSLOduration=1.861733932 podStartE2EDuration="4.414277172s" podCreationTimestamp="2025-10-10 19:16:28 +0000 UTC" firstStartedPulling="2025-10-10 19:16:29.338761865 +0000 UTC m=+9882.847085980" lastFinishedPulling="2025-10-10 19:16:31.891305075 +0000 UTC m=+9885.399629220" observedRunningTime="2025-10-10 19:16:32.410736655 +0000 UTC m=+9885.919060780" watchObservedRunningTime="2025-10-10 19:16:32.414277172 +0000 UTC m=+9885.922601287" Oct 10 19:16:38 crc kubenswrapper[4799]: I1010 19:16:38.547497 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vdlsx" Oct 10 19:16:38 crc kubenswrapper[4799]: I1010 19:16:38.548984 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vdlsx" Oct 10 19:16:38 crc kubenswrapper[4799]: I1010 19:16:38.634823 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/redhat-marketplace-vdlsx" Oct 10 19:16:39 crc kubenswrapper[4799]: I1010 19:16:39.403877 4799 scope.go:117] "RemoveContainer" containerID="e394ce15c48f6a512a9cf8ca7739698a6b4e472715126ea6e3b0f9e567e27a97" Oct 10 19:16:39 crc kubenswrapper[4799]: E1010 19:16:39.404676 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:16:39 crc kubenswrapper[4799]: I1010 19:16:39.572620 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vdlsx" Oct 10 19:16:39 crc kubenswrapper[4799]: I1010 19:16:39.633473 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vdlsx"] Oct 10 19:16:41 crc kubenswrapper[4799]: I1010 19:16:41.549882 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vdlsx" podUID="56c3553a-a02e-409c-8d74-fde21179ffcd" containerName="registry-server" containerID="cri-o://5622d692ea7155c66a294ba82b79d9e40929a2266582d45d432e2a49de24c8ac" gracePeriod=2 Oct 10 19:16:42 crc kubenswrapper[4799]: I1010 19:16:42.149818 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vdlsx" Oct 10 19:16:42 crc kubenswrapper[4799]: I1010 19:16:42.302488 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56c3553a-a02e-409c-8d74-fde21179ffcd-catalog-content\") pod \"56c3553a-a02e-409c-8d74-fde21179ffcd\" (UID: \"56c3553a-a02e-409c-8d74-fde21179ffcd\") " Oct 10 19:16:42 crc kubenswrapper[4799]: I1010 19:16:42.302638 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bpv8h\" (UniqueName: \"kubernetes.io/projected/56c3553a-a02e-409c-8d74-fde21179ffcd-kube-api-access-bpv8h\") pod \"56c3553a-a02e-409c-8d74-fde21179ffcd\" (UID: \"56c3553a-a02e-409c-8d74-fde21179ffcd\") " Oct 10 19:16:42 crc kubenswrapper[4799]: I1010 19:16:42.302689 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56c3553a-a02e-409c-8d74-fde21179ffcd-utilities\") pod \"56c3553a-a02e-409c-8d74-fde21179ffcd\" (UID: \"56c3553a-a02e-409c-8d74-fde21179ffcd\") " Oct 10 19:16:42 crc kubenswrapper[4799]: I1010 19:16:42.303650 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56c3553a-a02e-409c-8d74-fde21179ffcd-utilities" (OuterVolumeSpecName: "utilities") pod "56c3553a-a02e-409c-8d74-fde21179ffcd" (UID: "56c3553a-a02e-409c-8d74-fde21179ffcd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 19:16:42 crc kubenswrapper[4799]: I1010 19:16:42.314558 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56c3553a-a02e-409c-8d74-fde21179ffcd-kube-api-access-bpv8h" (OuterVolumeSpecName: "kube-api-access-bpv8h") pod "56c3553a-a02e-409c-8d74-fde21179ffcd" (UID: "56c3553a-a02e-409c-8d74-fde21179ffcd"). 
InnerVolumeSpecName "kube-api-access-bpv8h". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 19:16:42 crc kubenswrapper[4799]: I1010 19:16:42.314687 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56c3553a-a02e-409c-8d74-fde21179ffcd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "56c3553a-a02e-409c-8d74-fde21179ffcd" (UID: "56c3553a-a02e-409c-8d74-fde21179ffcd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 19:16:42 crc kubenswrapper[4799]: I1010 19:16:42.405978 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56c3553a-a02e-409c-8d74-fde21179ffcd-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 19:16:42 crc kubenswrapper[4799]: I1010 19:16:42.406255 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bpv8h\" (UniqueName: \"kubernetes.io/projected/56c3553a-a02e-409c-8d74-fde21179ffcd-kube-api-access-bpv8h\") on node \"crc\" DevicePath \"\"" Oct 10 19:16:42 crc kubenswrapper[4799]: I1010 19:16:42.406270 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56c3553a-a02e-409c-8d74-fde21179ffcd-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 19:16:42 crc kubenswrapper[4799]: I1010 19:16:42.586750 4799 generic.go:334] "Generic (PLEG): container finished" podID="56c3553a-a02e-409c-8d74-fde21179ffcd" containerID="5622d692ea7155c66a294ba82b79d9e40929a2266582d45d432e2a49de24c8ac" exitCode=0 Oct 10 19:16:42 crc kubenswrapper[4799]: I1010 19:16:42.586808 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vdlsx" event={"ID":"56c3553a-a02e-409c-8d74-fde21179ffcd","Type":"ContainerDied","Data":"5622d692ea7155c66a294ba82b79d9e40929a2266582d45d432e2a49de24c8ac"} Oct 10 19:16:42 crc kubenswrapper[4799]: I1010 19:16:42.586844 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vdlsx" event={"ID":"56c3553a-a02e-409c-8d74-fde21179ffcd","Type":"ContainerDied","Data":"8ae06425c2a0b393989bec4902680c00f3d038bad10ba153780cfa8e137d6a5c"} Oct 10 19:16:42 crc kubenswrapper[4799]: I1010 19:16:42.586860 4799 scope.go:117] "RemoveContainer" containerID="5622d692ea7155c66a294ba82b79d9e40929a2266582d45d432e2a49de24c8ac" Oct 10 19:16:42 crc kubenswrapper[4799]: I1010 19:16:42.586984 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vdlsx" Oct 10 19:16:42 crc kubenswrapper[4799]: I1010 19:16:42.648897 4799 scope.go:117] "RemoveContainer" containerID="80c8f4060d55df0e815937ee3374f125d4794962bb3039f9afbced8d0198be5d" Oct 10 19:16:42 crc kubenswrapper[4799]: I1010 19:16:42.677193 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vdlsx"] Oct 10 19:16:42 crc kubenswrapper[4799]: I1010 19:16:42.704882 4799 scope.go:117] "RemoveContainer" containerID="f3376d97a3b83a9e0fa1feb70ead1b2479f81b9131179c6bccadad158a271197" Oct 10 19:16:42 crc kubenswrapper[4799]: I1010 19:16:42.724846 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vdlsx"] Oct 10 19:16:42 crc kubenswrapper[4799]: I1010 19:16:42.734808 4799 scope.go:117] "RemoveContainer" containerID="5622d692ea7155c66a294ba82b79d9e40929a2266582d45d432e2a49de24c8ac" Oct 10 19:16:42 crc kubenswrapper[4799]: E1010 19:16:42.749536 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5622d692ea7155c66a294ba82b79d9e40929a2266582d45d432e2a49de24c8ac\": container with ID starting with 5622d692ea7155c66a294ba82b79d9e40929a2266582d45d432e2a49de24c8ac not found: ID does not exist" containerID="5622d692ea7155c66a294ba82b79d9e40929a2266582d45d432e2a49de24c8ac" Oct 10 19:16:42 crc kubenswrapper[4799]: I1010 19:16:42.749576 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5622d692ea7155c66a294ba82b79d9e40929a2266582d45d432e2a49de24c8ac"} err="failed to get container status \"5622d692ea7155c66a294ba82b79d9e40929a2266582d45d432e2a49de24c8ac\": rpc error: code = NotFound desc = could not find container \"5622d692ea7155c66a294ba82b79d9e40929a2266582d45d432e2a49de24c8ac\": container with ID starting with 5622d692ea7155c66a294ba82b79d9e40929a2266582d45d432e2a49de24c8ac not found: ID does not exist" Oct 10 19:16:42 crc kubenswrapper[4799]: I1010 19:16:42.749600 4799 scope.go:117] "RemoveContainer" containerID="80c8f4060d55df0e815937ee3374f125d4794962bb3039f9afbced8d0198be5d" Oct 10 19:16:42 crc kubenswrapper[4799]: E1010 19:16:42.755993 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80c8f4060d55df0e815937ee3374f125d4794962bb3039f9afbced8d0198be5d\": container with ID starting with 80c8f4060d55df0e815937ee3374f125d4794962bb3039f9afbced8d0198be5d not found: ID does not exist" containerID="80c8f4060d55df0e815937ee3374f125d4794962bb3039f9afbced8d0198be5d" Oct 10 19:16:42 crc kubenswrapper[4799]: I1010 19:16:42.756050 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80c8f4060d55df0e815937ee3374f125d4794962bb3039f9afbced8d0198be5d"} err="failed to get container status \"80c8f4060d55df0e815937ee3374f125d4794962bb3039f9afbced8d0198be5d\": rpc error: code = NotFound desc = could not find container \"80c8f4060d55df0e815937ee3374f125d4794962bb3039f9afbced8d0198be5d\": container with ID starting with 80c8f4060d55df0e815937ee3374f125d4794962bb3039f9afbced8d0198be5d not found: ID does not exist" Oct 10 19:16:42 crc kubenswrapper[4799]: I1010 19:16:42.756082 4799 scope.go:117] "RemoveContainer" containerID="f3376d97a3b83a9e0fa1feb70ead1b2479f81b9131179c6bccadad158a271197" Oct 10 19:16:42 crc kubenswrapper[4799]: E1010 19:16:42.757059 4799 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"f3376d97a3b83a9e0fa1feb70ead1b2479f81b9131179c6bccadad158a271197\": container with ID starting with f3376d97a3b83a9e0fa1feb70ead1b2479f81b9131179c6bccadad158a271197 not found: ID does not exist" containerID="f3376d97a3b83a9e0fa1feb70ead1b2479f81b9131179c6bccadad158a271197" Oct 10 19:16:42 crc kubenswrapper[4799]: I1010 19:16:42.757115 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3376d97a3b83a9e0fa1feb70ead1b2479f81b9131179c6bccadad158a271197"} err="failed to get container status \"f3376d97a3b83a9e0fa1feb70ead1b2479f81b9131179c6bccadad158a271197\": rpc error: code = NotFound desc = could not find container \"f3376d97a3b83a9e0fa1feb70ead1b2479f81b9131179c6bccadad158a271197\": container with ID starting with f3376d97a3b83a9e0fa1feb70ead1b2479f81b9131179c6bccadad158a271197 not found: ID does not exist" Oct 10 19:16:43 crc kubenswrapper[4799]: I1010 19:16:43.423854 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56c3553a-a02e-409c-8d74-fde21179ffcd" path="/var/lib/kubelet/pods/56c3553a-a02e-409c-8d74-fde21179ffcd/volumes" Oct 10 19:16:53 crc kubenswrapper[4799]: I1010 19:16:53.403327 4799 scope.go:117] "RemoveContainer" containerID="e394ce15c48f6a512a9cf8ca7739698a6b4e472715126ea6e3b0f9e567e27a97" Oct 10 19:16:53 crc kubenswrapper[4799]: E1010 19:16:53.404632 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:17:06 crc kubenswrapper[4799]: I1010 19:17:06.407316 4799 scope.go:117] "RemoveContainer" containerID="e394ce15c48f6a512a9cf8ca7739698a6b4e472715126ea6e3b0f9e567e27a97" Oct 10 19:17:06 crc kubenswrapper[4799]: E1010 19:17:06.408671 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:17:13 crc kubenswrapper[4799]: E1010 19:17:13.995448 4799 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.145:50548->38.102.83.145:34753: write tcp 38.102.83.145:50548->38.102.83.145:34753: write: broken pipe Oct 10 19:17:18 crc kubenswrapper[4799]: E1010 19:17:18.983232 4799 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.145:42336->38.102.83.145:34753: write tcp 38.102.83.145:42336->38.102.83.145:34753: write: broken pipe Oct 10 19:17:19 crc kubenswrapper[4799]: I1010 19:17:19.403221 4799 scope.go:117] "RemoveContainer" containerID="e394ce15c48f6a512a9cf8ca7739698a6b4e472715126ea6e3b0f9e567e27a97" Oct 10 19:17:19 crc kubenswrapper[4799]: E1010 19:17:19.405470 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:17:30 crc kubenswrapper[4799]: I1010 19:17:30.402879 4799 scope.go:117] "RemoveContainer" containerID="e394ce15c48f6a512a9cf8ca7739698a6b4e472715126ea6e3b0f9e567e27a97" Oct 10 19:17:30 crc kubenswrapper[4799]: E1010 19:17:30.403902 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:17:45 crc kubenswrapper[4799]: I1010 19:17:45.403926 4799 scope.go:117] "RemoveContainer" containerID="e394ce15c48f6a512a9cf8ca7739698a6b4e472715126ea6e3b0f9e567e27a97" Oct 10 19:17:45 crc kubenswrapper[4799]: E1010 19:17:45.405494 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:17:59 crc kubenswrapper[4799]: I1010 19:17:59.404981 4799 scope.go:117] "RemoveContainer" containerID="e394ce15c48f6a512a9cf8ca7739698a6b4e472715126ea6e3b0f9e567e27a97" Oct 10 19:17:59 crc kubenswrapper[4799]: E1010 19:17:59.406440 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:18:06 crc kubenswrapper[4799]: I1010 19:18:06.702563 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-copy-data"] Oct 10 19:18:06 crc kubenswrapper[4799]: I1010 19:18:06.704527 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/mariadb-copy-data" podUID="dac860f8-b331-42c2-8706-1db45e4285fe" containerName="adoption" containerID="cri-o://52cf81e369b3402a9e64209e361a8b4a6788083f3959698665b23f522f103326" gracePeriod=30 Oct 10 19:18:10 crc kubenswrapper[4799]: I1010 19:18:10.402411 4799 scope.go:117] "RemoveContainer" containerID="e394ce15c48f6a512a9cf8ca7739698a6b4e472715126ea6e3b0f9e567e27a97" Oct 10 19:18:10 crc kubenswrapper[4799]: E1010 19:18:10.403939 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:18:18 crc kubenswrapper[4799]: I1010 19:18:18.380090 4799 scope.go:117] "RemoveContainer" 
containerID="44a723db3e4ff3e7526333b82a668d2349e1787466aed00a3ecf768a5e9ed65b" Oct 10 19:18:18 crc kubenswrapper[4799]: I1010 19:18:18.432185 4799 scope.go:117] "RemoveContainer" containerID="ef6de259698e053c4d6f1efea69d715e36059ed9b675e4c2469bcab4434805dd" Oct 10 19:18:18 crc kubenswrapper[4799]: I1010 19:18:18.505383 4799 scope.go:117] "RemoveContainer" containerID="834da61fc3a046c2e51b89e432126a723f3bb64c36e6ba4feffe26b1bf26920c" Oct 10 19:18:23 crc kubenswrapper[4799]: I1010 19:18:23.403425 4799 scope.go:117] "RemoveContainer" containerID="e394ce15c48f6a512a9cf8ca7739698a6b4e472715126ea6e3b0f9e567e27a97" Oct 10 19:18:23 crc kubenswrapper[4799]: E1010 19:18:23.404741 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:18:37 crc kubenswrapper[4799]: I1010 19:18:37.127245 4799 generic.go:334] "Generic (PLEG): container finished" podID="dac860f8-b331-42c2-8706-1db45e4285fe" containerID="52cf81e369b3402a9e64209e361a8b4a6788083f3959698665b23f522f103326" exitCode=137 Oct 10 19:18:37 crc kubenswrapper[4799]: I1010 19:18:37.127957 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"dac860f8-b331-42c2-8706-1db45e4285fe","Type":"ContainerDied","Data":"52cf81e369b3402a9e64209e361a8b4a6788083f3959698665b23f522f103326"} Oct 10 19:18:37 crc kubenswrapper[4799]: I1010 19:18:37.326233 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data" Oct 10 19:18:37 crc kubenswrapper[4799]: I1010 19:18:37.374982 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mariadb-data\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f1e2244a-acf8-447d-bca8-5ca755b37f8c\") pod \"dac860f8-b331-42c2-8706-1db45e4285fe\" (UID: \"dac860f8-b331-42c2-8706-1db45e4285fe\") " Oct 10 19:18:37 crc kubenswrapper[4799]: I1010 19:18:37.375110 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2xc64\" (UniqueName: \"kubernetes.io/projected/dac860f8-b331-42c2-8706-1db45e4285fe-kube-api-access-2xc64\") pod \"dac860f8-b331-42c2-8706-1db45e4285fe\" (UID: \"dac860f8-b331-42c2-8706-1db45e4285fe\") " Oct 10 19:18:37 crc kubenswrapper[4799]: I1010 19:18:37.382962 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dac860f8-b331-42c2-8706-1db45e4285fe-kube-api-access-2xc64" (OuterVolumeSpecName: "kube-api-access-2xc64") pod "dac860f8-b331-42c2-8706-1db45e4285fe" (UID: "dac860f8-b331-42c2-8706-1db45e4285fe"). InnerVolumeSpecName "kube-api-access-2xc64". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 19:18:37 crc kubenswrapper[4799]: I1010 19:18:37.403881 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f1e2244a-acf8-447d-bca8-5ca755b37f8c" (OuterVolumeSpecName: "mariadb-data") pod "dac860f8-b331-42c2-8706-1db45e4285fe" (UID: "dac860f8-b331-42c2-8706-1db45e4285fe"). InnerVolumeSpecName "pvc-f1e2244a-acf8-447d-bca8-5ca755b37f8c". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 10 19:18:37 crc kubenswrapper[4799]: I1010 19:18:37.477228 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2xc64\" (UniqueName: \"kubernetes.io/projected/dac860f8-b331-42c2-8706-1db45e4285fe-kube-api-access-2xc64\") on node \"crc\" DevicePath \"\"" Oct 10 19:18:37 crc kubenswrapper[4799]: I1010 19:18:37.477289 4799 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-f1e2244a-acf8-447d-bca8-5ca755b37f8c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f1e2244a-acf8-447d-bca8-5ca755b37f8c\") on node \"crc\" " Oct 10 19:18:37 crc kubenswrapper[4799]: I1010 19:18:37.513851 4799 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Oct 10 19:18:37 crc kubenswrapper[4799]: I1010 19:18:37.514072 4799 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-f1e2244a-acf8-447d-bca8-5ca755b37f8c" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f1e2244a-acf8-447d-bca8-5ca755b37f8c") on node "crc" Oct 10 19:18:37 crc kubenswrapper[4799]: I1010 19:18:37.579423 4799 reconciler_common.go:293] "Volume detached for volume \"pvc-f1e2244a-acf8-447d-bca8-5ca755b37f8c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f1e2244a-acf8-447d-bca8-5ca755b37f8c\") on node \"crc\" DevicePath \"\"" Oct 10 19:18:38 crc kubenswrapper[4799]: I1010 19:18:38.144174 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"dac860f8-b331-42c2-8706-1db45e4285fe","Type":"ContainerDied","Data":"9b172d09bfd9bb0ad75cec313f803b7cc704ba2026eaeee50d15b87410163d8a"} Oct 10 19:18:38 crc kubenswrapper[4799]: I1010 19:18:38.144244 4799 scope.go:117] "RemoveContainer" containerID="52cf81e369b3402a9e64209e361a8b4a6788083f3959698665b23f522f103326" Oct 10 19:18:38 crc kubenswrapper[4799]: I1010 19:18:38.144249 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-copy-data" Oct 10 19:18:38 crc kubenswrapper[4799]: I1010 19:18:38.219573 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-copy-data"] Oct 10 19:18:38 crc kubenswrapper[4799]: I1010 19:18:38.232542 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-copy-data"] Oct 10 19:18:38 crc kubenswrapper[4799]: I1010 19:18:38.403165 4799 scope.go:117] "RemoveContainer" containerID="e394ce15c48f6a512a9cf8ca7739698a6b4e472715126ea6e3b0f9e567e27a97" Oct 10 19:18:38 crc kubenswrapper[4799]: E1010 19:18:38.403630 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:18:38 crc kubenswrapper[4799]: I1010 19:18:38.819325 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-copy-data"] Oct 10 19:18:38 crc kubenswrapper[4799]: I1010 19:18:38.819557 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-copy-data" podUID="dad0e268-f7c7-4e98-a300-9943db4ae46b" containerName="adoption" containerID="cri-o://43d7fb51961135812355c5249d7be1a2c2670f6fc9821e4c30b0041592525a7a" gracePeriod=30 Oct 10 19:18:39 crc kubenswrapper[4799]: I1010 19:18:39.421819 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dac860f8-b331-42c2-8706-1db45e4285fe" path="/var/lib/kubelet/pods/dac860f8-b331-42c2-8706-1db45e4285fe/volumes" Oct 10 19:18:50 crc kubenswrapper[4799]: I1010 19:18:50.402385 4799 scope.go:117] "RemoveContainer" containerID="e394ce15c48f6a512a9cf8ca7739698a6b4e472715126ea6e3b0f9e567e27a97" Oct 10 19:18:50 crc kubenswrapper[4799]: E1010 19:18:50.403524 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:19:01 crc kubenswrapper[4799]: I1010 19:19:01.404200 4799 scope.go:117] "RemoveContainer" containerID="e394ce15c48f6a512a9cf8ca7739698a6b4e472715126ea6e3b0f9e567e27a97" Oct 10 19:19:01 crc kubenswrapper[4799]: E1010 19:19:01.405409 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:19:09 crc kubenswrapper[4799]: I1010 19:19:09.408283 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-copy-data" Oct 10 19:19:09 crc kubenswrapper[4799]: I1010 19:19:09.532855 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/dad0e268-f7c7-4e98-a300-9943db4ae46b-ovn-data-cert\") pod \"dad0e268-f7c7-4e98-a300-9943db4ae46b\" (UID: \"dad0e268-f7c7-4e98-a300-9943db4ae46b\") " Oct 10 19:19:09 crc kubenswrapper[4799]: I1010 19:19:09.533597 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-data\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3bddd74c-598e-4193-8b05-454847ddae91\") pod \"dad0e268-f7c7-4e98-a300-9943db4ae46b\" (UID: \"dad0e268-f7c7-4e98-a300-9943db4ae46b\") " Oct 10 19:19:09 crc kubenswrapper[4799]: I1010 19:19:09.533665 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p9mmw\" (UniqueName: \"kubernetes.io/projected/dad0e268-f7c7-4e98-a300-9943db4ae46b-kube-api-access-p9mmw\") pod \"dad0e268-f7c7-4e98-a300-9943db4ae46b\" (UID: \"dad0e268-f7c7-4e98-a300-9943db4ae46b\") " Oct 10 19:19:09 crc kubenswrapper[4799]: I1010 19:19:09.540110 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dad0e268-f7c7-4e98-a300-9943db4ae46b-kube-api-access-p9mmw" (OuterVolumeSpecName: "kube-api-access-p9mmw") pod "dad0e268-f7c7-4e98-a300-9943db4ae46b" (UID: "dad0e268-f7c7-4e98-a300-9943db4ae46b"). InnerVolumeSpecName "kube-api-access-p9mmw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 19:19:09 crc kubenswrapper[4799]: I1010 19:19:09.544866 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dad0e268-f7c7-4e98-a300-9943db4ae46b-ovn-data-cert" (OuterVolumeSpecName: "ovn-data-cert") pod "dad0e268-f7c7-4e98-a300-9943db4ae46b" (UID: "dad0e268-f7c7-4e98-a300-9943db4ae46b"). InnerVolumeSpecName "ovn-data-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 10 19:19:09 crc kubenswrapper[4799]: I1010 19:19:09.557871 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3bddd74c-598e-4193-8b05-454847ddae91" (OuterVolumeSpecName: "ovn-data") pod "dad0e268-f7c7-4e98-a300-9943db4ae46b" (UID: "dad0e268-f7c7-4e98-a300-9943db4ae46b"). InnerVolumeSpecName "pvc-3bddd74c-598e-4193-8b05-454847ddae91". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 10 19:19:09 crc kubenswrapper[4799]: I1010 19:19:09.581290 4799 generic.go:334] "Generic (PLEG): container finished" podID="dad0e268-f7c7-4e98-a300-9943db4ae46b" containerID="43d7fb51961135812355c5249d7be1a2c2670f6fc9821e4c30b0041592525a7a" exitCode=137 Oct 10 19:19:09 crc kubenswrapper[4799]: I1010 19:19:09.581335 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"dad0e268-f7c7-4e98-a300-9943db4ae46b","Type":"ContainerDied","Data":"43d7fb51961135812355c5249d7be1a2c2670f6fc9821e4c30b0041592525a7a"} Oct 10 19:19:09 crc kubenswrapper[4799]: I1010 19:19:09.581359 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"dad0e268-f7c7-4e98-a300-9943db4ae46b","Type":"ContainerDied","Data":"ee69fd33a1f33f0a6d50fa05d31f05b244efe65b9aedbeb4add795d4e34e57c8"} Oct 10 19:19:09 crc kubenswrapper[4799]: I1010 19:19:09.581375 4799 scope.go:117] "RemoveContainer" containerID="43d7fb51961135812355c5249d7be1a2c2670f6fc9821e4c30b0041592525a7a" Oct 10 19:19:09 crc kubenswrapper[4799]: I1010 19:19:09.581498 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data" Oct 10 19:19:09 crc kubenswrapper[4799]: I1010 19:19:09.636331 4799 reconciler_common.go:293] "Volume detached for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/dad0e268-f7c7-4e98-a300-9943db4ae46b-ovn-data-cert\") on node \"crc\" DevicePath \"\"" Oct 10 19:19:09 crc kubenswrapper[4799]: I1010 19:19:09.637104 4799 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-3bddd74c-598e-4193-8b05-454847ddae91\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3bddd74c-598e-4193-8b05-454847ddae91\") on node \"crc\" " Oct 10 19:19:09 crc kubenswrapper[4799]: I1010 19:19:09.637361 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p9mmw\" (UniqueName: \"kubernetes.io/projected/dad0e268-f7c7-4e98-a300-9943db4ae46b-kube-api-access-p9mmw\") on node \"crc\" DevicePath \"\"" Oct 10 19:19:09 crc kubenswrapper[4799]: I1010 19:19:09.650844 4799 scope.go:117] "RemoveContainer" containerID="43d7fb51961135812355c5249d7be1a2c2670f6fc9821e4c30b0041592525a7a" Oct 10 19:19:09 crc kubenswrapper[4799]: E1010 19:19:09.651422 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43d7fb51961135812355c5249d7be1a2c2670f6fc9821e4c30b0041592525a7a\": container with ID starting with 43d7fb51961135812355c5249d7be1a2c2670f6fc9821e4c30b0041592525a7a not found: ID does not exist" containerID="43d7fb51961135812355c5249d7be1a2c2670f6fc9821e4c30b0041592525a7a" Oct 10 19:19:09 crc kubenswrapper[4799]: I1010 19:19:09.651465 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43d7fb51961135812355c5249d7be1a2c2670f6fc9821e4c30b0041592525a7a"} err="failed to get container status \"43d7fb51961135812355c5249d7be1a2c2670f6fc9821e4c30b0041592525a7a\": rpc error: code = NotFound desc = could not find container \"43d7fb51961135812355c5249d7be1a2c2670f6fc9821e4c30b0041592525a7a\": container with ID starting with 43d7fb51961135812355c5249d7be1a2c2670f6fc9821e4c30b0041592525a7a not found: ID does not exist" Oct 10 19:19:09 crc kubenswrapper[4799]: I1010 19:19:09.661537 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-copy-data"] Oct 10 19:19:09 crc kubenswrapper[4799]: I1010 
19:19:09.670099 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-copy-data"] Oct 10 19:19:09 crc kubenswrapper[4799]: I1010 19:19:09.672826 4799 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Oct 10 19:19:09 crc kubenswrapper[4799]: I1010 19:19:09.672965 4799 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-3bddd74c-598e-4193-8b05-454847ddae91" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3bddd74c-598e-4193-8b05-454847ddae91") on node "crc" Oct 10 19:19:09 crc kubenswrapper[4799]: I1010 19:19:09.740027 4799 reconciler_common.go:293] "Volume detached for volume \"pvc-3bddd74c-598e-4193-8b05-454847ddae91\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3bddd74c-598e-4193-8b05-454847ddae91\") on node \"crc\" DevicePath \"\"" Oct 10 19:19:11 crc kubenswrapper[4799]: I1010 19:19:11.428886 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dad0e268-f7c7-4e98-a300-9943db4ae46b" path="/var/lib/kubelet/pods/dad0e268-f7c7-4e98-a300-9943db4ae46b/volumes" Oct 10 19:19:12 crc kubenswrapper[4799]: I1010 19:19:12.403421 4799 scope.go:117] "RemoveContainer" containerID="e394ce15c48f6a512a9cf8ca7739698a6b4e472715126ea6e3b0f9e567e27a97" Oct 10 19:19:12 crc kubenswrapper[4799]: E1010 19:19:12.404235 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:19:26 crc kubenswrapper[4799]: I1010 19:19:26.402910 4799 scope.go:117] "RemoveContainer" containerID="e394ce15c48f6a512a9cf8ca7739698a6b4e472715126ea6e3b0f9e567e27a97" Oct 10 19:19:26 crc kubenswrapper[4799]: I1010 19:19:26.816826 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"e4a70c95dd2f99d68bb76ad34b45e9cb4ce8bb671fad71218da22698541986e1"} Oct 10 19:19:29 crc kubenswrapper[4799]: I1010 19:19:29.320964 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hn4ns"] Oct 10 19:19:29 crc kubenswrapper[4799]: E1010 19:19:29.324711 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56c3553a-a02e-409c-8d74-fde21179ffcd" containerName="extract-content" Oct 10 19:19:29 crc kubenswrapper[4799]: I1010 19:19:29.324931 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="56c3553a-a02e-409c-8d74-fde21179ffcd" containerName="extract-content" Oct 10 19:19:29 crc kubenswrapper[4799]: E1010 19:19:29.325076 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dad0e268-f7c7-4e98-a300-9943db4ae46b" containerName="adoption" Oct 10 19:19:29 crc kubenswrapper[4799]: I1010 19:19:29.325224 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="dad0e268-f7c7-4e98-a300-9943db4ae46b" containerName="adoption" Oct 10 19:19:29 crc kubenswrapper[4799]: E1010 19:19:29.325376 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56c3553a-a02e-409c-8d74-fde21179ffcd" containerName="registry-server" Oct 10 19:19:29 crc kubenswrapper[4799]: I1010 
19:19:29.325511 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="56c3553a-a02e-409c-8d74-fde21179ffcd" containerName="registry-server" Oct 10 19:19:29 crc kubenswrapper[4799]: E1010 19:19:29.325647 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56c3553a-a02e-409c-8d74-fde21179ffcd" containerName="extract-utilities" Oct 10 19:19:29 crc kubenswrapper[4799]: I1010 19:19:29.325805 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="56c3553a-a02e-409c-8d74-fde21179ffcd" containerName="extract-utilities" Oct 10 19:19:29 crc kubenswrapper[4799]: E1010 19:19:29.325959 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dac860f8-b331-42c2-8706-1db45e4285fe" containerName="adoption" Oct 10 19:19:29 crc kubenswrapper[4799]: I1010 19:19:29.326105 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="dac860f8-b331-42c2-8706-1db45e4285fe" containerName="adoption" Oct 10 19:19:29 crc kubenswrapper[4799]: I1010 19:19:29.326639 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="56c3553a-a02e-409c-8d74-fde21179ffcd" containerName="registry-server" Oct 10 19:19:29 crc kubenswrapper[4799]: I1010 19:19:29.326828 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="dac860f8-b331-42c2-8706-1db45e4285fe" containerName="adoption" Oct 10 19:19:29 crc kubenswrapper[4799]: I1010 19:19:29.326979 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="dad0e268-f7c7-4e98-a300-9943db4ae46b" containerName="adoption" Oct 10 19:19:29 crc kubenswrapper[4799]: I1010 19:19:29.331750 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hn4ns" Oct 10 19:19:29 crc kubenswrapper[4799]: I1010 19:19:29.347995 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hn4ns"] Oct 10 19:19:29 crc kubenswrapper[4799]: I1010 19:19:29.468006 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7nc7z\" (UniqueName: \"kubernetes.io/projected/aee05748-f1f8-4640-98d9-abc717034f1f-kube-api-access-7nc7z\") pod \"certified-operators-hn4ns\" (UID: \"aee05748-f1f8-4640-98d9-abc717034f1f\") " pod="openshift-marketplace/certified-operators-hn4ns" Oct 10 19:19:29 crc kubenswrapper[4799]: I1010 19:19:29.468410 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aee05748-f1f8-4640-98d9-abc717034f1f-utilities\") pod \"certified-operators-hn4ns\" (UID: \"aee05748-f1f8-4640-98d9-abc717034f1f\") " pod="openshift-marketplace/certified-operators-hn4ns" Oct 10 19:19:29 crc kubenswrapper[4799]: I1010 19:19:29.468525 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aee05748-f1f8-4640-98d9-abc717034f1f-catalog-content\") pod \"certified-operators-hn4ns\" (UID: \"aee05748-f1f8-4640-98d9-abc717034f1f\") " pod="openshift-marketplace/certified-operators-hn4ns" Oct 10 19:19:29 crc kubenswrapper[4799]: I1010 19:19:29.571117 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7nc7z\" (UniqueName: \"kubernetes.io/projected/aee05748-f1f8-4640-98d9-abc717034f1f-kube-api-access-7nc7z\") pod \"certified-operators-hn4ns\" (UID: \"aee05748-f1f8-4640-98d9-abc717034f1f\") " pod="openshift-marketplace/certified-operators-hn4ns" Oct 10 
19:19:29 crc kubenswrapper[4799]: I1010 19:19:29.571226 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aee05748-f1f8-4640-98d9-abc717034f1f-utilities\") pod \"certified-operators-hn4ns\" (UID: \"aee05748-f1f8-4640-98d9-abc717034f1f\") " pod="openshift-marketplace/certified-operators-hn4ns" Oct 10 19:19:29 crc kubenswrapper[4799]: I1010 19:19:29.571399 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aee05748-f1f8-4640-98d9-abc717034f1f-catalog-content\") pod \"certified-operators-hn4ns\" (UID: \"aee05748-f1f8-4640-98d9-abc717034f1f\") " pod="openshift-marketplace/certified-operators-hn4ns" Oct 10 19:19:29 crc kubenswrapper[4799]: I1010 19:19:29.571723 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aee05748-f1f8-4640-98d9-abc717034f1f-utilities\") pod \"certified-operators-hn4ns\" (UID: \"aee05748-f1f8-4640-98d9-abc717034f1f\") " pod="openshift-marketplace/certified-operators-hn4ns" Oct 10 19:19:29 crc kubenswrapper[4799]: I1010 19:19:29.571982 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aee05748-f1f8-4640-98d9-abc717034f1f-catalog-content\") pod \"certified-operators-hn4ns\" (UID: \"aee05748-f1f8-4640-98d9-abc717034f1f\") " pod="openshift-marketplace/certified-operators-hn4ns" Oct 10 19:19:29 crc kubenswrapper[4799]: I1010 19:19:29.592411 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7nc7z\" (UniqueName: \"kubernetes.io/projected/aee05748-f1f8-4640-98d9-abc717034f1f-kube-api-access-7nc7z\") pod \"certified-operators-hn4ns\" (UID: \"aee05748-f1f8-4640-98d9-abc717034f1f\") " pod="openshift-marketplace/certified-operators-hn4ns" Oct 10 19:19:29 crc kubenswrapper[4799]: I1010 19:19:29.696879 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hn4ns" Oct 10 19:19:30 crc kubenswrapper[4799]: I1010 19:19:30.794262 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hn4ns"] Oct 10 19:19:30 crc kubenswrapper[4799]: W1010 19:19:30.808015 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaee05748_f1f8_4640_98d9_abc717034f1f.slice/crio-58995809db53672e72e69c1f9775443f97bdfa8ac19d8035b517b434e5ec0309 WatchSource:0}: Error finding container 58995809db53672e72e69c1f9775443f97bdfa8ac19d8035b517b434e5ec0309: Status 404 returned error can't find the container with id 58995809db53672e72e69c1f9775443f97bdfa8ac19d8035b517b434e5ec0309 Oct 10 19:19:30 crc kubenswrapper[4799]: I1010 19:19:30.870942 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hn4ns" event={"ID":"aee05748-f1f8-4640-98d9-abc717034f1f","Type":"ContainerStarted","Data":"58995809db53672e72e69c1f9775443f97bdfa8ac19d8035b517b434e5ec0309"} Oct 10 19:19:31 crc kubenswrapper[4799]: I1010 19:19:31.886948 4799 generic.go:334] "Generic (PLEG): container finished" podID="aee05748-f1f8-4640-98d9-abc717034f1f" containerID="720d109f094aeecaff3b30d164c47f8d4c9875416173006b73f3edd028a307d7" exitCode=0 Oct 10 19:19:31 crc kubenswrapper[4799]: I1010 19:19:31.887035 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hn4ns" event={"ID":"aee05748-f1f8-4640-98d9-abc717034f1f","Type":"ContainerDied","Data":"720d109f094aeecaff3b30d164c47f8d4c9875416173006b73f3edd028a307d7"} Oct 10 19:19:31 crc kubenswrapper[4799]: I1010 19:19:31.889732 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 10 19:19:32 crc kubenswrapper[4799]: I1010 19:19:32.900888 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hn4ns" event={"ID":"aee05748-f1f8-4640-98d9-abc717034f1f","Type":"ContainerStarted","Data":"5ac82a7d5927eb7a8aaf069b30b1c1b68c742ca5950a32424bffe524d7b6ac12"} Oct 10 19:19:34 crc kubenswrapper[4799]: I1010 19:19:34.932123 4799 generic.go:334] "Generic (PLEG): container finished" podID="aee05748-f1f8-4640-98d9-abc717034f1f" containerID="5ac82a7d5927eb7a8aaf069b30b1c1b68c742ca5950a32424bffe524d7b6ac12" exitCode=0 Oct 10 19:19:34 crc kubenswrapper[4799]: I1010 19:19:34.932247 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hn4ns" event={"ID":"aee05748-f1f8-4640-98d9-abc717034f1f","Type":"ContainerDied","Data":"5ac82a7d5927eb7a8aaf069b30b1c1b68c742ca5950a32424bffe524d7b6ac12"} Oct 10 19:19:35 crc kubenswrapper[4799]: I1010 19:19:35.947279 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hn4ns" event={"ID":"aee05748-f1f8-4640-98d9-abc717034f1f","Type":"ContainerStarted","Data":"8715c85a0f3db5fa9e35b869d2b6eb02b4fa6f06fb0caa9c59815382d59d6361"} Oct 10 19:19:35 crc kubenswrapper[4799]: I1010 19:19:35.991119 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hn4ns" podStartSLOduration=3.452864322 podStartE2EDuration="6.991091132s" podCreationTimestamp="2025-10-10 19:19:29 +0000 UTC" firstStartedPulling="2025-10-10 19:19:31.889333162 +0000 UTC m=+10065.397657307" lastFinishedPulling="2025-10-10 19:19:35.427559962 +0000 UTC m=+10068.935884117" 
observedRunningTime="2025-10-10 19:19:35.969932444 +0000 UTC m=+10069.478256599" watchObservedRunningTime="2025-10-10 19:19:35.991091132 +0000 UTC m=+10069.499415297" Oct 10 19:19:39 crc kubenswrapper[4799]: I1010 19:19:39.697599 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hn4ns" Oct 10 19:19:39 crc kubenswrapper[4799]: I1010 19:19:39.698303 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hn4ns" Oct 10 19:19:39 crc kubenswrapper[4799]: I1010 19:19:39.771202 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hn4ns" Oct 10 19:19:40 crc kubenswrapper[4799]: I1010 19:19:40.097064 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hn4ns" Oct 10 19:19:40 crc kubenswrapper[4799]: I1010 19:19:40.163413 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hn4ns"] Oct 10 19:19:42 crc kubenswrapper[4799]: I1010 19:19:42.036562 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hn4ns" podUID="aee05748-f1f8-4640-98d9-abc717034f1f" containerName="registry-server" containerID="cri-o://8715c85a0f3db5fa9e35b869d2b6eb02b4fa6f06fb0caa9c59815382d59d6361" gracePeriod=2 Oct 10 19:19:43 crc kubenswrapper[4799]: I1010 19:19:43.046499 4799 generic.go:334] "Generic (PLEG): container finished" podID="aee05748-f1f8-4640-98d9-abc717034f1f" containerID="8715c85a0f3db5fa9e35b869d2b6eb02b4fa6f06fb0caa9c59815382d59d6361" exitCode=0 Oct 10 19:19:43 crc kubenswrapper[4799]: I1010 19:19:43.046581 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hn4ns" event={"ID":"aee05748-f1f8-4640-98d9-abc717034f1f","Type":"ContainerDied","Data":"8715c85a0f3db5fa9e35b869d2b6eb02b4fa6f06fb0caa9c59815382d59d6361"} Oct 10 19:19:43 crc kubenswrapper[4799]: I1010 19:19:43.047002 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hn4ns" event={"ID":"aee05748-f1f8-4640-98d9-abc717034f1f","Type":"ContainerDied","Data":"58995809db53672e72e69c1f9775443f97bdfa8ac19d8035b517b434e5ec0309"} Oct 10 19:19:43 crc kubenswrapper[4799]: I1010 19:19:43.047037 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="58995809db53672e72e69c1f9775443f97bdfa8ac19d8035b517b434e5ec0309" Oct 10 19:19:43 crc kubenswrapper[4799]: I1010 19:19:43.077544 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hn4ns" Oct 10 19:19:43 crc kubenswrapper[4799]: I1010 19:19:43.208254 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aee05748-f1f8-4640-98d9-abc717034f1f-catalog-content\") pod \"aee05748-f1f8-4640-98d9-abc717034f1f\" (UID: \"aee05748-f1f8-4640-98d9-abc717034f1f\") " Oct 10 19:19:43 crc kubenswrapper[4799]: I1010 19:19:43.208318 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aee05748-f1f8-4640-98d9-abc717034f1f-utilities\") pod \"aee05748-f1f8-4640-98d9-abc717034f1f\" (UID: \"aee05748-f1f8-4640-98d9-abc717034f1f\") " Oct 10 19:19:43 crc kubenswrapper[4799]: I1010 19:19:43.208589 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7nc7z\" (UniqueName: \"kubernetes.io/projected/aee05748-f1f8-4640-98d9-abc717034f1f-kube-api-access-7nc7z\") pod \"aee05748-f1f8-4640-98d9-abc717034f1f\" (UID: \"aee05748-f1f8-4640-98d9-abc717034f1f\") " Oct 10 19:19:43 crc kubenswrapper[4799]: I1010 19:19:43.211115 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aee05748-f1f8-4640-98d9-abc717034f1f-utilities" (OuterVolumeSpecName: "utilities") pod "aee05748-f1f8-4640-98d9-abc717034f1f" (UID: "aee05748-f1f8-4640-98d9-abc717034f1f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 19:19:43 crc kubenswrapper[4799]: I1010 19:19:43.218159 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aee05748-f1f8-4640-98d9-abc717034f1f-kube-api-access-7nc7z" (OuterVolumeSpecName: "kube-api-access-7nc7z") pod "aee05748-f1f8-4640-98d9-abc717034f1f" (UID: "aee05748-f1f8-4640-98d9-abc717034f1f"). InnerVolumeSpecName "kube-api-access-7nc7z". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 19:19:43 crc kubenswrapper[4799]: I1010 19:19:43.312878 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7nc7z\" (UniqueName: \"kubernetes.io/projected/aee05748-f1f8-4640-98d9-abc717034f1f-kube-api-access-7nc7z\") on node \"crc\" DevicePath \"\"" Oct 10 19:19:43 crc kubenswrapper[4799]: I1010 19:19:43.312969 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aee05748-f1f8-4640-98d9-abc717034f1f-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 19:19:43 crc kubenswrapper[4799]: I1010 19:19:43.380477 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aee05748-f1f8-4640-98d9-abc717034f1f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "aee05748-f1f8-4640-98d9-abc717034f1f" (UID: "aee05748-f1f8-4640-98d9-abc717034f1f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 19:19:43 crc kubenswrapper[4799]: I1010 19:19:43.414944 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aee05748-f1f8-4640-98d9-abc717034f1f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 19:19:44 crc kubenswrapper[4799]: I1010 19:19:44.057256 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hn4ns" Oct 10 19:19:44 crc kubenswrapper[4799]: I1010 19:19:44.096300 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hn4ns"] Oct 10 19:19:44 crc kubenswrapper[4799]: I1010 19:19:44.110327 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hn4ns"] Oct 10 19:19:45 crc kubenswrapper[4799]: I1010 19:19:45.422741 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aee05748-f1f8-4640-98d9-abc717034f1f" path="/var/lib/kubelet/pods/aee05748-f1f8-4640-98d9-abc717034f1f/volumes" Oct 10 19:20:00 crc kubenswrapper[4799]: I1010 19:20:00.020330 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-k9xz4/must-gather-z22h5"] Oct 10 19:20:00 crc kubenswrapper[4799]: E1010 19:20:00.021162 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aee05748-f1f8-4640-98d9-abc717034f1f" containerName="extract-content" Oct 10 19:20:00 crc kubenswrapper[4799]: I1010 19:20:00.021175 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="aee05748-f1f8-4640-98d9-abc717034f1f" containerName="extract-content" Oct 10 19:20:00 crc kubenswrapper[4799]: E1010 19:20:00.021188 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aee05748-f1f8-4640-98d9-abc717034f1f" containerName="extract-utilities" Oct 10 19:20:00 crc kubenswrapper[4799]: I1010 19:20:00.021194 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="aee05748-f1f8-4640-98d9-abc717034f1f" containerName="extract-utilities" Oct 10 19:20:00 crc kubenswrapper[4799]: E1010 19:20:00.021210 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aee05748-f1f8-4640-98d9-abc717034f1f" containerName="registry-server" Oct 10 19:20:00 crc kubenswrapper[4799]: I1010 19:20:00.021216 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="aee05748-f1f8-4640-98d9-abc717034f1f" containerName="registry-server" Oct 10 19:20:00 crc kubenswrapper[4799]: I1010 19:20:00.021428 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="aee05748-f1f8-4640-98d9-abc717034f1f" containerName="registry-server" Oct 10 19:20:00 crc kubenswrapper[4799]: I1010 19:20:00.022536 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-k9xz4/must-gather-z22h5" Oct 10 19:20:00 crc kubenswrapper[4799]: I1010 19:20:00.031186 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-k9xz4"/"default-dockercfg-9xsjr" Oct 10 19:20:00 crc kubenswrapper[4799]: I1010 19:20:00.031262 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-k9xz4"/"openshift-service-ca.crt" Oct 10 19:20:00 crc kubenswrapper[4799]: I1010 19:20:00.044484 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-k9xz4"/"kube-root-ca.crt" Oct 10 19:20:00 crc kubenswrapper[4799]: I1010 19:20:00.049697 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-k9xz4/must-gather-z22h5"] Oct 10 19:20:00 crc kubenswrapper[4799]: I1010 19:20:00.116321 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ac543bf9-73b9-417e-a8be-708d36ad2353-must-gather-output\") pod \"must-gather-z22h5\" (UID: \"ac543bf9-73b9-417e-a8be-708d36ad2353\") " pod="openshift-must-gather-k9xz4/must-gather-z22h5" Oct 10 19:20:00 crc kubenswrapper[4799]: I1010 19:20:00.116596 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r988b\" (UniqueName: \"kubernetes.io/projected/ac543bf9-73b9-417e-a8be-708d36ad2353-kube-api-access-r988b\") pod \"must-gather-z22h5\" (UID: \"ac543bf9-73b9-417e-a8be-708d36ad2353\") " pod="openshift-must-gather-k9xz4/must-gather-z22h5" Oct 10 19:20:00 crc kubenswrapper[4799]: I1010 19:20:00.219314 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ac543bf9-73b9-417e-a8be-708d36ad2353-must-gather-output\") pod \"must-gather-z22h5\" (UID: \"ac543bf9-73b9-417e-a8be-708d36ad2353\") " pod="openshift-must-gather-k9xz4/must-gather-z22h5" Oct 10 19:20:00 crc kubenswrapper[4799]: I1010 19:20:00.219436 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r988b\" (UniqueName: \"kubernetes.io/projected/ac543bf9-73b9-417e-a8be-708d36ad2353-kube-api-access-r988b\") pod \"must-gather-z22h5\" (UID: \"ac543bf9-73b9-417e-a8be-708d36ad2353\") " pod="openshift-must-gather-k9xz4/must-gather-z22h5" Oct 10 19:20:00 crc kubenswrapper[4799]: I1010 19:20:00.219822 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ac543bf9-73b9-417e-a8be-708d36ad2353-must-gather-output\") pod \"must-gather-z22h5\" (UID: \"ac543bf9-73b9-417e-a8be-708d36ad2353\") " pod="openshift-must-gather-k9xz4/must-gather-z22h5" Oct 10 19:20:00 crc kubenswrapper[4799]: I1010 19:20:00.299562 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r988b\" (UniqueName: \"kubernetes.io/projected/ac543bf9-73b9-417e-a8be-708d36ad2353-kube-api-access-r988b\") pod \"must-gather-z22h5\" (UID: \"ac543bf9-73b9-417e-a8be-708d36ad2353\") " pod="openshift-must-gather-k9xz4/must-gather-z22h5" Oct 10 19:20:00 crc kubenswrapper[4799]: I1010 19:20:00.342153 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-k9xz4/must-gather-z22h5" Oct 10 19:20:00 crc kubenswrapper[4799]: I1010 19:20:00.834718 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-k9xz4/must-gather-z22h5"] Oct 10 19:20:00 crc kubenswrapper[4799]: W1010 19:20:00.836856 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podac543bf9_73b9_417e_a8be_708d36ad2353.slice/crio-305ec1c5437d4f08a454e99c2610a48a21fb01df54b978fcd253cd7d720a1dd5 WatchSource:0}: Error finding container 305ec1c5437d4f08a454e99c2610a48a21fb01df54b978fcd253cd7d720a1dd5: Status 404 returned error can't find the container with id 305ec1c5437d4f08a454e99c2610a48a21fb01df54b978fcd253cd7d720a1dd5 Oct 10 19:20:01 crc kubenswrapper[4799]: I1010 19:20:01.299324 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-k9xz4/must-gather-z22h5" event={"ID":"ac543bf9-73b9-417e-a8be-708d36ad2353","Type":"ContainerStarted","Data":"305ec1c5437d4f08a454e99c2610a48a21fb01df54b978fcd253cd7d720a1dd5"} Oct 10 19:20:06 crc kubenswrapper[4799]: I1010 19:20:06.348932 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-k9xz4/must-gather-z22h5" event={"ID":"ac543bf9-73b9-417e-a8be-708d36ad2353","Type":"ContainerStarted","Data":"bfff071ce9d2dc0ea8e2f7691a4dc1b77c9970f09c293ed40e5f84933f3ce4f6"} Oct 10 19:20:06 crc kubenswrapper[4799]: I1010 19:20:06.349583 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-k9xz4/must-gather-z22h5" event={"ID":"ac543bf9-73b9-417e-a8be-708d36ad2353","Type":"ContainerStarted","Data":"c2dd7ae84da2148bc46d53da826524e87cbc1d2d7b704dfb5dc65507b254cf2f"} Oct 10 19:20:12 crc kubenswrapper[4799]: I1010 19:20:12.143651 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-k9xz4/must-gather-z22h5" podStartSLOduration=8.566727135 podStartE2EDuration="13.143624441s" podCreationTimestamp="2025-10-10 19:19:59 +0000 UTC" firstStartedPulling="2025-10-10 19:20:00.840330471 +0000 UTC m=+10094.348654586" lastFinishedPulling="2025-10-10 19:20:05.417227777 +0000 UTC m=+10098.925551892" observedRunningTime="2025-10-10 19:20:06.369617402 +0000 UTC m=+10099.877941577" watchObservedRunningTime="2025-10-10 19:20:12.143624441 +0000 UTC m=+10105.651948596" Oct 10 19:20:12 crc kubenswrapper[4799]: I1010 19:20:12.157471 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-k9xz4/crc-debug-lckkn"] Oct 10 19:20:12 crc kubenswrapper[4799]: I1010 19:20:12.159508 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-k9xz4/crc-debug-lckkn" Oct 10 19:20:12 crc kubenswrapper[4799]: I1010 19:20:12.177984 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/46c564cf-0a5d-46f6-8877-47035d623065-host\") pod \"crc-debug-lckkn\" (UID: \"46c564cf-0a5d-46f6-8877-47035d623065\") " pod="openshift-must-gather-k9xz4/crc-debug-lckkn" Oct 10 19:20:12 crc kubenswrapper[4799]: I1010 19:20:12.178345 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zlqs4\" (UniqueName: \"kubernetes.io/projected/46c564cf-0a5d-46f6-8877-47035d623065-kube-api-access-zlqs4\") pod \"crc-debug-lckkn\" (UID: \"46c564cf-0a5d-46f6-8877-47035d623065\") " pod="openshift-must-gather-k9xz4/crc-debug-lckkn" Oct 10 19:20:12 crc kubenswrapper[4799]: I1010 19:20:12.280278 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/46c564cf-0a5d-46f6-8877-47035d623065-host\") pod \"crc-debug-lckkn\" (UID: \"46c564cf-0a5d-46f6-8877-47035d623065\") " pod="openshift-must-gather-k9xz4/crc-debug-lckkn" Oct 10 19:20:12 crc kubenswrapper[4799]: I1010 19:20:12.280474 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/46c564cf-0a5d-46f6-8877-47035d623065-host\") pod \"crc-debug-lckkn\" (UID: \"46c564cf-0a5d-46f6-8877-47035d623065\") " pod="openshift-must-gather-k9xz4/crc-debug-lckkn" Oct 10 19:20:12 crc kubenswrapper[4799]: I1010 19:20:12.280614 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zlqs4\" (UniqueName: \"kubernetes.io/projected/46c564cf-0a5d-46f6-8877-47035d623065-kube-api-access-zlqs4\") pod \"crc-debug-lckkn\" (UID: \"46c564cf-0a5d-46f6-8877-47035d623065\") " pod="openshift-must-gather-k9xz4/crc-debug-lckkn" Oct 10 19:20:12 crc kubenswrapper[4799]: I1010 19:20:12.309542 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zlqs4\" (UniqueName: \"kubernetes.io/projected/46c564cf-0a5d-46f6-8877-47035d623065-kube-api-access-zlqs4\") pod \"crc-debug-lckkn\" (UID: \"46c564cf-0a5d-46f6-8877-47035d623065\") " pod="openshift-must-gather-k9xz4/crc-debug-lckkn" Oct 10 19:20:12 crc kubenswrapper[4799]: I1010 19:20:12.479840 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-k9xz4/crc-debug-lckkn" Oct 10 19:20:13 crc kubenswrapper[4799]: I1010 19:20:13.436409 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-k9xz4/crc-debug-lckkn" event={"ID":"46c564cf-0a5d-46f6-8877-47035d623065","Type":"ContainerStarted","Data":"ff9be7f0e7162274c2f149b88292a7fd6e4d9eecdf6aea9c01b391ce02872146"} Oct 10 19:20:25 crc kubenswrapper[4799]: I1010 19:20:25.545374 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-k9xz4/crc-debug-lckkn" event={"ID":"46c564cf-0a5d-46f6-8877-47035d623065","Type":"ContainerStarted","Data":"aca0ece093820bf04e922137b16eea383805bc56c0c4b16f6372a33c40958fbd"} Oct 10 19:20:25 crc kubenswrapper[4799]: I1010 19:20:25.567239 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-k9xz4/crc-debug-lckkn" podStartSLOduration=1.008324801 podStartE2EDuration="13.567222687s" podCreationTimestamp="2025-10-10 19:20:12 +0000 UTC" firstStartedPulling="2025-10-10 19:20:12.512305523 +0000 UTC m=+10106.020629658" lastFinishedPulling="2025-10-10 19:20:25.071203429 +0000 UTC m=+10118.579527544" observedRunningTime="2025-10-10 19:20:25.556384952 +0000 UTC m=+10119.064709067" watchObservedRunningTime="2025-10-10 19:20:25.567222687 +0000 UTC m=+10119.075546792" Oct 10 19:20:29 crc kubenswrapper[4799]: I1010 19:20:29.808363 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-sbq6d"] Oct 10 19:20:29 crc kubenswrapper[4799]: I1010 19:20:29.811203 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sbq6d" Oct 10 19:20:29 crc kubenswrapper[4799]: I1010 19:20:29.822860 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sbq6d"] Oct 10 19:20:29 crc kubenswrapper[4799]: I1010 19:20:29.852687 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad47a313-c923-40e0-8cd6-b1e69332bebb-catalog-content\") pod \"redhat-operators-sbq6d\" (UID: \"ad47a313-c923-40e0-8cd6-b1e69332bebb\") " pod="openshift-marketplace/redhat-operators-sbq6d" Oct 10 19:20:29 crc kubenswrapper[4799]: I1010 19:20:29.852779 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9l5ds\" (UniqueName: \"kubernetes.io/projected/ad47a313-c923-40e0-8cd6-b1e69332bebb-kube-api-access-9l5ds\") pod \"redhat-operators-sbq6d\" (UID: \"ad47a313-c923-40e0-8cd6-b1e69332bebb\") " pod="openshift-marketplace/redhat-operators-sbq6d" Oct 10 19:20:29 crc kubenswrapper[4799]: I1010 19:20:29.852908 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad47a313-c923-40e0-8cd6-b1e69332bebb-utilities\") pod \"redhat-operators-sbq6d\" (UID: \"ad47a313-c923-40e0-8cd6-b1e69332bebb\") " pod="openshift-marketplace/redhat-operators-sbq6d" Oct 10 19:20:29 crc kubenswrapper[4799]: I1010 19:20:29.961636 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad47a313-c923-40e0-8cd6-b1e69332bebb-catalog-content\") pod \"redhat-operators-sbq6d\" (UID: \"ad47a313-c923-40e0-8cd6-b1e69332bebb\") " pod="openshift-marketplace/redhat-operators-sbq6d" Oct 10 19:20:29 crc kubenswrapper[4799]: I1010 
19:20:29.961750 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9l5ds\" (UniqueName: \"kubernetes.io/projected/ad47a313-c923-40e0-8cd6-b1e69332bebb-kube-api-access-9l5ds\") pod \"redhat-operators-sbq6d\" (UID: \"ad47a313-c923-40e0-8cd6-b1e69332bebb\") " pod="openshift-marketplace/redhat-operators-sbq6d" Oct 10 19:20:29 crc kubenswrapper[4799]: I1010 19:20:29.962052 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad47a313-c923-40e0-8cd6-b1e69332bebb-utilities\") pod \"redhat-operators-sbq6d\" (UID: \"ad47a313-c923-40e0-8cd6-b1e69332bebb\") " pod="openshift-marketplace/redhat-operators-sbq6d" Oct 10 19:20:29 crc kubenswrapper[4799]: I1010 19:20:29.962579 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad47a313-c923-40e0-8cd6-b1e69332bebb-utilities\") pod \"redhat-operators-sbq6d\" (UID: \"ad47a313-c923-40e0-8cd6-b1e69332bebb\") " pod="openshift-marketplace/redhat-operators-sbq6d" Oct 10 19:20:29 crc kubenswrapper[4799]: I1010 19:20:29.962830 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad47a313-c923-40e0-8cd6-b1e69332bebb-catalog-content\") pod \"redhat-operators-sbq6d\" (UID: \"ad47a313-c923-40e0-8cd6-b1e69332bebb\") " pod="openshift-marketplace/redhat-operators-sbq6d" Oct 10 19:20:29 crc kubenswrapper[4799]: I1010 19:20:29.989045 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9l5ds\" (UniqueName: \"kubernetes.io/projected/ad47a313-c923-40e0-8cd6-b1e69332bebb-kube-api-access-9l5ds\") pod \"redhat-operators-sbq6d\" (UID: \"ad47a313-c923-40e0-8cd6-b1e69332bebb\") " pod="openshift-marketplace/redhat-operators-sbq6d" Oct 10 19:20:30 crc kubenswrapper[4799]: I1010 19:20:30.142647 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-sbq6d" Oct 10 19:20:30 crc kubenswrapper[4799]: I1010 19:20:30.769233 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sbq6d"] Oct 10 19:20:33 crc kubenswrapper[4799]: I1010 19:20:33.633260 4799 generic.go:334] "Generic (PLEG): container finished" podID="ad47a313-c923-40e0-8cd6-b1e69332bebb" containerID="7028f340da49b679f992adb7eb344e9d23d48ff6bb46c40260568f5198cb3d69" exitCode=0 Oct 10 19:20:33 crc kubenswrapper[4799]: I1010 19:20:33.633356 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sbq6d" event={"ID":"ad47a313-c923-40e0-8cd6-b1e69332bebb","Type":"ContainerDied","Data":"7028f340da49b679f992adb7eb344e9d23d48ff6bb46c40260568f5198cb3d69"} Oct 10 19:20:33 crc kubenswrapper[4799]: I1010 19:20:33.633718 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sbq6d" event={"ID":"ad47a313-c923-40e0-8cd6-b1e69332bebb","Type":"ContainerStarted","Data":"25a5a0d99546c7ffa0351182f4ebf002a9fb3056263dd8b834e03f08b4e72b47"} Oct 10 19:20:34 crc kubenswrapper[4799]: I1010 19:20:34.650215 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sbq6d" event={"ID":"ad47a313-c923-40e0-8cd6-b1e69332bebb","Type":"ContainerStarted","Data":"9310040b00d92a7831d39898fcb0d52c7169b8f8d1be21a4f6d630a013374e3b"} Oct 10 19:20:39 crc kubenswrapper[4799]: I1010 19:20:39.705189 4799 generic.go:334] "Generic (PLEG): container finished" podID="ad47a313-c923-40e0-8cd6-b1e69332bebb" containerID="9310040b00d92a7831d39898fcb0d52c7169b8f8d1be21a4f6d630a013374e3b" exitCode=0 Oct 10 19:20:39 crc kubenswrapper[4799]: I1010 19:20:39.705265 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sbq6d" event={"ID":"ad47a313-c923-40e0-8cd6-b1e69332bebb","Type":"ContainerDied","Data":"9310040b00d92a7831d39898fcb0d52c7169b8f8d1be21a4f6d630a013374e3b"} Oct 10 19:20:40 crc kubenswrapper[4799]: I1010 19:20:40.716152 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sbq6d" event={"ID":"ad47a313-c923-40e0-8cd6-b1e69332bebb","Type":"ContainerStarted","Data":"8b27e4c865fa6dc3e4e0d8c055b93e177d871385938a7d461c2f7f2868f71845"} Oct 10 19:20:40 crc kubenswrapper[4799]: I1010 19:20:40.734965 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-sbq6d" podStartSLOduration=5.269219362 podStartE2EDuration="11.734948399s" podCreationTimestamp="2025-10-10 19:20:29 +0000 UTC" firstStartedPulling="2025-10-10 19:20:33.642941688 +0000 UTC m=+10127.151265803" lastFinishedPulling="2025-10-10 19:20:40.108670725 +0000 UTC m=+10133.616994840" observedRunningTime="2025-10-10 19:20:40.732452098 +0000 UTC m=+10134.240776213" watchObservedRunningTime="2025-10-10 19:20:40.734948399 +0000 UTC m=+10134.243272504" Oct 10 19:20:42 crc kubenswrapper[4799]: I1010 19:20:42.738460 4799 generic.go:334] "Generic (PLEG): container finished" podID="46c564cf-0a5d-46f6-8877-47035d623065" containerID="aca0ece093820bf04e922137b16eea383805bc56c0c4b16f6372a33c40958fbd" exitCode=0 Oct 10 19:20:42 crc kubenswrapper[4799]: I1010 19:20:42.738558 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-k9xz4/crc-debug-lckkn" 
event={"ID":"46c564cf-0a5d-46f6-8877-47035d623065","Type":"ContainerDied","Data":"aca0ece093820bf04e922137b16eea383805bc56c0c4b16f6372a33c40958fbd"} Oct 10 19:20:44 crc kubenswrapper[4799]: I1010 19:20:44.309459 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-k9xz4/crc-debug-lckkn" Oct 10 19:20:44 crc kubenswrapper[4799]: I1010 19:20:44.357816 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-k9xz4/crc-debug-lckkn"] Oct 10 19:20:44 crc kubenswrapper[4799]: I1010 19:20:44.379012 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-k9xz4/crc-debug-lckkn"] Oct 10 19:20:44 crc kubenswrapper[4799]: I1010 19:20:44.480352 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zlqs4\" (UniqueName: \"kubernetes.io/projected/46c564cf-0a5d-46f6-8877-47035d623065-kube-api-access-zlqs4\") pod \"46c564cf-0a5d-46f6-8877-47035d623065\" (UID: \"46c564cf-0a5d-46f6-8877-47035d623065\") " Oct 10 19:20:44 crc kubenswrapper[4799]: I1010 19:20:44.480846 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/46c564cf-0a5d-46f6-8877-47035d623065-host\") pod \"46c564cf-0a5d-46f6-8877-47035d623065\" (UID: \"46c564cf-0a5d-46f6-8877-47035d623065\") " Oct 10 19:20:44 crc kubenswrapper[4799]: I1010 19:20:44.481363 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/46c564cf-0a5d-46f6-8877-47035d623065-host" (OuterVolumeSpecName: "host") pod "46c564cf-0a5d-46f6-8877-47035d623065" (UID: "46c564cf-0a5d-46f6-8877-47035d623065"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 19:20:44 crc kubenswrapper[4799]: I1010 19:20:44.489688 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46c564cf-0a5d-46f6-8877-47035d623065-kube-api-access-zlqs4" (OuterVolumeSpecName: "kube-api-access-zlqs4") pod "46c564cf-0a5d-46f6-8877-47035d623065" (UID: "46c564cf-0a5d-46f6-8877-47035d623065"). InnerVolumeSpecName "kube-api-access-zlqs4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 19:20:44 crc kubenswrapper[4799]: I1010 19:20:44.582702 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zlqs4\" (UniqueName: \"kubernetes.io/projected/46c564cf-0a5d-46f6-8877-47035d623065-kube-api-access-zlqs4\") on node \"crc\" DevicePath \"\"" Oct 10 19:20:44 crc kubenswrapper[4799]: I1010 19:20:44.582731 4799 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/46c564cf-0a5d-46f6-8877-47035d623065-host\") on node \"crc\" DevicePath \"\"" Oct 10 19:20:44 crc kubenswrapper[4799]: I1010 19:20:44.758458 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ff9be7f0e7162274c2f149b88292a7fd6e4d9eecdf6aea9c01b391ce02872146" Oct 10 19:20:44 crc kubenswrapper[4799]: I1010 19:20:44.758552 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-k9xz4/crc-debug-lckkn" Oct 10 19:20:45 crc kubenswrapper[4799]: I1010 19:20:45.422652 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46c564cf-0a5d-46f6-8877-47035d623065" path="/var/lib/kubelet/pods/46c564cf-0a5d-46f6-8877-47035d623065/volumes" Oct 10 19:20:45 crc kubenswrapper[4799]: I1010 19:20:45.651622 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-k9xz4/crc-debug-trwjw"] Oct 10 19:20:45 crc kubenswrapper[4799]: E1010 19:20:45.652074 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46c564cf-0a5d-46f6-8877-47035d623065" containerName="container-00" Oct 10 19:20:45 crc kubenswrapper[4799]: I1010 19:20:45.652093 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="46c564cf-0a5d-46f6-8877-47035d623065" containerName="container-00" Oct 10 19:20:45 crc kubenswrapper[4799]: I1010 19:20:45.652348 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="46c564cf-0a5d-46f6-8877-47035d623065" containerName="container-00" Oct 10 19:20:45 crc kubenswrapper[4799]: I1010 19:20:45.653125 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-k9xz4/crc-debug-trwjw" Oct 10 19:20:45 crc kubenswrapper[4799]: I1010 19:20:45.807901 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7zk5\" (UniqueName: \"kubernetes.io/projected/59c3f7a3-bd96-4aa8-abc0-f064c841e120-kube-api-access-g7zk5\") pod \"crc-debug-trwjw\" (UID: \"59c3f7a3-bd96-4aa8-abc0-f064c841e120\") " pod="openshift-must-gather-k9xz4/crc-debug-trwjw" Oct 10 19:20:45 crc kubenswrapper[4799]: I1010 19:20:45.808066 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/59c3f7a3-bd96-4aa8-abc0-f064c841e120-host\") pod \"crc-debug-trwjw\" (UID: \"59c3f7a3-bd96-4aa8-abc0-f064c841e120\") " pod="openshift-must-gather-k9xz4/crc-debug-trwjw" Oct 10 19:20:45 crc kubenswrapper[4799]: I1010 19:20:45.910458 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7zk5\" (UniqueName: \"kubernetes.io/projected/59c3f7a3-bd96-4aa8-abc0-f064c841e120-kube-api-access-g7zk5\") pod \"crc-debug-trwjw\" (UID: \"59c3f7a3-bd96-4aa8-abc0-f064c841e120\") " pod="openshift-must-gather-k9xz4/crc-debug-trwjw" Oct 10 19:20:45 crc kubenswrapper[4799]: I1010 19:20:45.910542 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/59c3f7a3-bd96-4aa8-abc0-f064c841e120-host\") pod \"crc-debug-trwjw\" (UID: \"59c3f7a3-bd96-4aa8-abc0-f064c841e120\") " pod="openshift-must-gather-k9xz4/crc-debug-trwjw" Oct 10 19:20:45 crc kubenswrapper[4799]: I1010 19:20:45.910766 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/59c3f7a3-bd96-4aa8-abc0-f064c841e120-host\") pod \"crc-debug-trwjw\" (UID: \"59c3f7a3-bd96-4aa8-abc0-f064c841e120\") " pod="openshift-must-gather-k9xz4/crc-debug-trwjw" Oct 10 19:20:45 crc kubenswrapper[4799]: I1010 19:20:45.938549 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7zk5\" (UniqueName: \"kubernetes.io/projected/59c3f7a3-bd96-4aa8-abc0-f064c841e120-kube-api-access-g7zk5\") pod \"crc-debug-trwjw\" (UID: \"59c3f7a3-bd96-4aa8-abc0-f064c841e120\") " 
pod="openshift-must-gather-k9xz4/crc-debug-trwjw" Oct 10 19:20:45 crc kubenswrapper[4799]: I1010 19:20:45.969855 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-k9xz4/crc-debug-trwjw" Oct 10 19:20:46 crc kubenswrapper[4799]: W1010 19:20:46.047469 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod59c3f7a3_bd96_4aa8_abc0_f064c841e120.slice/crio-3420d047e1b72f8bba9d57d1d5ae3c9a14fe98b2e3cb0f89d009195ab93999f8 WatchSource:0}: Error finding container 3420d047e1b72f8bba9d57d1d5ae3c9a14fe98b2e3cb0f89d009195ab93999f8: Status 404 returned error can't find the container with id 3420d047e1b72f8bba9d57d1d5ae3c9a14fe98b2e3cb0f89d009195ab93999f8 Oct 10 19:20:46 crc kubenswrapper[4799]: I1010 19:20:46.777246 4799 generic.go:334] "Generic (PLEG): container finished" podID="59c3f7a3-bd96-4aa8-abc0-f064c841e120" containerID="22bd6326ea015cb8528536bdf848ae23298ca83b162bdb4ea3f0c4d27128849f" exitCode=1 Oct 10 19:20:46 crc kubenswrapper[4799]: I1010 19:20:46.777310 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-k9xz4/crc-debug-trwjw" event={"ID":"59c3f7a3-bd96-4aa8-abc0-f064c841e120","Type":"ContainerDied","Data":"22bd6326ea015cb8528536bdf848ae23298ca83b162bdb4ea3f0c4d27128849f"} Oct 10 19:20:46 crc kubenswrapper[4799]: I1010 19:20:46.777780 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-k9xz4/crc-debug-trwjw" event={"ID":"59c3f7a3-bd96-4aa8-abc0-f064c841e120","Type":"ContainerStarted","Data":"3420d047e1b72f8bba9d57d1d5ae3c9a14fe98b2e3cb0f89d009195ab93999f8"} Oct 10 19:20:46 crc kubenswrapper[4799]: I1010 19:20:46.819166 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-k9xz4/crc-debug-trwjw"] Oct 10 19:20:46 crc kubenswrapper[4799]: I1010 19:20:46.830150 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-k9xz4/crc-debug-trwjw"] Oct 10 19:20:47 crc kubenswrapper[4799]: I1010 19:20:47.933215 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-k9xz4/crc-debug-trwjw" Oct 10 19:20:48 crc kubenswrapper[4799]: I1010 19:20:48.054639 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/59c3f7a3-bd96-4aa8-abc0-f064c841e120-host\") pod \"59c3f7a3-bd96-4aa8-abc0-f064c841e120\" (UID: \"59c3f7a3-bd96-4aa8-abc0-f064c841e120\") " Oct 10 19:20:48 crc kubenswrapper[4799]: I1010 19:20:48.054681 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/59c3f7a3-bd96-4aa8-abc0-f064c841e120-host" (OuterVolumeSpecName: "host") pod "59c3f7a3-bd96-4aa8-abc0-f064c841e120" (UID: "59c3f7a3-bd96-4aa8-abc0-f064c841e120"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 10 19:20:48 crc kubenswrapper[4799]: I1010 19:20:48.055033 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g7zk5\" (UniqueName: \"kubernetes.io/projected/59c3f7a3-bd96-4aa8-abc0-f064c841e120-kube-api-access-g7zk5\") pod \"59c3f7a3-bd96-4aa8-abc0-f064c841e120\" (UID: \"59c3f7a3-bd96-4aa8-abc0-f064c841e120\") " Oct 10 19:20:48 crc kubenswrapper[4799]: I1010 19:20:48.056212 4799 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/59c3f7a3-bd96-4aa8-abc0-f064c841e120-host\") on node \"crc\" DevicePath \"\"" Oct 10 19:20:48 crc kubenswrapper[4799]: I1010 19:20:48.070954 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59c3f7a3-bd96-4aa8-abc0-f064c841e120-kube-api-access-g7zk5" (OuterVolumeSpecName: "kube-api-access-g7zk5") pod "59c3f7a3-bd96-4aa8-abc0-f064c841e120" (UID: "59c3f7a3-bd96-4aa8-abc0-f064c841e120"). InnerVolumeSpecName "kube-api-access-g7zk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 19:20:48 crc kubenswrapper[4799]: I1010 19:20:48.157599 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g7zk5\" (UniqueName: \"kubernetes.io/projected/59c3f7a3-bd96-4aa8-abc0-f064c841e120-kube-api-access-g7zk5\") on node \"crc\" DevicePath \"\"" Oct 10 19:20:48 crc kubenswrapper[4799]: I1010 19:20:48.798269 4799 scope.go:117] "RemoveContainer" containerID="22bd6326ea015cb8528536bdf848ae23298ca83b162bdb4ea3f0c4d27128849f" Oct 10 19:20:48 crc kubenswrapper[4799]: I1010 19:20:48.798372 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-k9xz4/crc-debug-trwjw" Oct 10 19:20:49 crc kubenswrapper[4799]: I1010 19:20:49.415007 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59c3f7a3-bd96-4aa8-abc0-f064c841e120" path="/var/lib/kubelet/pods/59c3f7a3-bd96-4aa8-abc0-f064c841e120/volumes" Oct 10 19:20:50 crc kubenswrapper[4799]: I1010 19:20:50.143398 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-sbq6d" Oct 10 19:20:50 crc kubenswrapper[4799]: I1010 19:20:50.143823 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-sbq6d" Oct 10 19:20:50 crc kubenswrapper[4799]: I1010 19:20:50.214427 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-sbq6d" Oct 10 19:20:50 crc kubenswrapper[4799]: I1010 19:20:50.917988 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-sbq6d" Oct 10 19:20:50 crc kubenswrapper[4799]: I1010 19:20:50.977829 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-sbq6d"] Oct 10 19:20:52 crc kubenswrapper[4799]: I1010 19:20:52.847788 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-sbq6d" podUID="ad47a313-c923-40e0-8cd6-b1e69332bebb" containerName="registry-server" containerID="cri-o://8b27e4c865fa6dc3e4e0d8c055b93e177d871385938a7d461c2f7f2868f71845" gracePeriod=2 Oct 10 19:20:53 crc kubenswrapper[4799]: I1010 19:20:53.482205 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-sbq6d" Oct 10 19:20:53 crc kubenswrapper[4799]: I1010 19:20:53.580399 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad47a313-c923-40e0-8cd6-b1e69332bebb-catalog-content\") pod \"ad47a313-c923-40e0-8cd6-b1e69332bebb\" (UID: \"ad47a313-c923-40e0-8cd6-b1e69332bebb\") " Oct 10 19:20:53 crc kubenswrapper[4799]: I1010 19:20:53.580848 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9l5ds\" (UniqueName: \"kubernetes.io/projected/ad47a313-c923-40e0-8cd6-b1e69332bebb-kube-api-access-9l5ds\") pod \"ad47a313-c923-40e0-8cd6-b1e69332bebb\" (UID: \"ad47a313-c923-40e0-8cd6-b1e69332bebb\") " Oct 10 19:20:53 crc kubenswrapper[4799]: I1010 19:20:53.580965 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad47a313-c923-40e0-8cd6-b1e69332bebb-utilities\") pod \"ad47a313-c923-40e0-8cd6-b1e69332bebb\" (UID: \"ad47a313-c923-40e0-8cd6-b1e69332bebb\") " Oct 10 19:20:53 crc kubenswrapper[4799]: I1010 19:20:53.581849 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad47a313-c923-40e0-8cd6-b1e69332bebb-utilities" (OuterVolumeSpecName: "utilities") pod "ad47a313-c923-40e0-8cd6-b1e69332bebb" (UID: "ad47a313-c923-40e0-8cd6-b1e69332bebb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 19:20:53 crc kubenswrapper[4799]: I1010 19:20:53.594495 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad47a313-c923-40e0-8cd6-b1e69332bebb-kube-api-access-9l5ds" (OuterVolumeSpecName: "kube-api-access-9l5ds") pod "ad47a313-c923-40e0-8cd6-b1e69332bebb" (UID: "ad47a313-c923-40e0-8cd6-b1e69332bebb"). InnerVolumeSpecName "kube-api-access-9l5ds". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 19:20:53 crc kubenswrapper[4799]: I1010 19:20:53.669978 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad47a313-c923-40e0-8cd6-b1e69332bebb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ad47a313-c923-40e0-8cd6-b1e69332bebb" (UID: "ad47a313-c923-40e0-8cd6-b1e69332bebb"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 19:20:53 crc kubenswrapper[4799]: I1010 19:20:53.684195 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad47a313-c923-40e0-8cd6-b1e69332bebb-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 19:20:53 crc kubenswrapper[4799]: I1010 19:20:53.684219 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad47a313-c923-40e0-8cd6-b1e69332bebb-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 19:20:53 crc kubenswrapper[4799]: I1010 19:20:53.684230 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9l5ds\" (UniqueName: \"kubernetes.io/projected/ad47a313-c923-40e0-8cd6-b1e69332bebb-kube-api-access-9l5ds\") on node \"crc\" DevicePath \"\"" Oct 10 19:20:53 crc kubenswrapper[4799]: I1010 19:20:53.857008 4799 generic.go:334] "Generic (PLEG): container finished" podID="ad47a313-c923-40e0-8cd6-b1e69332bebb" containerID="8b27e4c865fa6dc3e4e0d8c055b93e177d871385938a7d461c2f7f2868f71845" exitCode=0 Oct 10 19:20:53 crc kubenswrapper[4799]: I1010 19:20:53.857059 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sbq6d" event={"ID":"ad47a313-c923-40e0-8cd6-b1e69332bebb","Type":"ContainerDied","Data":"8b27e4c865fa6dc3e4e0d8c055b93e177d871385938a7d461c2f7f2868f71845"} Oct 10 19:20:53 crc kubenswrapper[4799]: I1010 19:20:53.857098 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sbq6d" event={"ID":"ad47a313-c923-40e0-8cd6-b1e69332bebb","Type":"ContainerDied","Data":"25a5a0d99546c7ffa0351182f4ebf002a9fb3056263dd8b834e03f08b4e72b47"} Oct 10 19:20:53 crc kubenswrapper[4799]: I1010 19:20:53.857128 4799 scope.go:117] "RemoveContainer" containerID="8b27e4c865fa6dc3e4e0d8c055b93e177d871385938a7d461c2f7f2868f71845" Oct 10 19:20:53 crc kubenswrapper[4799]: I1010 19:20:53.857140 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-sbq6d" Oct 10 19:20:53 crc kubenswrapper[4799]: I1010 19:20:53.877915 4799 scope.go:117] "RemoveContainer" containerID="9310040b00d92a7831d39898fcb0d52c7169b8f8d1be21a4f6d630a013374e3b" Oct 10 19:20:53 crc kubenswrapper[4799]: I1010 19:20:53.901578 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-sbq6d"] Oct 10 19:20:53 crc kubenswrapper[4799]: I1010 19:20:53.909411 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-sbq6d"] Oct 10 19:20:53 crc kubenswrapper[4799]: I1010 19:20:53.916025 4799 scope.go:117] "RemoveContainer" containerID="7028f340da49b679f992adb7eb344e9d23d48ff6bb46c40260568f5198cb3d69" Oct 10 19:20:53 crc kubenswrapper[4799]: I1010 19:20:53.974700 4799 scope.go:117] "RemoveContainer" containerID="8b27e4c865fa6dc3e4e0d8c055b93e177d871385938a7d461c2f7f2868f71845" Oct 10 19:20:53 crc kubenswrapper[4799]: E1010 19:20:53.975187 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b27e4c865fa6dc3e4e0d8c055b93e177d871385938a7d461c2f7f2868f71845\": container with ID starting with 8b27e4c865fa6dc3e4e0d8c055b93e177d871385938a7d461c2f7f2868f71845 not found: ID does not exist" containerID="8b27e4c865fa6dc3e4e0d8c055b93e177d871385938a7d461c2f7f2868f71845" Oct 10 19:20:53 crc kubenswrapper[4799]: I1010 19:20:53.975231 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b27e4c865fa6dc3e4e0d8c055b93e177d871385938a7d461c2f7f2868f71845"} err="failed to get container status \"8b27e4c865fa6dc3e4e0d8c055b93e177d871385938a7d461c2f7f2868f71845\": rpc error: code = NotFound desc = could not find container \"8b27e4c865fa6dc3e4e0d8c055b93e177d871385938a7d461c2f7f2868f71845\": container with ID starting with 8b27e4c865fa6dc3e4e0d8c055b93e177d871385938a7d461c2f7f2868f71845 not found: ID does not exist" Oct 10 19:20:53 crc kubenswrapper[4799]: I1010 19:20:53.975256 4799 scope.go:117] "RemoveContainer" containerID="9310040b00d92a7831d39898fcb0d52c7169b8f8d1be21a4f6d630a013374e3b" Oct 10 19:20:53 crc kubenswrapper[4799]: E1010 19:20:53.975601 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9310040b00d92a7831d39898fcb0d52c7169b8f8d1be21a4f6d630a013374e3b\": container with ID starting with 9310040b00d92a7831d39898fcb0d52c7169b8f8d1be21a4f6d630a013374e3b not found: ID does not exist" containerID="9310040b00d92a7831d39898fcb0d52c7169b8f8d1be21a4f6d630a013374e3b" Oct 10 19:20:53 crc kubenswrapper[4799]: I1010 19:20:53.975658 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9310040b00d92a7831d39898fcb0d52c7169b8f8d1be21a4f6d630a013374e3b"} err="failed to get container status \"9310040b00d92a7831d39898fcb0d52c7169b8f8d1be21a4f6d630a013374e3b\": rpc error: code = NotFound desc = could not find container \"9310040b00d92a7831d39898fcb0d52c7169b8f8d1be21a4f6d630a013374e3b\": container with ID starting with 9310040b00d92a7831d39898fcb0d52c7169b8f8d1be21a4f6d630a013374e3b not found: ID does not exist" Oct 10 19:20:53 crc kubenswrapper[4799]: I1010 19:20:53.975706 4799 scope.go:117] "RemoveContainer" containerID="7028f340da49b679f992adb7eb344e9d23d48ff6bb46c40260568f5198cb3d69" Oct 10 19:20:53 crc kubenswrapper[4799]: E1010 19:20:53.976446 4799 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"7028f340da49b679f992adb7eb344e9d23d48ff6bb46c40260568f5198cb3d69\": container with ID starting with 7028f340da49b679f992adb7eb344e9d23d48ff6bb46c40260568f5198cb3d69 not found: ID does not exist" containerID="7028f340da49b679f992adb7eb344e9d23d48ff6bb46c40260568f5198cb3d69" Oct 10 19:20:53 crc kubenswrapper[4799]: I1010 19:20:53.976533 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7028f340da49b679f992adb7eb344e9d23d48ff6bb46c40260568f5198cb3d69"} err="failed to get container status \"7028f340da49b679f992adb7eb344e9d23d48ff6bb46c40260568f5198cb3d69\": rpc error: code = NotFound desc = could not find container \"7028f340da49b679f992adb7eb344e9d23d48ff6bb46c40260568f5198cb3d69\": container with ID starting with 7028f340da49b679f992adb7eb344e9d23d48ff6bb46c40260568f5198cb3d69 not found: ID does not exist" Oct 10 19:20:55 crc kubenswrapper[4799]: I1010 19:20:55.441633 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad47a313-c923-40e0-8cd6-b1e69332bebb" path="/var/lib/kubelet/pods/ad47a313-c923-40e0-8cd6-b1e69332bebb/volumes" Oct 10 19:21:38 crc kubenswrapper[4799]: I1010 19:21:38.309062 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_3885dc5d-b551-4371-b731-0ffda7c18126/init-config-reloader/0.log" Oct 10 19:21:38 crc kubenswrapper[4799]: I1010 19:21:38.486704 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_3885dc5d-b551-4371-b731-0ffda7c18126/init-config-reloader/0.log" Oct 10 19:21:38 crc kubenswrapper[4799]: I1010 19:21:38.502611 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_3885dc5d-b551-4371-b731-0ffda7c18126/alertmanager/0.log" Oct 10 19:21:38 crc kubenswrapper[4799]: I1010 19:21:38.608016 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_3885dc5d-b551-4371-b731-0ffda7c18126/config-reloader/0.log" Oct 10 19:21:38 crc kubenswrapper[4799]: I1010 19:21:38.689081 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_91a68421-466f-4eaa-a89b-59fa972a0726/aodh-api/0.log" Oct 10 19:21:38 crc kubenswrapper[4799]: I1010 19:21:38.797044 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_91a68421-466f-4eaa-a89b-59fa972a0726/aodh-evaluator/0.log" Oct 10 19:21:38 crc kubenswrapper[4799]: I1010 19:21:38.861380 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_91a68421-466f-4eaa-a89b-59fa972a0726/aodh-listener/0.log" Oct 10 19:21:38 crc kubenswrapper[4799]: I1010 19:21:38.915979 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_91a68421-466f-4eaa-a89b-59fa972a0726/aodh-notifier/0.log" Oct 10 19:21:39 crc kubenswrapper[4799]: I1010 19:21:39.044606 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-596f5c78cd-pnssg_203afbc9-83b5-49dc-a989-d12e6185fa4c/barbican-api/0.log" Oct 10 19:21:39 crc kubenswrapper[4799]: I1010 19:21:39.098203 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-596f5c78cd-pnssg_203afbc9-83b5-49dc-a989-d12e6185fa4c/barbican-api-log/0.log" Oct 10 19:21:39 crc kubenswrapper[4799]: I1010 19:21:39.280229 4799 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_barbican-keystone-listener-58d7bfb486-ptqg5_ede7ea54-dc54-4eba-8a41-9c8fcb73f481/barbican-keystone-listener/0.log" Oct 10 19:21:39 crc kubenswrapper[4799]: I1010 19:21:39.336749 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-58d7bfb486-ptqg5_ede7ea54-dc54-4eba-8a41-9c8fcb73f481/barbican-keystone-listener-log/0.log" Oct 10 19:21:39 crc kubenswrapper[4799]: I1010 19:21:39.478639 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-7bb467888f-4bvnt_f46ef205-6289-4443-893c-ea3e3c7728a9/barbican-worker/0.log" Oct 10 19:21:39 crc kubenswrapper[4799]: I1010 19:21:39.501202 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-7bb467888f-4bvnt_f46ef205-6289-4443-893c-ea3e3c7728a9/barbican-worker-log/0.log" Oct 10 19:21:39 crc kubenswrapper[4799]: I1010 19:21:39.693450 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-openstack-openstack-cell1-pgztr_797acbff-1308-4140-9468-a7eaaa3e5e75/bootstrap-openstack-openstack-cell1/0.log" Oct 10 19:21:39 crc kubenswrapper[4799]: I1010 19:21:39.910121 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_cf924bb3-b276-45c5-bcaa-bb6dc669a569/ceilometer-central-agent/0.log" Oct 10 19:21:39 crc kubenswrapper[4799]: I1010 19:21:39.920426 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_cf924bb3-b276-45c5-bcaa-bb6dc669a569/ceilometer-notification-agent/0.log" Oct 10 19:21:39 crc kubenswrapper[4799]: I1010 19:21:39.957079 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_cf924bb3-b276-45c5-bcaa-bb6dc669a569/proxy-httpd/0.log" Oct 10 19:21:40 crc kubenswrapper[4799]: I1010 19:21:40.058204 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_cf924bb3-b276-45c5-bcaa-bb6dc669a569/sg-core/0.log" Oct 10 19:21:40 crc kubenswrapper[4799]: I1010 19:21:40.080164 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-client-openstack-openstack-cell1-ptt5f_bf2a1658-7b1c-4b1f-af4a-87d92927e9c4/ceph-client-openstack-openstack-cell1/0.log" Oct 10 19:21:40 crc kubenswrapper[4799]: I1010 19:21:40.292913 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_30b1084d-18c0-428e-8682-48773409a820/cinder-api-log/0.log" Oct 10 19:21:40 crc kubenswrapper[4799]: I1010 19:21:40.343010 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_30b1084d-18c0-428e-8682-48773409a820/cinder-api/0.log" Oct 10 19:21:40 crc kubenswrapper[4799]: I1010 19:21:40.561576 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_8d1df32c-4647-4ce9-9a5a-c7d32f297332/cinder-backup/0.log" Oct 10 19:21:40 crc kubenswrapper[4799]: I1010 19:21:40.573042 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_8d1df32c-4647-4ce9-9a5a-c7d32f297332/probe/0.log" Oct 10 19:21:40 crc kubenswrapper[4799]: I1010 19:21:40.649273 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_62762f59-7347-40da-9e08-51c45798a4cd/cinder-scheduler/0.log" Oct 10 19:21:40 crc kubenswrapper[4799]: I1010 19:21:40.796570 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_62762f59-7347-40da-9e08-51c45798a4cd/probe/0.log" Oct 10 19:21:40 crc kubenswrapper[4799]: I1010 19:21:40.910473 
4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_e0c788e1-2358-4b32-b410-bb615bed3971/probe/0.log" Oct 10 19:21:40 crc kubenswrapper[4799]: I1010 19:21:40.938222 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_e0c788e1-2358-4b32-b410-bb615bed3971/cinder-volume/0.log" Oct 10 19:21:40 crc kubenswrapper[4799]: I1010 19:21:40.997423 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-openstack-openstack-cell1-78wrs_f452ed12-b58b-47b2-86f9-eb1910405e02/configure-network-openstack-openstack-cell1/0.log" Oct 10 19:21:41 crc kubenswrapper[4799]: I1010 19:21:41.159052 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-openstack-openstack-cell1-wwqb7_49cf2daa-8a65-4dfd-bd0f-cf28839297fd/configure-os-openstack-openstack-cell1/0.log" Oct 10 19:21:41 crc kubenswrapper[4799]: I1010 19:21:41.276176 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cdbd4bc47-gjsnm_42f9f525-e7cc-4e82-8a3f-49e481628714/init/0.log" Oct 10 19:21:41 crc kubenswrapper[4799]: I1010 19:21:41.423748 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cdbd4bc47-gjsnm_42f9f525-e7cc-4e82-8a3f-49e481628714/init/0.log" Oct 10 19:21:41 crc kubenswrapper[4799]: I1010 19:21:41.442303 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cdbd4bc47-gjsnm_42f9f525-e7cc-4e82-8a3f-49e481628714/dnsmasq-dns/0.log" Oct 10 19:21:41 crc kubenswrapper[4799]: I1010 19:21:41.475738 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-openstack-openstack-cell1-jrjjm_5d50c12a-292f-4bcc-9915-0c2f99f7eb28/download-cache-openstack-openstack-cell1/0.log" Oct 10 19:21:41 crc kubenswrapper[4799]: I1010 19:21:41.617350 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_0aeab9ba-576d-4959-9244-459a574bbfe4/glance-httpd/0.log" Oct 10 19:21:41 crc kubenswrapper[4799]: I1010 19:21:41.638001 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_0aeab9ba-576d-4959-9244-459a574bbfe4/glance-log/0.log" Oct 10 19:21:41 crc kubenswrapper[4799]: I1010 19:21:41.805076 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2/glance-log/0.log" Oct 10 19:21:41 crc kubenswrapper[4799]: I1010 19:21:41.839117 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_ae0869c7-c5f5-4abe-8db1-cdbe7cd3f7d2/glance-httpd/0.log" Oct 10 19:21:42 crc kubenswrapper[4799]: I1010 19:21:42.081215 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-api-6c6c4696cc-82gmt_2e588bc1-697a-43cc-9b80-18937afee8bd/heat-api/0.log" Oct 10 19:21:42 crc kubenswrapper[4799]: I1010 19:21:42.113174 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-cfnapi-699b8f6d77-rp46k_506276c2-7a4c-4603-9ab2-052c409fb136/heat-cfnapi/0.log" Oct 10 19:21:42 crc kubenswrapper[4799]: I1010 19:21:42.437568 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-engine-78d7fd67f8-xrdlh_6f976566-a6af-40d3-81e1-085366e2b6fe/heat-engine/0.log" Oct 10 19:21:42 crc kubenswrapper[4799]: I1010 19:21:42.553609 4799 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_horizon-5c94bc8f8f-92blf_6cf64fd1-1e8a-4fb5-becf-b52d1629e093/horizon/0.log" Oct 10 19:21:42 crc kubenswrapper[4799]: I1010 19:21:42.784842 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-openstack-openstack-cell1-6r6fv_e5dc63d0-e9a5-4035-8308-ef71ec9be69e/install-certs-openstack-openstack-cell1/0.log" Oct 10 19:21:42 crc kubenswrapper[4799]: I1010 19:21:42.900588 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-5c94bc8f8f-92blf_6cf64fd1-1e8a-4fb5-becf-b52d1629e093/horizon-log/0.log" Oct 10 19:21:43 crc kubenswrapper[4799]: I1010 19:21:43.026987 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-openstack-openstack-cell1-2pnf6_94d4ca2f-8a56-4d20-8f4e-2adb8a134bed/install-os-openstack-openstack-cell1/0.log" Oct 10 19:21:43 crc kubenswrapper[4799]: I1010 19:21:43.263365 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29335321-x447z_386b8891-6eca-4986-b808-0e7ac3ec3339/keystone-cron/0.log" Oct 10 19:21:43 crc kubenswrapper[4799]: I1010 19:21:43.311980 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-59fb5d4d57-hlz5n_3f7d1ffe-1c3f-42e7-9b77-966e7e1e1936/keystone-api/0.log" Oct 10 19:21:43 crc kubenswrapper[4799]: I1010 19:21:43.412009 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29335381-9s7wn_32b9508e-11a6-4fe8-970f-2d4c5f8f12c6/keystone-cron/0.log" Oct 10 19:21:43 crc kubenswrapper[4799]: I1010 19:21:43.704248 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_171aaf88-50cd-47d9-a43f-1df1d7ed96f8/kube-state-metrics/0.log" Oct 10 19:21:43 crc kubenswrapper[4799]: I1010 19:21:43.901528 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-openstack-openstack-cell1-9l9vn_87e27a02-1f1f-4d72-be1d-4662e43bb0e3/libvirt-openstack-openstack-cell1/0.log" Oct 10 19:21:44 crc kubenswrapper[4799]: I1010 19:21:44.063426 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_9d2ccf03-5031-462b-8dd8-7b716885ec78/manila-api/0.log" Oct 10 19:21:44 crc kubenswrapper[4799]: I1010 19:21:44.158501 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_9d2ccf03-5031-462b-8dd8-7b716885ec78/manila-api-log/0.log" Oct 10 19:21:44 crc kubenswrapper[4799]: I1010 19:21:44.272960 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_6d5974bf-9794-4599-92ab-7d9f833aa967/probe/0.log" Oct 10 19:21:44 crc kubenswrapper[4799]: I1010 19:21:44.292280 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_6d5974bf-9794-4599-92ab-7d9f833aa967/manila-scheduler/0.log" Oct 10 19:21:44 crc kubenswrapper[4799]: I1010 19:21:44.491595 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_fc512908-5386-4b26-8563-1bb18eeb5a7b/probe/0.log" Oct 10 19:21:44 crc kubenswrapper[4799]: I1010 19:21:44.496539 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_fc512908-5386-4b26-8563-1bb18eeb5a7b/manila-share/0.log" Oct 10 19:21:45 crc kubenswrapper[4799]: I1010 19:21:45.248161 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 19:21:45 crc kubenswrapper[4799]: I1010 19:21:45.248226 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 19:21:45 crc kubenswrapper[4799]: I1010 19:21:45.802281 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6f5b986d77-bn68m_57db1912-1efa-4d2a-ba44-c55a62f3929b/neutron-api/0.log" Oct 10 19:21:45 crc kubenswrapper[4799]: I1010 19:21:45.833224 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6f5b986d77-bn68m_57db1912-1efa-4d2a-ba44-c55a62f3929b/neutron-httpd/0.log" Oct 10 19:21:46 crc kubenswrapper[4799]: I1010 19:21:46.104104 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-dhcp-openstack-openstack-cell1-9snz2_3b3bb6d0-ba03-42fb-aa81-ddc9a1b95d28/neutron-dhcp-openstack-openstack-cell1/0.log" Oct 10 19:21:46 crc kubenswrapper[4799]: I1010 19:21:46.153868 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-openstack-openstack-cell1-2pzds_6851d838-6f19-4e28-87d6-68e79ff22050/neutron-metadata-openstack-openstack-cell1/0.log" Oct 10 19:21:46 crc kubenswrapper[4799]: I1010 19:21:46.544770 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-sriov-openstack-openstack-cell1-8nm8r_c2ec587b-71a4-4612-8079-4c32275f871d/neutron-sriov-openstack-openstack-cell1/0.log" Oct 10 19:21:47 crc kubenswrapper[4799]: I1010 19:21:47.389824 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_a8eabce3-7d36-4c68-b130-ef95aab11607/nova-api-api/0.log" Oct 10 19:21:47 crc kubenswrapper[4799]: I1010 19:21:47.391365 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_a8eabce3-7d36-4c68-b130-ef95aab11607/nova-api-log/0.log" Oct 10 19:21:47 crc kubenswrapper[4799]: I1010 19:21:47.519392 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_3e6ff0b3-5b8c-4b08-8351-a25ee4071299/memcached/0.log" Oct 10 19:21:47 crc kubenswrapper[4799]: I1010 19:21:47.590148 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_69bae6bc-ccad-4146-b345-5ca2a96d74c5/nova-cell0-conductor-conductor/0.log" Oct 10 19:21:47 crc kubenswrapper[4799]: I1010 19:21:47.686683 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_c6588e6c-163e-4898-9dfc-0fa6932e44af/nova-cell1-conductor-conductor/0.log" Oct 10 19:21:47 crc kubenswrapper[4799]: I1010 19:21:47.873490 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_22259f82-d023-44bb-95c6-1def08492d5b/nova-cell1-novncproxy-novncproxy/0.log" Oct 10 19:21:48 crc kubenswrapper[4799]: I1010 19:21:48.021187 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellgtfn5_74fb1b56-dea6-4091-bc8d-0eff60bb1113/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1/0.log" Oct 10 19:21:48 crc kubenswrapper[4799]: I1010 19:21:48.467576 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_e1033dc6-9f93-4a9c-bba1-7ca137bbd199/nova-metadata-log/0.log" Oct 10 19:21:48 
crc kubenswrapper[4799]: I1010 19:21:48.468098 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-openstack-openstack-cell1-vnbnb_decb29f6-bfb5-4bce-bc6c-304a4a8b5964/nova-cell1-openstack-openstack-cell1/0.log" Oct 10 19:21:48 crc kubenswrapper[4799]: I1010 19:21:48.514064 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_e1033dc6-9f93-4a9c-bba1-7ca137bbd199/nova-metadata-metadata/0.log" Oct 10 19:21:48 crc kubenswrapper[4799]: I1010 19:21:48.723467 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_73eb2584-d6fc-4453-a1fb-6df47179064b/nova-scheduler-scheduler/0.log" Oct 10 19:21:48 crc kubenswrapper[4799]: I1010 19:21:48.744813 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-65d4ffcdbb-6xxlk_f349eb02-eae6-4047-852e-bd65208edf94/init/0.log" Oct 10 19:21:48 crc kubenswrapper[4799]: I1010 19:21:48.941946 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-65d4ffcdbb-6xxlk_f349eb02-eae6-4047-852e-bd65208edf94/init/0.log" Oct 10 19:21:48 crc kubenswrapper[4799]: I1010 19:21:48.959168 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-65d4ffcdbb-6xxlk_f349eb02-eae6-4047-852e-bd65208edf94/octavia-api-provider-agent/0.log" Oct 10 19:21:49 crc kubenswrapper[4799]: I1010 19:21:49.037953 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-65d4ffcdbb-6xxlk_f349eb02-eae6-4047-852e-bd65208edf94/octavia-api/0.log" Oct 10 19:21:49 crc kubenswrapper[4799]: I1010 19:21:49.168959 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-healthmanager-rsn7b_feecd9e6-4f59-495f-85ab-0067ed38a79c/init/0.log" Oct 10 19:21:49 crc kubenswrapper[4799]: I1010 19:21:49.399743 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-healthmanager-rsn7b_feecd9e6-4f59-495f-85ab-0067ed38a79c/init/0.log" Oct 10 19:21:49 crc kubenswrapper[4799]: I1010 19:21:49.448304 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-housekeeping-8fpkg_7f287658-c2be-401e-89a1-89203fadb380/init/0.log" Oct 10 19:21:49 crc kubenswrapper[4799]: I1010 19:21:49.449623 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-healthmanager-rsn7b_feecd9e6-4f59-495f-85ab-0067ed38a79c/octavia-healthmanager/0.log" Oct 10 19:21:49 crc kubenswrapper[4799]: I1010 19:21:49.628695 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-housekeeping-8fpkg_7f287658-c2be-401e-89a1-89203fadb380/init/0.log" Oct 10 19:21:49 crc kubenswrapper[4799]: I1010 19:21:49.657611 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-housekeeping-8fpkg_7f287658-c2be-401e-89a1-89203fadb380/octavia-housekeeping/0.log" Oct 10 19:21:49 crc kubenswrapper[4799]: I1010 19:21:49.689479 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-image-upload-678599687f-dlbhf_b3d27d6a-4cd4-4955-9cfb-7a4d92f3af7a/init/0.log" Oct 10 19:21:49 crc kubenswrapper[4799]: I1010 19:21:49.880605 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-image-upload-678599687f-dlbhf_b3d27d6a-4cd4-4955-9cfb-7a4d92f3af7a/init/0.log" Oct 10 19:21:49 crc kubenswrapper[4799]: I1010 19:21:49.895489 4799 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_octavia-image-upload-678599687f-dlbhf_b3d27d6a-4cd4-4955-9cfb-7a4d92f3af7a/octavia-amphora-httpd/0.log" Oct 10 19:21:49 crc kubenswrapper[4799]: I1010 19:21:49.979011 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-rsyslog-nth2b_62197d1d-2108-4294-96f3-afe7487d515b/init/0.log" Oct 10 19:21:50 crc kubenswrapper[4799]: I1010 19:21:50.151889 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-rsyslog-nth2b_62197d1d-2108-4294-96f3-afe7487d515b/init/0.log" Oct 10 19:21:50 crc kubenswrapper[4799]: I1010 19:21:50.153611 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-rsyslog-nth2b_62197d1d-2108-4294-96f3-afe7487d515b/octavia-rsyslog/0.log" Oct 10 19:21:50 crc kubenswrapper[4799]: I1010 19:21:50.203467 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-worker-5fwqt_0908b7ab-2ec4-4514-a38c-0595d3554396/init/0.log" Oct 10 19:21:50 crc kubenswrapper[4799]: I1010 19:21:50.356171 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-worker-5fwqt_0908b7ab-2ec4-4514-a38c-0595d3554396/init/0.log" Oct 10 19:21:50 crc kubenswrapper[4799]: I1010 19:21:50.455557 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_31b3c69e-7294-4166-bacb-98c92f97ab85/mysql-bootstrap/0.log" Oct 10 19:21:50 crc kubenswrapper[4799]: I1010 19:21:50.465764 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-worker-5fwqt_0908b7ab-2ec4-4514-a38c-0595d3554396/octavia-worker/0.log" Oct 10 19:21:50 crc kubenswrapper[4799]: I1010 19:21:50.645659 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_31b3c69e-7294-4166-bacb-98c92f97ab85/mysql-bootstrap/0.log" Oct 10 19:21:50 crc kubenswrapper[4799]: I1010 19:21:50.657998 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_31b3c69e-7294-4166-bacb-98c92f97ab85/galera/0.log" Oct 10 19:21:50 crc kubenswrapper[4799]: I1010 19:21:50.733279 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_78d6963b-d770-4b29-b6de-a09b36913cc0/mysql-bootstrap/0.log" Oct 10 19:21:50 crc kubenswrapper[4799]: I1010 19:21:50.884522 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_78d6963b-d770-4b29-b6de-a09b36913cc0/galera/0.log" Oct 10 19:21:50 crc kubenswrapper[4799]: I1010 19:21:50.926506 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_78d6963b-d770-4b29-b6de-a09b36913cc0/mysql-bootstrap/0.log" Oct 10 19:21:50 crc kubenswrapper[4799]: I1010 19:21:50.935289 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_b0e07cac-b1ab-436c-95ac-8c150d84e709/openstackclient/0.log" Oct 10 19:21:51 crc kubenswrapper[4799]: I1010 19:21:51.122420 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-j24zd_0b6a8795-4a13-49de-a9cd-c58595e216dc/ovn-controller/0.log" Oct 10 19:21:51 crc kubenswrapper[4799]: I1010 19:21:51.216687 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-5hxb5_0da0a07e-3ab0-416b-a5c2-a70d10d75135/openstack-network-exporter/0.log" Oct 10 19:21:51 crc kubenswrapper[4799]: I1010 19:21:51.315348 4799 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovn-controller-ovs-8zzgn_505e76c2-15f5-4188-bc6e-a249a8d753d0/ovsdb-server-init/0.log" Oct 10 19:21:51 crc kubenswrapper[4799]: I1010 19:21:51.522203 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-8zzgn_505e76c2-15f5-4188-bc6e-a249a8d753d0/ovs-vswitchd/0.log" Oct 10 19:21:51 crc kubenswrapper[4799]: I1010 19:21:51.539227 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-8zzgn_505e76c2-15f5-4188-bc6e-a249a8d753d0/ovsdb-server-init/0.log" Oct 10 19:21:51 crc kubenswrapper[4799]: I1010 19:21:51.555141 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-8zzgn_505e76c2-15f5-4188-bc6e-a249a8d753d0/ovsdb-server/0.log" Oct 10 19:21:51 crc kubenswrapper[4799]: I1010 19:21:51.690514 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_56df8c28-dca8-463f-997b-d6c986163dfe/ovn-northd/0.log" Oct 10 19:21:51 crc kubenswrapper[4799]: I1010 19:21:51.737175 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_56df8c28-dca8-463f-997b-d6c986163dfe/openstack-network-exporter/0.log" Oct 10 19:21:51 crc kubenswrapper[4799]: I1010 19:21:51.897942 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-openstack-openstack-cell1-ltfpg_1ba70610-066c-4a5b-a16d-3555884bf1b2/ovn-openstack-openstack-cell1/0.log" Oct 10 19:21:51 crc kubenswrapper[4799]: I1010 19:21:51.954274 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_2a529948-04ef-4796-9237-9e8e30fe5f5c/openstack-network-exporter/0.log" Oct 10 19:21:52 crc kubenswrapper[4799]: I1010 19:21:52.193934 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_2a529948-04ef-4796-9237-9e8e30fe5f5c/ovsdbserver-nb/0.log" Oct 10 19:21:52 crc kubenswrapper[4799]: I1010 19:21:52.325358 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_25609809-9782-4b4b-9e3c-005b491e60a3/ovsdbserver-nb/0.log" Oct 10 19:21:52 crc kubenswrapper[4799]: I1010 19:21:52.332655 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_25609809-9782-4b4b-9e3c-005b491e60a3/openstack-network-exporter/0.log" Oct 10 19:21:52 crc kubenswrapper[4799]: I1010 19:21:52.497930 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_061a94d3-1f07-41b9-b64d-4f2470084fe7/openstack-network-exporter/0.log" Oct 10 19:21:52 crc kubenswrapper[4799]: I1010 19:21:52.523437 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_061a94d3-1f07-41b9-b64d-4f2470084fe7/ovsdbserver-nb/0.log" Oct 10 19:21:52 crc kubenswrapper[4799]: I1010 19:21:52.651554 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_0ec95c7a-ba6b-45d9-bcde-9b26b0068c83/openstack-network-exporter/0.log" Oct 10 19:21:52 crc kubenswrapper[4799]: I1010 19:21:52.703240 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_0ec95c7a-ba6b-45d9-bcde-9b26b0068c83/ovsdbserver-sb/0.log" Oct 10 19:21:52 crc kubenswrapper[4799]: I1010 19:21:52.796949 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_215d8c12-5f45-450c-949b-fce862e0290a/openstack-network-exporter/0.log" Oct 10 19:21:52 crc kubenswrapper[4799]: I1010 19:21:52.843049 4799 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovsdbserver-sb-1_215d8c12-5f45-450c-949b-fce862e0290a/ovsdbserver-sb/0.log" Oct 10 19:21:52 crc kubenswrapper[4799]: I1010 19:21:52.933673 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_a08b3348-e784-42f6-bae6-f2f05b77af51/openstack-network-exporter/0.log" Oct 10 19:21:53 crc kubenswrapper[4799]: I1010 19:21:53.007080 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_a08b3348-e784-42f6-bae6-f2f05b77af51/ovsdbserver-sb/0.log" Oct 10 19:21:53 crc kubenswrapper[4799]: I1010 19:21:53.171162 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-6b78c5594-k6jlg_e35e375b-b334-49ca-865d-cc4852481337/placement-api/0.log" Oct 10 19:21:53 crc kubenswrapper[4799]: I1010 19:21:53.196462 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-6b78c5594-k6jlg_e35e375b-b334-49ca-865d-cc4852481337/placement-log/0.log" Oct 10 19:21:53 crc kubenswrapper[4799]: I1010 19:21:53.285129 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_pre-adoption-validation-openstack-pre-adoption-openstack-cn6jt7_7d5d2b84-7192-4c14-83c7-2fe15a984da5/pre-adoption-validation-openstack-pre-adoption-openstack-cell1/0.log" Oct 10 19:21:53 crc kubenswrapper[4799]: I1010 19:21:53.392235 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_98cdb179-8d6d-47f5-8bfa-c0d77def55df/init-config-reloader/0.log" Oct 10 19:21:53 crc kubenswrapper[4799]: I1010 19:21:53.587214 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_98cdb179-8d6d-47f5-8bfa-c0d77def55df/init-config-reloader/0.log" Oct 10 19:21:53 crc kubenswrapper[4799]: I1010 19:21:53.597622 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_98cdb179-8d6d-47f5-8bfa-c0d77def55df/prometheus/0.log" Oct 10 19:21:53 crc kubenswrapper[4799]: I1010 19:21:53.652202 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_98cdb179-8d6d-47f5-8bfa-c0d77def55df/config-reloader/0.log" Oct 10 19:21:53 crc kubenswrapper[4799]: I1010 19:21:53.659166 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_98cdb179-8d6d-47f5-8bfa-c0d77def55df/thanos-sidecar/0.log" Oct 10 19:21:53 crc kubenswrapper[4799]: I1010 19:21:53.819922 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_217d7ceb-fa68-4e17-bd2d-8cf07d85e871/setup-container/0.log" Oct 10 19:21:53 crc kubenswrapper[4799]: I1010 19:21:53.962218 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_217d7ceb-fa68-4e17-bd2d-8cf07d85e871/setup-container/0.log" Oct 10 19:21:54 crc kubenswrapper[4799]: I1010 19:21:54.015353 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_217d7ceb-fa68-4e17-bd2d-8cf07d85e871/rabbitmq/0.log" Oct 10 19:21:54 crc kubenswrapper[4799]: I1010 19:21:54.074343 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_38e633f5-1f40-40b7-979b-4c34ec12dcf4/setup-container/0.log" Oct 10 19:21:54 crc kubenswrapper[4799]: I1010 19:21:54.265111 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_38e633f5-1f40-40b7-979b-4c34ec12dcf4/setup-container/0.log" Oct 10 19:21:54 crc kubenswrapper[4799]: 
I1010 19:21:54.273588 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-openstack-openstack-cell1-nl6wz_902c2ae4-7de9-4c43-9fb5-9c7ea89e1b31/reboot-os-openstack-openstack-cell1/0.log" Oct 10 19:21:54 crc kubenswrapper[4799]: I1010 19:21:54.329485 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_38e633f5-1f40-40b7-979b-4c34ec12dcf4/rabbitmq/0.log" Oct 10 19:21:55 crc kubenswrapper[4799]: I1010 19:21:55.388108 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-openstack-openstack-cell1-25fp2_18788551-0d2a-43c8-9aa2-d712be9c3c9f/run-os-openstack-openstack-cell1/0.log" Oct 10 19:21:55 crc kubenswrapper[4799]: I1010 19:21:55.442195 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-openstack-qnt8c_24ca7de6-8478-4c9f-b3b2-e1ffe93ec6d2/ssh-known-hosts-openstack/0.log" Oct 10 19:21:55 crc kubenswrapper[4799]: I1010 19:21:55.613820 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-openstack-openstack-cell1-kwcwz_02816ea3-9fb7-46aa-ae23-d6ff431c08b4/telemetry-openstack-openstack-cell1/0.log" Oct 10 19:21:55 crc kubenswrapper[4799]: I1010 19:21:55.751915 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tripleo-cleanup-tripleo-cleanup-openstack-cell1-wpgn4_c4e679ee-ac3b-4e3c-9869-b86de400033e/tripleo-cleanup-tripleo-cleanup-openstack-cell1/0.log" Oct 10 19:21:55 crc kubenswrapper[4799]: I1010 19:21:55.856563 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-openstack-openstack-cell1-cqxqm_22f1eaab-b5ce-4c1f-82f2-b92e28875983/validate-network-openstack-openstack-cell1/0.log" Oct 10 19:22:06 crc kubenswrapper[4799]: I1010 19:22:06.159522 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-8n9w4"] Oct 10 19:22:06 crc kubenswrapper[4799]: E1010 19:22:06.160934 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad47a313-c923-40e0-8cd6-b1e69332bebb" containerName="registry-server" Oct 10 19:22:06 crc kubenswrapper[4799]: I1010 19:22:06.160959 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad47a313-c923-40e0-8cd6-b1e69332bebb" containerName="registry-server" Oct 10 19:22:06 crc kubenswrapper[4799]: E1010 19:22:06.161019 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59c3f7a3-bd96-4aa8-abc0-f064c841e120" containerName="container-00" Oct 10 19:22:06 crc kubenswrapper[4799]: I1010 19:22:06.161032 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="59c3f7a3-bd96-4aa8-abc0-f064c841e120" containerName="container-00" Oct 10 19:22:06 crc kubenswrapper[4799]: E1010 19:22:06.161081 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad47a313-c923-40e0-8cd6-b1e69332bebb" containerName="extract-content" Oct 10 19:22:06 crc kubenswrapper[4799]: I1010 19:22:06.161096 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad47a313-c923-40e0-8cd6-b1e69332bebb" containerName="extract-content" Oct 10 19:22:06 crc kubenswrapper[4799]: E1010 19:22:06.161117 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad47a313-c923-40e0-8cd6-b1e69332bebb" containerName="extract-utilities" Oct 10 19:22:06 crc kubenswrapper[4799]: I1010 19:22:06.161131 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad47a313-c923-40e0-8cd6-b1e69332bebb" containerName="extract-utilities" Oct 10 19:22:06 crc kubenswrapper[4799]: I1010 19:22:06.161529 
4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad47a313-c923-40e0-8cd6-b1e69332bebb" containerName="registry-server" Oct 10 19:22:06 crc kubenswrapper[4799]: I1010 19:22:06.161580 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="59c3f7a3-bd96-4aa8-abc0-f064c841e120" containerName="container-00" Oct 10 19:22:06 crc kubenswrapper[4799]: I1010 19:22:06.164539 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8n9w4" Oct 10 19:22:06 crc kubenswrapper[4799]: I1010 19:22:06.173613 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8n9w4"] Oct 10 19:22:06 crc kubenswrapper[4799]: I1010 19:22:06.309859 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5177601-179d-4fd5-b0de-4da9c8f64794-catalog-content\") pod \"community-operators-8n9w4\" (UID: \"a5177601-179d-4fd5-b0de-4da9c8f64794\") " pod="openshift-marketplace/community-operators-8n9w4" Oct 10 19:22:06 crc kubenswrapper[4799]: I1010 19:22:06.309918 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-md6tq\" (UniqueName: \"kubernetes.io/projected/a5177601-179d-4fd5-b0de-4da9c8f64794-kube-api-access-md6tq\") pod \"community-operators-8n9w4\" (UID: \"a5177601-179d-4fd5-b0de-4da9c8f64794\") " pod="openshift-marketplace/community-operators-8n9w4" Oct 10 19:22:06 crc kubenswrapper[4799]: I1010 19:22:06.310201 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5177601-179d-4fd5-b0de-4da9c8f64794-utilities\") pod \"community-operators-8n9w4\" (UID: \"a5177601-179d-4fd5-b0de-4da9c8f64794\") " pod="openshift-marketplace/community-operators-8n9w4" Oct 10 19:22:06 crc kubenswrapper[4799]: I1010 19:22:06.412208 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5177601-179d-4fd5-b0de-4da9c8f64794-catalog-content\") pod \"community-operators-8n9w4\" (UID: \"a5177601-179d-4fd5-b0de-4da9c8f64794\") " pod="openshift-marketplace/community-operators-8n9w4" Oct 10 19:22:06 crc kubenswrapper[4799]: I1010 19:22:06.412260 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-md6tq\" (UniqueName: \"kubernetes.io/projected/a5177601-179d-4fd5-b0de-4da9c8f64794-kube-api-access-md6tq\") pod \"community-operators-8n9w4\" (UID: \"a5177601-179d-4fd5-b0de-4da9c8f64794\") " pod="openshift-marketplace/community-operators-8n9w4" Oct 10 19:22:06 crc kubenswrapper[4799]: I1010 19:22:06.412338 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5177601-179d-4fd5-b0de-4da9c8f64794-utilities\") pod \"community-operators-8n9w4\" (UID: \"a5177601-179d-4fd5-b0de-4da9c8f64794\") " pod="openshift-marketplace/community-operators-8n9w4" Oct 10 19:22:06 crc kubenswrapper[4799]: I1010 19:22:06.412708 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5177601-179d-4fd5-b0de-4da9c8f64794-catalog-content\") pod \"community-operators-8n9w4\" (UID: \"a5177601-179d-4fd5-b0de-4da9c8f64794\") " pod="openshift-marketplace/community-operators-8n9w4" Oct 10 19:22:06 crc 
kubenswrapper[4799]: I1010 19:22:06.412786 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5177601-179d-4fd5-b0de-4da9c8f64794-utilities\") pod \"community-operators-8n9w4\" (UID: \"a5177601-179d-4fd5-b0de-4da9c8f64794\") " pod="openshift-marketplace/community-operators-8n9w4" Oct 10 19:22:06 crc kubenswrapper[4799]: I1010 19:22:06.431160 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-md6tq\" (UniqueName: \"kubernetes.io/projected/a5177601-179d-4fd5-b0de-4da9c8f64794-kube-api-access-md6tq\") pod \"community-operators-8n9w4\" (UID: \"a5177601-179d-4fd5-b0de-4da9c8f64794\") " pod="openshift-marketplace/community-operators-8n9w4" Oct 10 19:22:06 crc kubenswrapper[4799]: I1010 19:22:06.504034 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8n9w4" Oct 10 19:22:07 crc kubenswrapper[4799]: I1010 19:22:07.045771 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8n9w4"] Oct 10 19:22:07 crc kubenswrapper[4799]: I1010 19:22:07.773346 4799 generic.go:334] "Generic (PLEG): container finished" podID="a5177601-179d-4fd5-b0de-4da9c8f64794" containerID="1120b808774adab2c3c244236573785fae18c35f1c049cee3f19181c99f3a50f" exitCode=0 Oct 10 19:22:07 crc kubenswrapper[4799]: I1010 19:22:07.774853 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8n9w4" event={"ID":"a5177601-179d-4fd5-b0de-4da9c8f64794","Type":"ContainerDied","Data":"1120b808774adab2c3c244236573785fae18c35f1c049cee3f19181c99f3a50f"} Oct 10 19:22:07 crc kubenswrapper[4799]: I1010 19:22:07.774922 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8n9w4" event={"ID":"a5177601-179d-4fd5-b0de-4da9c8f64794","Type":"ContainerStarted","Data":"963403225e5132e15f5934b53471603324dacca20c1ff624d9e15cbeb5740dc1"} Oct 10 19:22:08 crc kubenswrapper[4799]: I1010 19:22:08.786411 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8n9w4" event={"ID":"a5177601-179d-4fd5-b0de-4da9c8f64794","Type":"ContainerStarted","Data":"1f8f3725d7ba48443bddb867f1900ae86ad6aaf2cbaa03a1d21dd7b33cc9c259"} Oct 10 19:22:10 crc kubenswrapper[4799]: I1010 19:22:10.805336 4799 generic.go:334] "Generic (PLEG): container finished" podID="a5177601-179d-4fd5-b0de-4da9c8f64794" containerID="1f8f3725d7ba48443bddb867f1900ae86ad6aaf2cbaa03a1d21dd7b33cc9c259" exitCode=0 Oct 10 19:22:10 crc kubenswrapper[4799]: I1010 19:22:10.805414 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8n9w4" event={"ID":"a5177601-179d-4fd5-b0de-4da9c8f64794","Type":"ContainerDied","Data":"1f8f3725d7ba48443bddb867f1900ae86ad6aaf2cbaa03a1d21dd7b33cc9c259"} Oct 10 19:22:11 crc kubenswrapper[4799]: I1010 19:22:11.820114 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8n9w4" event={"ID":"a5177601-179d-4fd5-b0de-4da9c8f64794","Type":"ContainerStarted","Data":"10517113e5da7b8d931df6c3c3ee7a57c4ac73a4dcbe1b3957c993c044175ea1"} Oct 10 19:22:11 crc kubenswrapper[4799]: I1010 19:22:11.845121 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-8n9w4" podStartSLOduration=2.187154253 podStartE2EDuration="5.845098255s" podCreationTimestamp="2025-10-10 19:22:06 +0000 
UTC" firstStartedPulling="2025-10-10 19:22:07.781505505 +0000 UTC m=+10221.289829620" lastFinishedPulling="2025-10-10 19:22:11.439449497 +0000 UTC m=+10224.947773622" observedRunningTime="2025-10-10 19:22:11.837381987 +0000 UTC m=+10225.345706112" watchObservedRunningTime="2025-10-10 19:22:11.845098255 +0000 UTC m=+10225.353422380" Oct 10 19:22:15 crc kubenswrapper[4799]: I1010 19:22:15.249137 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 19:22:15 crc kubenswrapper[4799]: I1010 19:22:15.249799 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 19:22:16 crc kubenswrapper[4799]: I1010 19:22:16.505494 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-8n9w4" Oct 10 19:22:16 crc kubenswrapper[4799]: I1010 19:22:16.505960 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-8n9w4" Oct 10 19:22:16 crc kubenswrapper[4799]: I1010 19:22:16.573969 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-8n9w4" Oct 10 19:22:17 crc kubenswrapper[4799]: I1010 19:22:17.439533 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-8n9w4" Oct 10 19:22:17 crc kubenswrapper[4799]: I1010 19:22:17.505350 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8n9w4"] Oct 10 19:22:18 crc kubenswrapper[4799]: I1010 19:22:18.949907 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-8n9w4" podUID="a5177601-179d-4fd5-b0de-4da9c8f64794" containerName="registry-server" containerID="cri-o://10517113e5da7b8d931df6c3c3ee7a57c4ac73a4dcbe1b3957c993c044175ea1" gracePeriod=2 Oct 10 19:22:19 crc kubenswrapper[4799]: I1010 19:22:19.515679 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8n9w4" Oct 10 19:22:19 crc kubenswrapper[4799]: I1010 19:22:19.605818 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5177601-179d-4fd5-b0de-4da9c8f64794-utilities\") pod \"a5177601-179d-4fd5-b0de-4da9c8f64794\" (UID: \"a5177601-179d-4fd5-b0de-4da9c8f64794\") " Oct 10 19:22:19 crc kubenswrapper[4799]: I1010 19:22:19.606122 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5177601-179d-4fd5-b0de-4da9c8f64794-catalog-content\") pod \"a5177601-179d-4fd5-b0de-4da9c8f64794\" (UID: \"a5177601-179d-4fd5-b0de-4da9c8f64794\") " Oct 10 19:22:19 crc kubenswrapper[4799]: I1010 19:22:19.606166 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-md6tq\" (UniqueName: \"kubernetes.io/projected/a5177601-179d-4fd5-b0de-4da9c8f64794-kube-api-access-md6tq\") pod \"a5177601-179d-4fd5-b0de-4da9c8f64794\" (UID: \"a5177601-179d-4fd5-b0de-4da9c8f64794\") " Oct 10 19:22:19 crc kubenswrapper[4799]: I1010 19:22:19.606933 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5177601-179d-4fd5-b0de-4da9c8f64794-utilities" (OuterVolumeSpecName: "utilities") pod "a5177601-179d-4fd5-b0de-4da9c8f64794" (UID: "a5177601-179d-4fd5-b0de-4da9c8f64794"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 19:22:19 crc kubenswrapper[4799]: I1010 19:22:19.612472 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5177601-179d-4fd5-b0de-4da9c8f64794-kube-api-access-md6tq" (OuterVolumeSpecName: "kube-api-access-md6tq") pod "a5177601-179d-4fd5-b0de-4da9c8f64794" (UID: "a5177601-179d-4fd5-b0de-4da9c8f64794"). InnerVolumeSpecName "kube-api-access-md6tq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 19:22:19 crc kubenswrapper[4799]: I1010 19:22:19.651055 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5177601-179d-4fd5-b0de-4da9c8f64794-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a5177601-179d-4fd5-b0de-4da9c8f64794" (UID: "a5177601-179d-4fd5-b0de-4da9c8f64794"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 19:22:19 crc kubenswrapper[4799]: I1010 19:22:19.709207 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5177601-179d-4fd5-b0de-4da9c8f64794-utilities\") on node \"crc\" DevicePath \"\"" Oct 10 19:22:19 crc kubenswrapper[4799]: I1010 19:22:19.709261 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5177601-179d-4fd5-b0de-4da9c8f64794-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 10 19:22:19 crc kubenswrapper[4799]: I1010 19:22:19.709325 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-md6tq\" (UniqueName: \"kubernetes.io/projected/a5177601-179d-4fd5-b0de-4da9c8f64794-kube-api-access-md6tq\") on node \"crc\" DevicePath \"\"" Oct 10 19:22:19 crc kubenswrapper[4799]: I1010 19:22:19.962415 4799 generic.go:334] "Generic (PLEG): container finished" podID="a5177601-179d-4fd5-b0de-4da9c8f64794" containerID="10517113e5da7b8d931df6c3c3ee7a57c4ac73a4dcbe1b3957c993c044175ea1" exitCode=0 Oct 10 19:22:19 crc kubenswrapper[4799]: I1010 19:22:19.962459 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8n9w4" event={"ID":"a5177601-179d-4fd5-b0de-4da9c8f64794","Type":"ContainerDied","Data":"10517113e5da7b8d931df6c3c3ee7a57c4ac73a4dcbe1b3957c993c044175ea1"} Oct 10 19:22:19 crc kubenswrapper[4799]: I1010 19:22:19.962492 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8n9w4" event={"ID":"a5177601-179d-4fd5-b0de-4da9c8f64794","Type":"ContainerDied","Data":"963403225e5132e15f5934b53471603324dacca20c1ff624d9e15cbeb5740dc1"} Oct 10 19:22:19 crc kubenswrapper[4799]: I1010 19:22:19.962516 4799 scope.go:117] "RemoveContainer" containerID="10517113e5da7b8d931df6c3c3ee7a57c4ac73a4dcbe1b3957c993c044175ea1" Oct 10 19:22:19 crc kubenswrapper[4799]: I1010 19:22:19.962561 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8n9w4" Oct 10 19:22:19 crc kubenswrapper[4799]: I1010 19:22:19.998838 4799 scope.go:117] "RemoveContainer" containerID="1f8f3725d7ba48443bddb867f1900ae86ad6aaf2cbaa03a1d21dd7b33cc9c259" Oct 10 19:22:20 crc kubenswrapper[4799]: I1010 19:22:20.017784 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8n9w4"] Oct 10 19:22:20 crc kubenswrapper[4799]: I1010 19:22:20.031781 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-8n9w4"] Oct 10 19:22:20 crc kubenswrapper[4799]: I1010 19:22:20.044593 4799 scope.go:117] "RemoveContainer" containerID="1120b808774adab2c3c244236573785fae18c35f1c049cee3f19181c99f3a50f" Oct 10 19:22:20 crc kubenswrapper[4799]: I1010 19:22:20.118288 4799 scope.go:117] "RemoveContainer" containerID="10517113e5da7b8d931df6c3c3ee7a57c4ac73a4dcbe1b3957c993c044175ea1" Oct 10 19:22:20 crc kubenswrapper[4799]: E1010 19:22:20.119077 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10517113e5da7b8d931df6c3c3ee7a57c4ac73a4dcbe1b3957c993c044175ea1\": container with ID starting with 10517113e5da7b8d931df6c3c3ee7a57c4ac73a4dcbe1b3957c993c044175ea1 not found: ID does not exist" containerID="10517113e5da7b8d931df6c3c3ee7a57c4ac73a4dcbe1b3957c993c044175ea1" Oct 10 19:22:20 crc kubenswrapper[4799]: I1010 19:22:20.119212 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10517113e5da7b8d931df6c3c3ee7a57c4ac73a4dcbe1b3957c993c044175ea1"} err="failed to get container status \"10517113e5da7b8d931df6c3c3ee7a57c4ac73a4dcbe1b3957c993c044175ea1\": rpc error: code = NotFound desc = could not find container \"10517113e5da7b8d931df6c3c3ee7a57c4ac73a4dcbe1b3957c993c044175ea1\": container with ID starting with 10517113e5da7b8d931df6c3c3ee7a57c4ac73a4dcbe1b3957c993c044175ea1 not found: ID does not exist" Oct 10 19:22:20 crc kubenswrapper[4799]: I1010 19:22:20.119314 4799 scope.go:117] "RemoveContainer" containerID="1f8f3725d7ba48443bddb867f1900ae86ad6aaf2cbaa03a1d21dd7b33cc9c259" Oct 10 19:22:20 crc kubenswrapper[4799]: E1010 19:22:20.119852 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f8f3725d7ba48443bddb867f1900ae86ad6aaf2cbaa03a1d21dd7b33cc9c259\": container with ID starting with 1f8f3725d7ba48443bddb867f1900ae86ad6aaf2cbaa03a1d21dd7b33cc9c259 not found: ID does not exist" containerID="1f8f3725d7ba48443bddb867f1900ae86ad6aaf2cbaa03a1d21dd7b33cc9c259" Oct 10 19:22:20 crc kubenswrapper[4799]: I1010 19:22:20.119970 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f8f3725d7ba48443bddb867f1900ae86ad6aaf2cbaa03a1d21dd7b33cc9c259"} err="failed to get container status \"1f8f3725d7ba48443bddb867f1900ae86ad6aaf2cbaa03a1d21dd7b33cc9c259\": rpc error: code = NotFound desc = could not find container \"1f8f3725d7ba48443bddb867f1900ae86ad6aaf2cbaa03a1d21dd7b33cc9c259\": container with ID starting with 1f8f3725d7ba48443bddb867f1900ae86ad6aaf2cbaa03a1d21dd7b33cc9c259 not found: ID does not exist" Oct 10 19:22:20 crc kubenswrapper[4799]: I1010 19:22:20.120051 4799 scope.go:117] "RemoveContainer" containerID="1120b808774adab2c3c244236573785fae18c35f1c049cee3f19181c99f3a50f" Oct 10 19:22:20 crc kubenswrapper[4799]: E1010 19:22:20.120459 4799 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"1120b808774adab2c3c244236573785fae18c35f1c049cee3f19181c99f3a50f\": container with ID starting with 1120b808774adab2c3c244236573785fae18c35f1c049cee3f19181c99f3a50f not found: ID does not exist" containerID="1120b808774adab2c3c244236573785fae18c35f1c049cee3f19181c99f3a50f" Oct 10 19:22:20 crc kubenswrapper[4799]: I1010 19:22:20.120595 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1120b808774adab2c3c244236573785fae18c35f1c049cee3f19181c99f3a50f"} err="failed to get container status \"1120b808774adab2c3c244236573785fae18c35f1c049cee3f19181c99f3a50f\": rpc error: code = NotFound desc = could not find container \"1120b808774adab2c3c244236573785fae18c35f1c049cee3f19181c99f3a50f\": container with ID starting with 1120b808774adab2c3c244236573785fae18c35f1c049cee3f19181c99f3a50f not found: ID does not exist" Oct 10 19:22:21 crc kubenswrapper[4799]: I1010 19:22:21.417458 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5177601-179d-4fd5-b0de-4da9c8f64794" path="/var/lib/kubelet/pods/a5177601-179d-4fd5-b0de-4da9c8f64794/volumes" Oct 10 19:22:45 crc kubenswrapper[4799]: I1010 19:22:45.249059 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 19:22:45 crc kubenswrapper[4799]: I1010 19:22:45.249910 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 19:22:45 crc kubenswrapper[4799]: I1010 19:22:45.249988 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 19:22:45 crc kubenswrapper[4799]: I1010 19:22:45.251364 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e4a70c95dd2f99d68bb76ad34b45e9cb4ce8bb671fad71218da22698541986e1"} pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 10 19:22:45 crc kubenswrapper[4799]: I1010 19:22:45.251497 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" containerID="cri-o://e4a70c95dd2f99d68bb76ad34b45e9cb4ce8bb671fad71218da22698541986e1" gracePeriod=600 Oct 10 19:22:46 crc kubenswrapper[4799]: I1010 19:22:46.321704 4799 generic.go:334] "Generic (PLEG): container finished" podID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerID="e4a70c95dd2f99d68bb76ad34b45e9cb4ce8bb671fad71218da22698541986e1" exitCode=0 Oct 10 19:22:46 crc kubenswrapper[4799]: I1010 19:22:46.321888 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" 
event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerDied","Data":"e4a70c95dd2f99d68bb76ad34b45e9cb4ce8bb671fad71218da22698541986e1"} Oct 10 19:22:46 crc kubenswrapper[4799]: I1010 19:22:46.322382 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerStarted","Data":"fd63e0805bbe909759f172e22eb07f44f02ddc6ac94031e7c6af68f116a1b614"} Oct 10 19:22:46 crc kubenswrapper[4799]: I1010 19:22:46.322410 4799 scope.go:117] "RemoveContainer" containerID="e394ce15c48f6a512a9cf8ca7739698a6b4e472715126ea6e3b0f9e567e27a97" Oct 10 19:23:33 crc kubenswrapper[4799]: I1010 19:23:33.956389 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-658bdf4b74-stptz_9e711c48-2d32-4933-b13f-a0f9fec33e0d/kube-rbac-proxy/0.log" Oct 10 19:23:34 crc kubenswrapper[4799]: I1010 19:23:34.092939 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-658bdf4b74-stptz_9e711c48-2d32-4933-b13f-a0f9fec33e0d/manager/0.log" Oct 10 19:23:34 crc kubenswrapper[4799]: I1010 19:23:34.171184 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b_d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5/util/0.log" Oct 10 19:23:34 crc kubenswrapper[4799]: I1010 19:23:34.296656 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b_d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5/util/0.log" Oct 10 19:23:34 crc kubenswrapper[4799]: I1010 19:23:34.333098 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b_d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5/pull/0.log" Oct 10 19:23:34 crc kubenswrapper[4799]: I1010 19:23:34.361711 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b_d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5/pull/0.log" Oct 10 19:23:34 crc kubenswrapper[4799]: I1010 19:23:34.521665 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b_d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5/util/0.log" Oct 10 19:23:34 crc kubenswrapper[4799]: I1010 19:23:34.543684 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b_d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5/extract/0.log" Oct 10 19:23:34 crc kubenswrapper[4799]: I1010 19:23:34.550391 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_bbf55ab9b6da9dfde4a224fc1e3f049ee7cb6cab839422fb52a09a365b8q86b_d8eb7f5e-d8d8-4f95-ad7d-d591d0df2ee5/pull/0.log" Oct 10 19:23:34 crc kubenswrapper[4799]: I1010 19:23:34.687850 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-7b7fb68549-bl9j6_98cf31e5-618a-4363-8a3d-1b0d0bc75b48/kube-rbac-proxy/0.log" Oct 10 19:23:34 crc kubenswrapper[4799]: I1010 19:23:34.743593 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-85d5d9dd78-w2d2v_324da982-9067-490a-98a5-9ad58296841a/kube-rbac-proxy/0.log" Oct 10 19:23:34 crc 
kubenswrapper[4799]: I1010 19:23:34.768519 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-7b7fb68549-bl9j6_98cf31e5-618a-4363-8a3d-1b0d0bc75b48/manager/0.log" Oct 10 19:23:34 crc kubenswrapper[4799]: I1010 19:23:34.883520 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-85d5d9dd78-w2d2v_324da982-9067-490a-98a5-9ad58296841a/manager/0.log" Oct 10 19:23:34 crc kubenswrapper[4799]: I1010 19:23:34.953123 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84b9b84486-dvk5w_a61aa86a-a90e-439b-85e9-15b7a1466785/kube-rbac-proxy/0.log" Oct 10 19:23:35 crc kubenswrapper[4799]: I1010 19:23:35.092285 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84b9b84486-dvk5w_a61aa86a-a90e-439b-85e9-15b7a1466785/manager/0.log" Oct 10 19:23:35 crc kubenswrapper[4799]: I1010 19:23:35.121028 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-858f76bbdd-tzx89_37273794-7563-423e-a2d5-86c9e9f957cb/kube-rbac-proxy/0.log" Oct 10 19:23:35 crc kubenswrapper[4799]: I1010 19:23:35.214378 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-858f76bbdd-tzx89_37273794-7563-423e-a2d5-86c9e9f957cb/manager/0.log" Oct 10 19:23:35 crc kubenswrapper[4799]: I1010 19:23:35.298148 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-7ffbcb7588-kv2gv_b0caa8f4-5c59-402a-9025-a2ba80d70577/kube-rbac-proxy/0.log" Oct 10 19:23:35 crc kubenswrapper[4799]: I1010 19:23:35.358652 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-7ffbcb7588-kv2gv_b0caa8f4-5c59-402a-9025-a2ba80d70577/manager/0.log" Oct 10 19:23:35 crc kubenswrapper[4799]: I1010 19:23:35.538737 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-656bcbd775-nh542_62fda0e4-55d3-481f-8da4-66e8f4dd39d4/kube-rbac-proxy/0.log" Oct 10 19:23:35 crc kubenswrapper[4799]: I1010 19:23:35.639691 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-9c5c78d49-vjjnj_c89f9acf-7ee6-4600-9331-635eb7fce931/kube-rbac-proxy/0.log" Oct 10 19:23:35 crc kubenswrapper[4799]: I1010 19:23:35.730048 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-656bcbd775-nh542_62fda0e4-55d3-481f-8da4-66e8f4dd39d4/manager/0.log" Oct 10 19:23:35 crc kubenswrapper[4799]: I1010 19:23:35.746160 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-9c5c78d49-vjjnj_c89f9acf-7ee6-4600-9331-635eb7fce931/manager/0.log" Oct 10 19:23:35 crc kubenswrapper[4799]: I1010 19:23:35.839810 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-55b6b7c7b8-ll94t_490a3592-9d71-4182-8b1b-6f8c55a01bde/kube-rbac-proxy/0.log" Oct 10 19:23:36 crc kubenswrapper[4799]: I1010 19:23:36.025538 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5f67fbc655-2qw6c_92c8227d-0d33-41f9-b186-2f17c2753fa2/kube-rbac-proxy/0.log" Oct 
10 19:23:36 crc kubenswrapper[4799]: I1010 19:23:36.035304 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-55b6b7c7b8-ll94t_490a3592-9d71-4182-8b1b-6f8c55a01bde/manager/0.log" Oct 10 19:23:36 crc kubenswrapper[4799]: I1010 19:23:36.095663 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5f67fbc655-2qw6c_92c8227d-0d33-41f9-b186-2f17c2753fa2/manager/0.log" Oct 10 19:23:36 crc kubenswrapper[4799]: I1010 19:23:36.201378 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-f9fb45f8f-22wxr_118111a7-9601-4a05-94b9-79601cb47623/kube-rbac-proxy/0.log" Oct 10 19:23:36 crc kubenswrapper[4799]: I1010 19:23:36.248093 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-f9fb45f8f-22wxr_118111a7-9601-4a05-94b9-79601cb47623/manager/0.log" Oct 10 19:23:36 crc kubenswrapper[4799]: I1010 19:23:36.382836 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-79d585cb66-45997_4367b146-2ac4-497e-b15a-c35615498938/kube-rbac-proxy/0.log" Oct 10 19:23:36 crc kubenswrapper[4799]: I1010 19:23:36.478914 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-5df598886f-rcld5_f9c815da-6e31-4ac7-a019-037983b9a9fd/kube-rbac-proxy/0.log" Oct 10 19:23:36 crc kubenswrapper[4799]: I1010 19:23:36.496915 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-79d585cb66-45997_4367b146-2ac4-497e-b15a-c35615498938/manager/0.log" Oct 10 19:23:36 crc kubenswrapper[4799]: I1010 19:23:36.668676 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-69fdcfc5f5-6thbh_71551afb-1aa4-4cdf-9cb4-a136ec3cadf5/kube-rbac-proxy/0.log" Oct 10 19:23:36 crc kubenswrapper[4799]: I1010 19:23:36.760105 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-69fdcfc5f5-6thbh_71551afb-1aa4-4cdf-9cb4-a136ec3cadf5/manager/0.log" Oct 10 19:23:36 crc kubenswrapper[4799]: I1010 19:23:36.790811 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-5df598886f-rcld5_f9c815da-6e31-4ac7-a019-037983b9a9fd/manager/0.log" Oct 10 19:23:36 crc kubenswrapper[4799]: I1010 19:23:36.928224 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-5956dffb7b725zs_ca5131fc-12e4-41b5-a4e2-6e71ed4049e1/kube-rbac-proxy/0.log" Oct 10 19:23:37 crc kubenswrapper[4799]: I1010 19:23:37.041470 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-5956dffb7b725zs_ca5131fc-12e4-41b5-a4e2-6e71ed4049e1/manager/0.log" Oct 10 19:23:37 crc kubenswrapper[4799]: I1010 19:23:37.077453 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-5b95c8954b-nfst6_9267d3c3-fe67-41e8-9240-81955432d822/kube-rbac-proxy/0.log" Oct 10 19:23:37 crc kubenswrapper[4799]: I1010 19:23:37.262081 4799 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-688d597459-zvstz_d917bfc2-cb7d-4628-9a34-3747e15e6fbf/kube-rbac-proxy/0.log" Oct 10 19:23:37 crc kubenswrapper[4799]: I1010 19:23:37.470140 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-688d597459-zvstz_d917bfc2-cb7d-4628-9a34-3747e15e6fbf/operator/0.log" Oct 10 19:23:37 crc kubenswrapper[4799]: I1010 19:23:37.682557 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-j2rtf_79620bb9-c574-4dfb-ac62-62804f00ee08/registry-server/0.log" Oct 10 19:23:37 crc kubenswrapper[4799]: I1010 19:23:37.741503 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-79df5fb58c-g57bz_2ac4be00-5d25-4bb4-8f98-599d7d637d38/kube-rbac-proxy/0.log" Oct 10 19:23:37 crc kubenswrapper[4799]: I1010 19:23:37.830059 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-79df5fb58c-g57bz_2ac4be00-5d25-4bb4-8f98-599d7d637d38/manager/0.log" Oct 10 19:23:37 crc kubenswrapper[4799]: I1010 19:23:37.870091 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-68b6c87b68-wxv94_18c5ac5f-08f2-431b-9aaf-0b2e5c3f9bbf/kube-rbac-proxy/0.log" Oct 10 19:23:37 crc kubenswrapper[4799]: I1010 19:23:37.917306 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-68b6c87b68-wxv94_18c5ac5f-08f2-431b-9aaf-0b2e5c3f9bbf/manager/0.log" Oct 10 19:23:38 crc kubenswrapper[4799]: I1010 19:23:38.038819 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-z59wk_bd9e6a7c-702e-4424-9cbb-f9832e91d4a3/operator/0.log" Oct 10 19:23:38 crc kubenswrapper[4799]: I1010 19:23:38.104830 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-db6d7f97b-pcn6t_e9f4174f-ea12-45c8-840c-ccdce9dd4c1f/kube-rbac-proxy/0.log" Oct 10 19:23:38 crc kubenswrapper[4799]: I1010 19:23:38.194217 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-db6d7f97b-pcn6t_e9f4174f-ea12-45c8-840c-ccdce9dd4c1f/manager/0.log" Oct 10 19:23:38 crc kubenswrapper[4799]: I1010 19:23:38.328317 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-67cfc6749b-vbq52_e669c0e2-564c-4391-b2d0-5ab8cc0f38cd/kube-rbac-proxy/0.log" Oct 10 19:23:38 crc kubenswrapper[4799]: I1010 19:23:38.448996 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5458f77c4-k5cjx_176cdbb9-4289-4322-8112-7ffae6a8efe8/kube-rbac-proxy/0.log" Oct 10 19:23:38 crc kubenswrapper[4799]: I1010 19:23:38.537034 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5458f77c4-k5cjx_176cdbb9-4289-4322-8112-7ffae6a8efe8/manager/0.log" Oct 10 19:23:38 crc kubenswrapper[4799]: I1010 19:23:38.641135 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-7f554bff7b-dq9x7_98a13753-0658-4a22-ba99-aaebe22b5746/kube-rbac-proxy/0.log" Oct 10 19:23:38 crc kubenswrapper[4799]: I1010 19:23:38.687007 4799 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-67cfc6749b-vbq52_e669c0e2-564c-4391-b2d0-5ab8cc0f38cd/manager/0.log" Oct 10 19:23:38 crc kubenswrapper[4799]: I1010 19:23:38.705978 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-7f554bff7b-dq9x7_98a13753-0658-4a22-ba99-aaebe22b5746/manager/0.log" Oct 10 19:23:39 crc kubenswrapper[4799]: I1010 19:23:39.318394 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-5b95c8954b-nfst6_9267d3c3-fe67-41e8-9240-81955432d822/manager/0.log" Oct 10 19:23:56 crc kubenswrapper[4799]: I1010 19:23:56.906368 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-6mnhh_a958b529-c3f0-4131-be7b-4d81a3c25499/control-plane-machine-set-operator/0.log" Oct 10 19:23:57 crc kubenswrapper[4799]: I1010 19:23:57.102792 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-w26tc_159d9178-1402-4232-a9df-ad4389bed9b0/kube-rbac-proxy/0.log" Oct 10 19:23:57 crc kubenswrapper[4799]: I1010 19:23:57.122021 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-w26tc_159d9178-1402-4232-a9df-ad4389bed9b0/machine-api-operator/0.log" Oct 10 19:24:11 crc kubenswrapper[4799]: I1010 19:24:11.127055 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-7d4cc89fcb-cwpfj_74f3da58-30e9-4ee2-8324-e8bc0b8f84c3/cert-manager-controller/0.log" Oct 10 19:24:11 crc kubenswrapper[4799]: I1010 19:24:11.258656 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7d9f95dbf-4gd47_7cd45f74-0b01-4d01-8d27-35cd165fff89/cert-manager-cainjector/0.log" Oct 10 19:24:11 crc kubenswrapper[4799]: I1010 19:24:11.300401 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-d969966f-klvh7_9125ea98-355f-4ec6-ac42-ae70f1dedd24/cert-manager-webhook/0.log" Oct 10 19:24:25 crc kubenswrapper[4799]: I1010 19:24:25.616607 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6b874cbd85-stz8n_9a1584c9-53d3-448d-a84d-123526cfc076/nmstate-console-plugin/0.log" Oct 10 19:24:25 crc kubenswrapper[4799]: I1010 19:24:25.774510 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-44gb8_1cea25d9-ee17-4a2d-8cfc-74b0cad9e91a/nmstate-handler/0.log" Oct 10 19:24:25 crc kubenswrapper[4799]: I1010 19:24:25.788424 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-8szkw_cf339ce7-7475-4cea-a474-df2df7c47cbc/kube-rbac-proxy/0.log" Oct 10 19:24:25 crc kubenswrapper[4799]: I1010 19:24:25.844387 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-8szkw_cf339ce7-7475-4cea-a474-df2df7c47cbc/nmstate-metrics/0.log" Oct 10 19:24:25 crc kubenswrapper[4799]: I1010 19:24:25.941432 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-858ddd8f98-8pflw_5a084a6f-f89a-48f4-b61a-0111ac1b72ba/nmstate-operator/0.log" Oct 10 19:24:26 crc kubenswrapper[4799]: I1010 19:24:26.033830 4799 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-nmstate_nmstate-webhook-6cdbc54649-5j22z_03e8f0e3-1119-4f24-9625-4f799ca6d87f/nmstate-webhook/0.log" Oct 10 19:24:40 crc kubenswrapper[4799]: I1010 19:24:40.876947 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-sbrtn_95668eab-11e1-4872-a646-b8573de8e2cc/kube-rbac-proxy/0.log" Oct 10 19:24:41 crc kubenswrapper[4799]: I1010 19:24:41.159314 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-64bf5d555-ktzxr_c6b6d702-2a2f-4ae0-8ab4-69129ebf689e/frr-k8s-webhook-server/0.log" Oct 10 19:24:41 crc kubenswrapper[4799]: I1010 19:24:41.291146 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zsdl8_230d418d-5545-483e-996f-533e967cf0a4/cp-frr-files/0.log" Oct 10 19:24:41 crc kubenswrapper[4799]: I1010 19:24:41.299403 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-sbrtn_95668eab-11e1-4872-a646-b8573de8e2cc/controller/0.log" Oct 10 19:24:41 crc kubenswrapper[4799]: I1010 19:24:41.447711 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zsdl8_230d418d-5545-483e-996f-533e967cf0a4/cp-metrics/0.log" Oct 10 19:24:41 crc kubenswrapper[4799]: I1010 19:24:41.459733 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zsdl8_230d418d-5545-483e-996f-533e967cf0a4/cp-reloader/0.log" Oct 10 19:24:41 crc kubenswrapper[4799]: I1010 19:24:41.472258 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zsdl8_230d418d-5545-483e-996f-533e967cf0a4/cp-frr-files/0.log" Oct 10 19:24:41 crc kubenswrapper[4799]: I1010 19:24:41.484618 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zsdl8_230d418d-5545-483e-996f-533e967cf0a4/cp-reloader/0.log" Oct 10 19:24:41 crc kubenswrapper[4799]: I1010 19:24:41.641177 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zsdl8_230d418d-5545-483e-996f-533e967cf0a4/cp-frr-files/0.log" Oct 10 19:24:41 crc kubenswrapper[4799]: I1010 19:24:41.669684 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zsdl8_230d418d-5545-483e-996f-533e967cf0a4/cp-metrics/0.log" Oct 10 19:24:41 crc kubenswrapper[4799]: I1010 19:24:41.680336 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zsdl8_230d418d-5545-483e-996f-533e967cf0a4/cp-metrics/0.log" Oct 10 19:24:41 crc kubenswrapper[4799]: I1010 19:24:41.716590 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zsdl8_230d418d-5545-483e-996f-533e967cf0a4/cp-reloader/0.log" Oct 10 19:24:41 crc kubenswrapper[4799]: I1010 19:24:41.941608 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zsdl8_230d418d-5545-483e-996f-533e967cf0a4/cp-frr-files/0.log" Oct 10 19:24:41 crc kubenswrapper[4799]: I1010 19:24:41.949237 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zsdl8_230d418d-5545-483e-996f-533e967cf0a4/cp-metrics/0.log" Oct 10 19:24:41 crc kubenswrapper[4799]: I1010 19:24:41.952303 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zsdl8_230d418d-5545-483e-996f-533e967cf0a4/cp-reloader/0.log" Oct 10 19:24:41 crc kubenswrapper[4799]: I1010 19:24:41.960346 4799 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-zsdl8_230d418d-5545-483e-996f-533e967cf0a4/controller/0.log" Oct 10 19:24:42 crc kubenswrapper[4799]: I1010 19:24:42.113593 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zsdl8_230d418d-5545-483e-996f-533e967cf0a4/frr-metrics/0.log" Oct 10 19:24:42 crc kubenswrapper[4799]: I1010 19:24:42.140623 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zsdl8_230d418d-5545-483e-996f-533e967cf0a4/kube-rbac-proxy/0.log" Oct 10 19:24:42 crc kubenswrapper[4799]: I1010 19:24:42.159412 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zsdl8_230d418d-5545-483e-996f-533e967cf0a4/kube-rbac-proxy-frr/0.log" Oct 10 19:24:42 crc kubenswrapper[4799]: I1010 19:24:42.328647 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zsdl8_230d418d-5545-483e-996f-533e967cf0a4/reloader/0.log" Oct 10 19:24:42 crc kubenswrapper[4799]: I1010 19:24:42.481159 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-75cc6fd4f5-h4qjd_dfd38d6c-f6ac-44c3-9602-c045dcb55735/manager/0.log" Oct 10 19:24:42 crc kubenswrapper[4799]: I1010 19:24:42.594187 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-564bc5bbdc-rzbnf_7914fde3-52d8-49eb-a258-505730801250/webhook-server/0.log" Oct 10 19:24:42 crc kubenswrapper[4799]: I1010 19:24:42.741402 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-6n9wt_b5825284-c8ef-4b48-9838-3da136e5a670/kube-rbac-proxy/0.log" Oct 10 19:24:43 crc kubenswrapper[4799]: I1010 19:24:43.796424 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-6n9wt_b5825284-c8ef-4b48-9838-3da136e5a670/speaker/0.log" Oct 10 19:24:45 crc kubenswrapper[4799]: I1010 19:24:45.248316 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 19:24:45 crc kubenswrapper[4799]: I1010 19:24:45.248572 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 19:24:45 crc kubenswrapper[4799]: I1010 19:24:45.391612 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zsdl8_230d418d-5545-483e-996f-533e967cf0a4/frr/0.log" Oct 10 19:24:58 crc kubenswrapper[4799]: I1010 19:24:58.178241 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b_acb519d9-d10f-4149-bd99-88526d2a60c5/util/0.log" Oct 10 19:24:58 crc kubenswrapper[4799]: I1010 19:24:58.354100 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b_acb519d9-d10f-4149-bd99-88526d2a60c5/util/0.log" Oct 10 19:24:58 crc kubenswrapper[4799]: I1010 19:24:58.397639 4799 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b_acb519d9-d10f-4149-bd99-88526d2a60c5/pull/0.log" Oct 10 19:24:58 crc kubenswrapper[4799]: I1010 19:24:58.419321 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b_acb519d9-d10f-4149-bd99-88526d2a60c5/pull/0.log" Oct 10 19:24:58 crc kubenswrapper[4799]: I1010 19:24:58.548378 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b_acb519d9-d10f-4149-bd99-88526d2a60c5/util/0.log" Oct 10 19:24:58 crc kubenswrapper[4799]: I1010 19:24:58.549566 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b_acb519d9-d10f-4149-bd99-88526d2a60c5/extract/0.log" Oct 10 19:24:58 crc kubenswrapper[4799]: I1010 19:24:58.566026 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69wtj6b_acb519d9-d10f-4149-bd99-88526d2a60c5/pull/0.log" Oct 10 19:24:58 crc kubenswrapper[4799]: I1010 19:24:58.693976 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq_f532bcbd-a09f-4d14-b41d-0e55252454c2/util/0.log" Oct 10 19:24:58 crc kubenswrapper[4799]: I1010 19:24:58.915052 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq_f532bcbd-a09f-4d14-b41d-0e55252454c2/pull/0.log" Oct 10 19:24:58 crc kubenswrapper[4799]: I1010 19:24:58.915138 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq_f532bcbd-a09f-4d14-b41d-0e55252454c2/pull/0.log" Oct 10 19:24:58 crc kubenswrapper[4799]: I1010 19:24:58.924070 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq_f532bcbd-a09f-4d14-b41d-0e55252454c2/util/0.log" Oct 10 19:24:59 crc kubenswrapper[4799]: I1010 19:24:59.775949 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq_f532bcbd-a09f-4d14-b41d-0e55252454c2/pull/0.log" Oct 10 19:24:59 crc kubenswrapper[4799]: I1010 19:24:59.786003 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq_f532bcbd-a09f-4d14-b41d-0e55252454c2/util/0.log" Oct 10 19:24:59 crc kubenswrapper[4799]: I1010 19:24:59.797306 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d24s9jq_f532bcbd-a09f-4d14-b41d-0e55252454c2/extract/0.log" Oct 10 19:24:59 crc kubenswrapper[4799]: I1010 19:24:59.934689 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6_0c273054-c72f-4e27-88e5-a7366ceb9dde/util/0.log" Oct 10 19:25:00 crc kubenswrapper[4799]: I1010 19:25:00.122621 4799 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6_0c273054-c72f-4e27-88e5-a7366ceb9dde/pull/0.log" Oct 10 19:25:00 crc kubenswrapper[4799]: I1010 19:25:00.130411 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6_0c273054-c72f-4e27-88e5-a7366ceb9dde/util/0.log" Oct 10 19:25:00 crc kubenswrapper[4799]: I1010 19:25:00.179033 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6_0c273054-c72f-4e27-88e5-a7366ceb9dde/pull/0.log" Oct 10 19:25:00 crc kubenswrapper[4799]: I1010 19:25:00.332542 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6_0c273054-c72f-4e27-88e5-a7366ceb9dde/util/0.log" Oct 10 19:25:00 crc kubenswrapper[4799]: I1010 19:25:00.333106 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6_0c273054-c72f-4e27-88e5-a7366ceb9dde/pull/0.log" Oct 10 19:25:00 crc kubenswrapper[4799]: I1010 19:25:00.381447 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dmrqq6_0c273054-c72f-4e27-88e5-a7366ceb9dde/extract/0.log" Oct 10 19:25:00 crc kubenswrapper[4799]: I1010 19:25:00.477641 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7956v_5738a268-1112-42a4-9935-b60b3066e452/extract-utilities/0.log" Oct 10 19:25:00 crc kubenswrapper[4799]: I1010 19:25:00.663557 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7956v_5738a268-1112-42a4-9935-b60b3066e452/extract-utilities/0.log" Oct 10 19:25:00 crc kubenswrapper[4799]: I1010 19:25:00.684967 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7956v_5738a268-1112-42a4-9935-b60b3066e452/extract-content/0.log" Oct 10 19:25:00 crc kubenswrapper[4799]: I1010 19:25:00.734135 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7956v_5738a268-1112-42a4-9935-b60b3066e452/extract-content/0.log" Oct 10 19:25:00 crc kubenswrapper[4799]: I1010 19:25:00.943071 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7956v_5738a268-1112-42a4-9935-b60b3066e452/extract-utilities/0.log" Oct 10 19:25:00 crc kubenswrapper[4799]: I1010 19:25:00.944494 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7956v_5738a268-1112-42a4-9935-b60b3066e452/extract-content/0.log" Oct 10 19:25:01 crc kubenswrapper[4799]: I1010 19:25:01.760181 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fdnkj_69cca437-b936-4407-9b61-335d742d795a/extract-utilities/0.log" Oct 10 19:25:01 crc kubenswrapper[4799]: I1010 19:25:01.943887 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fdnkj_69cca437-b936-4407-9b61-335d742d795a/extract-content/0.log" Oct 10 19:25:01 crc kubenswrapper[4799]: I1010 19:25:01.974508 4799 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_community-operators-fdnkj_69cca437-b936-4407-9b61-335d742d795a/extract-utilities/0.log" Oct 10 19:25:01 crc kubenswrapper[4799]: I1010 19:25:01.976460 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fdnkj_69cca437-b936-4407-9b61-335d742d795a/extract-content/0.log" Oct 10 19:25:02 crc kubenswrapper[4799]: I1010 19:25:02.176829 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7956v_5738a268-1112-42a4-9935-b60b3066e452/registry-server/0.log" Oct 10 19:25:02 crc kubenswrapper[4799]: I1010 19:25:02.250432 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fdnkj_69cca437-b936-4407-9b61-335d742d795a/extract-utilities/0.log" Oct 10 19:25:02 crc kubenswrapper[4799]: I1010 19:25:02.272268 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fdnkj_69cca437-b936-4407-9b61-335d742d795a/extract-content/0.log" Oct 10 19:25:02 crc kubenswrapper[4799]: I1010 19:25:02.474392 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf_d37f944b-0659-443a-baf7-fb1f2fc07fc2/util/0.log" Oct 10 19:25:02 crc kubenswrapper[4799]: I1010 19:25:02.660560 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf_d37f944b-0659-443a-baf7-fb1f2fc07fc2/util/0.log" Oct 10 19:25:02 crc kubenswrapper[4799]: I1010 19:25:02.667311 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf_d37f944b-0659-443a-baf7-fb1f2fc07fc2/pull/0.log" Oct 10 19:25:02 crc kubenswrapper[4799]: I1010 19:25:02.686528 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf_d37f944b-0659-443a-baf7-fb1f2fc07fc2/pull/0.log" Oct 10 19:25:02 crc kubenswrapper[4799]: I1010 19:25:02.864866 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf_d37f944b-0659-443a-baf7-fb1f2fc07fc2/util/0.log" Oct 10 19:25:03 crc kubenswrapper[4799]: I1010 19:25:03.009436 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf_d37f944b-0659-443a-baf7-fb1f2fc07fc2/pull/0.log" Oct 10 19:25:03 crc kubenswrapper[4799]: I1010 19:25:03.016942 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c29kcf_d37f944b-0659-443a-baf7-fb1f2fc07fc2/extract/0.log" Oct 10 19:25:03 crc kubenswrapper[4799]: I1010 19:25:03.150348 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-82hks_ec7d66a7-bea8-4dd5-8d91-84ec51cd57a7/marketplace-operator/0.log" Oct 10 19:25:03 crc kubenswrapper[4799]: I1010 19:25:03.324044 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fdnkj_69cca437-b936-4407-9b61-335d742d795a/registry-server/0.log" Oct 10 19:25:03 crc kubenswrapper[4799]: I1010 19:25:03.450237 4799 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-s2pcd_96d94a46-8fc7-4315-8353-8f36275a0669/extract-utilities/0.log" Oct 10 19:25:03 crc kubenswrapper[4799]: I1010 19:25:03.547569 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-s2pcd_96d94a46-8fc7-4315-8353-8f36275a0669/extract-utilities/0.log" Oct 10 19:25:03 crc kubenswrapper[4799]: I1010 19:25:03.578382 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-s2pcd_96d94a46-8fc7-4315-8353-8f36275a0669/extract-content/0.log" Oct 10 19:25:03 crc kubenswrapper[4799]: I1010 19:25:03.579905 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-s2pcd_96d94a46-8fc7-4315-8353-8f36275a0669/extract-content/0.log" Oct 10 19:25:03 crc kubenswrapper[4799]: I1010 19:25:03.770915 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-s2pcd_96d94a46-8fc7-4315-8353-8f36275a0669/extract-utilities/0.log" Oct 10 19:25:03 crc kubenswrapper[4799]: I1010 19:25:03.777580 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-s2pcd_96d94a46-8fc7-4315-8353-8f36275a0669/extract-content/0.log" Oct 10 19:25:03 crc kubenswrapper[4799]: I1010 19:25:03.890842 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bmgch_a313fc1d-b984-4d89-bfa1-2703e0e27a5b/extract-utilities/0.log" Oct 10 19:25:04 crc kubenswrapper[4799]: I1010 19:25:04.130853 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-s2pcd_96d94a46-8fc7-4315-8353-8f36275a0669/registry-server/0.log" Oct 10 19:25:04 crc kubenswrapper[4799]: I1010 19:25:04.163359 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bmgch_a313fc1d-b984-4d89-bfa1-2703e0e27a5b/extract-utilities/0.log" Oct 10 19:25:04 crc kubenswrapper[4799]: I1010 19:25:04.185243 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bmgch_a313fc1d-b984-4d89-bfa1-2703e0e27a5b/extract-content/0.log" Oct 10 19:25:04 crc kubenswrapper[4799]: I1010 19:25:04.187326 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bmgch_a313fc1d-b984-4d89-bfa1-2703e0e27a5b/extract-content/0.log" Oct 10 19:25:04 crc kubenswrapper[4799]: I1010 19:25:04.301405 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bmgch_a313fc1d-b984-4d89-bfa1-2703e0e27a5b/extract-utilities/0.log" Oct 10 19:25:04 crc kubenswrapper[4799]: I1010 19:25:04.324899 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bmgch_a313fc1d-b984-4d89-bfa1-2703e0e27a5b/extract-content/0.log" Oct 10 19:25:05 crc kubenswrapper[4799]: I1010 19:25:05.469153 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bmgch_a313fc1d-b984-4d89-bfa1-2703e0e27a5b/registry-server/0.log" Oct 10 19:25:15 crc kubenswrapper[4799]: I1010 19:25:15.249067 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 19:25:15 crc 
kubenswrapper[4799]: I1010 19:25:15.249713 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 19:25:19 crc kubenswrapper[4799]: I1010 19:25:19.236640 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-7c8cf85677-bk4jp_f4d71c7a-ca15-4516-804a-a318b170705a/prometheus-operator/0.log" Oct 10 19:25:19 crc kubenswrapper[4799]: I1010 19:25:19.385730 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-7549bdddd6-d69ks_a0f9c29a-475d-41c0-8519-8cb77db2ae52/prometheus-operator-admission-webhook/0.log" Oct 10 19:25:19 crc kubenswrapper[4799]: I1010 19:25:19.445710 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-7549bdddd6-wfjc9_b4149afd-c602-43fa-b00c-f0b0738b2193/prometheus-operator-admission-webhook/0.log" Oct 10 19:25:19 crc kubenswrapper[4799]: I1010 19:25:19.597082 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-cc5f78dfc-jn5lf_f5d3a877-feb5-48ab-b4a4-becae9e904e8/operator/0.log" Oct 10 19:25:19 crc kubenswrapper[4799]: I1010 19:25:19.636106 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-54bc95c9fb-ddg9b_e2ad530e-a05b-4fef-85e7-85a827687bb6/perses-operator/0.log" Oct 10 19:25:39 crc kubenswrapper[4799]: E1010 19:25:39.460836 4799 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.145:41260->38.102.83.145:34753: write tcp 38.102.83.145:41260->38.102.83.145:34753: write: broken pipe Oct 10 19:25:45 crc kubenswrapper[4799]: I1010 19:25:45.248242 4799 patch_prober.go:28] interesting pod/machine-config-daemon-rh8zc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 10 19:25:45 crc kubenswrapper[4799]: I1010 19:25:45.248930 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 10 19:25:45 crc kubenswrapper[4799]: I1010 19:25:45.249007 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" Oct 10 19:25:45 crc kubenswrapper[4799]: I1010 19:25:45.250333 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fd63e0805bbe909759f172e22eb07f44f02ddc6ac94031e7c6af68f116a1b614"} pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 10 19:25:45 crc kubenswrapper[4799]: I1010 19:25:45.250437 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" 
podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerName="machine-config-daemon" containerID="cri-o://fd63e0805bbe909759f172e22eb07f44f02ddc6ac94031e7c6af68f116a1b614" gracePeriod=600 Oct 10 19:25:45 crc kubenswrapper[4799]: E1010 19:25:45.374519 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:25:45 crc kubenswrapper[4799]: I1010 19:25:45.485837 4799 generic.go:334] "Generic (PLEG): container finished" podID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" containerID="fd63e0805bbe909759f172e22eb07f44f02ddc6ac94031e7c6af68f116a1b614" exitCode=0 Oct 10 19:25:45 crc kubenswrapper[4799]: I1010 19:25:45.486059 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" event={"ID":"6cebefda-e31d-4be2-9bf4-8e1f8ec002cb","Type":"ContainerDied","Data":"fd63e0805bbe909759f172e22eb07f44f02ddc6ac94031e7c6af68f116a1b614"} Oct 10 19:25:45 crc kubenswrapper[4799]: I1010 19:25:45.486197 4799 scope.go:117] "RemoveContainer" containerID="e4a70c95dd2f99d68bb76ad34b45e9cb4ce8bb671fad71218da22698541986e1" Oct 10 19:25:45 crc kubenswrapper[4799]: I1010 19:25:45.487195 4799 scope.go:117] "RemoveContainer" containerID="fd63e0805bbe909759f172e22eb07f44f02ddc6ac94031e7c6af68f116a1b614" Oct 10 19:25:45 crc kubenswrapper[4799]: E1010 19:25:45.487895 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:25:58 crc kubenswrapper[4799]: I1010 19:25:58.404898 4799 scope.go:117] "RemoveContainer" containerID="fd63e0805bbe909759f172e22eb07f44f02ddc6ac94031e7c6af68f116a1b614" Oct 10 19:25:58 crc kubenswrapper[4799]: E1010 19:25:58.406151 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:26:12 crc kubenswrapper[4799]: I1010 19:26:12.403577 4799 scope.go:117] "RemoveContainer" containerID="fd63e0805bbe909759f172e22eb07f44f02ddc6ac94031e7c6af68f116a1b614" Oct 10 19:26:12 crc kubenswrapper[4799]: E1010 19:26:12.404889 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:26:18 crc kubenswrapper[4799]: I1010 19:26:18.907466 4799 scope.go:117] 
"RemoveContainer" containerID="8715c85a0f3db5fa9e35b869d2b6eb02b4fa6f06fb0caa9c59815382d59d6361" Oct 10 19:26:18 crc kubenswrapper[4799]: I1010 19:26:18.935302 4799 scope.go:117] "RemoveContainer" containerID="5ac82a7d5927eb7a8aaf069b30b1c1b68c742ca5950a32424bffe524d7b6ac12" Oct 10 19:26:18 crc kubenswrapper[4799]: I1010 19:26:18.967517 4799 scope.go:117] "RemoveContainer" containerID="720d109f094aeecaff3b30d164c47f8d4c9875416173006b73f3edd028a307d7" Oct 10 19:26:23 crc kubenswrapper[4799]: I1010 19:26:23.409913 4799 scope.go:117] "RemoveContainer" containerID="fd63e0805bbe909759f172e22eb07f44f02ddc6ac94031e7c6af68f116a1b614" Oct 10 19:26:23 crc kubenswrapper[4799]: E1010 19:26:23.410886 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:26:34 crc kubenswrapper[4799]: I1010 19:26:34.402972 4799 scope.go:117] "RemoveContainer" containerID="fd63e0805bbe909759f172e22eb07f44f02ddc6ac94031e7c6af68f116a1b614" Oct 10 19:26:34 crc kubenswrapper[4799]: E1010 19:26:34.405948 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:26:45 crc kubenswrapper[4799]: I1010 19:26:45.403862 4799 scope.go:117] "RemoveContainer" containerID="fd63e0805bbe909759f172e22eb07f44f02ddc6ac94031e7c6af68f116a1b614" Oct 10 19:26:45 crc kubenswrapper[4799]: E1010 19:26:45.404921 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:26:56 crc kubenswrapper[4799]: I1010 19:26:56.403267 4799 scope.go:117] "RemoveContainer" containerID="fd63e0805bbe909759f172e22eb07f44f02ddc6ac94031e7c6af68f116a1b614" Oct 10 19:26:56 crc kubenswrapper[4799]: E1010 19:26:56.404550 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:27:11 crc kubenswrapper[4799]: I1010 19:27:11.404295 4799 scope.go:117] "RemoveContainer" containerID="fd63e0805bbe909759f172e22eb07f44f02ddc6ac94031e7c6af68f116a1b614" Oct 10 19:27:11 crc kubenswrapper[4799]: E1010 19:27:11.406692 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting 
failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:27:19 crc kubenswrapper[4799]: I1010 19:27:19.067010 4799 scope.go:117] "RemoveContainer" containerID="aca0ece093820bf04e922137b16eea383805bc56c0c4b16f6372a33c40958fbd" Oct 10 19:27:22 crc kubenswrapper[4799]: I1010 19:27:22.403682 4799 scope.go:117] "RemoveContainer" containerID="fd63e0805bbe909759f172e22eb07f44f02ddc6ac94031e7c6af68f116a1b614" Oct 10 19:27:22 crc kubenswrapper[4799]: E1010 19:27:22.404582 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:27:23 crc kubenswrapper[4799]: I1010 19:27:23.854299 4799 generic.go:334] "Generic (PLEG): container finished" podID="ac543bf9-73b9-417e-a8be-708d36ad2353" containerID="c2dd7ae84da2148bc46d53da826524e87cbc1d2d7b704dfb5dc65507b254cf2f" exitCode=0 Oct 10 19:27:23 crc kubenswrapper[4799]: I1010 19:27:23.854622 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-k9xz4/must-gather-z22h5" event={"ID":"ac543bf9-73b9-417e-a8be-708d36ad2353","Type":"ContainerDied","Data":"c2dd7ae84da2148bc46d53da826524e87cbc1d2d7b704dfb5dc65507b254cf2f"} Oct 10 19:27:23 crc kubenswrapper[4799]: I1010 19:27:23.855699 4799 scope.go:117] "RemoveContainer" containerID="c2dd7ae84da2148bc46d53da826524e87cbc1d2d7b704dfb5dc65507b254cf2f" Oct 10 19:27:24 crc kubenswrapper[4799]: I1010 19:27:24.723593 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-k9xz4_must-gather-z22h5_ac543bf9-73b9-417e-a8be-708d36ad2353/gather/0.log" Oct 10 19:27:32 crc kubenswrapper[4799]: I1010 19:27:32.553904 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-k9xz4/must-gather-z22h5"] Oct 10 19:27:32 crc kubenswrapper[4799]: I1010 19:27:32.555003 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-k9xz4/must-gather-z22h5" podUID="ac543bf9-73b9-417e-a8be-708d36ad2353" containerName="copy" containerID="cri-o://bfff071ce9d2dc0ea8e2f7691a4dc1b77c9970f09c293ed40e5f84933f3ce4f6" gracePeriod=2 Oct 10 19:27:32 crc kubenswrapper[4799]: I1010 19:27:32.568846 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-k9xz4/must-gather-z22h5"] Oct 10 19:27:32 crc kubenswrapper[4799]: I1010 19:27:32.971001 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-k9xz4_must-gather-z22h5_ac543bf9-73b9-417e-a8be-708d36ad2353/copy/0.log" Oct 10 19:27:32 crc kubenswrapper[4799]: I1010 19:27:32.971609 4799 generic.go:334] "Generic (PLEG): container finished" podID="ac543bf9-73b9-417e-a8be-708d36ad2353" containerID="bfff071ce9d2dc0ea8e2f7691a4dc1b77c9970f09c293ed40e5f84933f3ce4f6" exitCode=143 Oct 10 19:27:32 crc kubenswrapper[4799]: I1010 19:27:32.971662 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="305ec1c5437d4f08a454e99c2610a48a21fb01df54b978fcd253cd7d720a1dd5" Oct 10 19:27:33 crc kubenswrapper[4799]: I1010 19:27:33.566734 4799 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-k9xz4_must-gather-z22h5_ac543bf9-73b9-417e-a8be-708d36ad2353/copy/0.log" Oct 10 19:27:33 crc kubenswrapper[4799]: I1010 19:27:33.567188 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-k9xz4/must-gather-z22h5" Oct 10 19:27:33 crc kubenswrapper[4799]: I1010 19:27:33.722208 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ac543bf9-73b9-417e-a8be-708d36ad2353-must-gather-output\") pod \"ac543bf9-73b9-417e-a8be-708d36ad2353\" (UID: \"ac543bf9-73b9-417e-a8be-708d36ad2353\") " Oct 10 19:27:33 crc kubenswrapper[4799]: I1010 19:27:33.722580 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r988b\" (UniqueName: \"kubernetes.io/projected/ac543bf9-73b9-417e-a8be-708d36ad2353-kube-api-access-r988b\") pod \"ac543bf9-73b9-417e-a8be-708d36ad2353\" (UID: \"ac543bf9-73b9-417e-a8be-708d36ad2353\") " Oct 10 19:27:33 crc kubenswrapper[4799]: I1010 19:27:33.731972 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac543bf9-73b9-417e-a8be-708d36ad2353-kube-api-access-r988b" (OuterVolumeSpecName: "kube-api-access-r988b") pod "ac543bf9-73b9-417e-a8be-708d36ad2353" (UID: "ac543bf9-73b9-417e-a8be-708d36ad2353"). InnerVolumeSpecName "kube-api-access-r988b". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 10 19:27:33 crc kubenswrapper[4799]: I1010 19:27:33.824539 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r988b\" (UniqueName: \"kubernetes.io/projected/ac543bf9-73b9-417e-a8be-708d36ad2353-kube-api-access-r988b\") on node \"crc\" DevicePath \"\"" Oct 10 19:27:33 crc kubenswrapper[4799]: I1010 19:27:33.928629 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac543bf9-73b9-417e-a8be-708d36ad2353-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "ac543bf9-73b9-417e-a8be-708d36ad2353" (UID: "ac543bf9-73b9-417e-a8be-708d36ad2353"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 10 19:27:34 crc kubenswrapper[4799]: I1010 19:27:34.001039 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-k9xz4/must-gather-z22h5" Oct 10 19:27:34 crc kubenswrapper[4799]: I1010 19:27:34.029065 4799 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ac543bf9-73b9-417e-a8be-708d36ad2353-must-gather-output\") on node \"crc\" DevicePath \"\"" Oct 10 19:27:35 crc kubenswrapper[4799]: I1010 19:27:35.407512 4799 scope.go:117] "RemoveContainer" containerID="fd63e0805bbe909759f172e22eb07f44f02ddc6ac94031e7c6af68f116a1b614" Oct 10 19:27:35 crc kubenswrapper[4799]: E1010 19:27:35.408061 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:27:35 crc kubenswrapper[4799]: I1010 19:27:35.418440 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac543bf9-73b9-417e-a8be-708d36ad2353" path="/var/lib/kubelet/pods/ac543bf9-73b9-417e-a8be-708d36ad2353/volumes" Oct 10 19:27:50 crc kubenswrapper[4799]: I1010 19:27:50.402625 4799 scope.go:117] "RemoveContainer" containerID="fd63e0805bbe909759f172e22eb07f44f02ddc6ac94031e7c6af68f116a1b614" Oct 10 19:27:50 crc kubenswrapper[4799]: E1010 19:27:50.403531 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:28:02 crc kubenswrapper[4799]: I1010 19:28:02.403459 4799 scope.go:117] "RemoveContainer" containerID="fd63e0805bbe909759f172e22eb07f44f02ddc6ac94031e7c6af68f116a1b614" Oct 10 19:28:02 crc kubenswrapper[4799]: E1010 19:28:02.406036 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:28:16 crc kubenswrapper[4799]: I1010 19:28:16.404376 4799 scope.go:117] "RemoveContainer" containerID="fd63e0805bbe909759f172e22eb07f44f02ddc6ac94031e7c6af68f116a1b614" Oct 10 19:28:16 crc kubenswrapper[4799]: E1010 19:28:16.406494 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" Oct 10 19:28:19 crc kubenswrapper[4799]: I1010 19:28:19.134924 4799 scope.go:117] "RemoveContainer" containerID="c2dd7ae84da2148bc46d53da826524e87cbc1d2d7b704dfb5dc65507b254cf2f" Oct 10 19:28:19 crc kubenswrapper[4799]: I1010 19:28:19.242532 4799 scope.go:117] 
"RemoveContainer" containerID="bfff071ce9d2dc0ea8e2f7691a4dc1b77c9970f09c293ed40e5f84933f3ce4f6" Oct 10 19:28:29 crc kubenswrapper[4799]: I1010 19:28:29.404138 4799 scope.go:117] "RemoveContainer" containerID="fd63e0805bbe909759f172e22eb07f44f02ddc6ac94031e7c6af68f116a1b614" Oct 10 19:28:29 crc kubenswrapper[4799]: E1010 19:28:29.405311 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rh8zc_openshift-machine-config-operator(6cebefda-e31d-4be2-9bf4-8e1f8ec002cb)\"" pod="openshift-machine-config-operator/machine-config-daemon-rh8zc" podUID="6cebefda-e31d-4be2-9bf4-8e1f8ec002cb" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515072257153024454 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015072257154017372 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015072232027016505 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015072232027015455 5ustar corecore